diff --git a/.env b/.env index 70bda0f60..31d551eaa 100644 --- a/.env +++ b/.env @@ -1,8 +1,9 @@ # Django environment variables -ENVIRONMENT=development +ENVIRONMENT=local DJANGO_SECRET_KEY=local UWSGI_HTTP=0.0.0.0:8000 UWSGI_STATIC_MAP=/static=/static +CORS_ORIGIN_WHITELIST=http://0.0.0.0:2999,http://localhost:2999,http://zaken-frontend.localhost,https://zaken-frontend.localhost DATABASE_NAME=zaken DATABASE_USER=zaken DATABASE_PASSWORD=insecure @@ -10,18 +11,18 @@ DATABASE_HOST=database DATABASE_PORT=5432 SENTRY_DSN=https://foo@foo.data.amsterdam.nl/0 LOCAL_DEVELOPMENT_AUTHENTICATION=True +LOGGING_LEVEL=DEBUG # To prevent flooding the logging in local development. Default is DEBUG. SECRET_KEY_TOP_ZAKEN=SECRET_KEY_TOP_ZAKEN SECRET_KEY_TON_ZAKEN=SECRET_KEY_TON_ZAKEN BELASTING_API_URL=https://api-acc.belastingen.centric.eu/bel/inn/afne/vora/v1/vorderingenidentificatienummer/ BAG_API_SEARCH_URL=https://api.data.amsterdam.nl/atlas/search/adres/ BAG_API_NUMMERAANDUIDING_SEARCH_URL=https://api.data.amsterdam.nl/v1/bag/nummeraanduidingen/ +BAG_API_BENKAGG_SEARCH_URL=https://api.data.amsterdam.nl/v1/benkagg/adresseerbareobjecten/ DECOS_JOIN_USERNAME=ZakenTop DECOS_JOIN_PASSWORD=insecure -RABBIT_MQ_USERNAME=rabbit_zaken_user -RABBIT_MQ_PASSWORD=rabbit_zaken_password -RABBIT_MQ_URL=rabbitmq -RABBIT_MQ_PORT=5672 -REDIS=default:password@zaak-redis:6379 +REDIS_HOST=zaak-redis +REDIS_PORT=6379 +REDIS_PASSWORD=password DEFAULT_THEME=Vakantieverhuur DEFAULT_REASON=SIA melding VAKANTIEVERHUUR_TOERISTISCHE_VERHUUR_API_URL=https://api.acceptatie.toeristischeverhuur.nl/api/ @@ -45,12 +46,8 @@ POSTGRES_DB=zaken POSTGRES_USER=zaken POSTGRES_PASSWORD=insecure -# RabbitMQ service environment variables -DEFAULT_USER=rabbit_zaken_user -DEFAULT_PASS=rabbit_zaken_password - # OpenZaak environment variables -OPENZAAK_ENABLED=True +OPENZAAK_ENABLED=False OPENZAAK_CATALOGI_URL=http://172.17.0.1:8000/catalogi/api/v1/catalogussen/c135a500-beac-4774-bc0d-b582630e605d OPENZAAK_DEFAULT_INFORMATIEOBJECTTYPE=http://172.17.0.1:8000/catalogi/api/v1/informatieobjecttypen/b96d14b0-03ad-421a-bc7c-b501652a04b2 HOST=http://172.17.0.1:8080 diff --git a/.github/workflows/e2e-tests.yml b/.github/workflows/e2e-tests.yml index fb0e50446..e3cb4e76b 100644 --- a/.github/workflows/e2e-tests.yml +++ b/.github/workflows/e2e-tests.yml @@ -22,7 +22,7 @@ jobs: - run: docker network create zaken_network - run: docker network create top_and_zaak_backend_bridge - - run: docker-compose -f docker-compose.test.yml up --detach + - run: docker-compose -f docker-compose.local.yml up --detach - run: sleep 30 - run: bash bin/setup_credentials.sh - run: ./e2e-tests/fix_models.sh diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index e70d121cf..16439fad3 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -28,7 +28,7 @@ jobs: - run: docker network create top_and_zaak_backend_bridge working-directory: backend - - run: docker-compose up --build --detach + - run: docker-compose -f docker-compose.local.yml up --build --detach working-directory: backend ################################################### diff --git a/.github/workflows/main.workflow.yml b/.github/workflows/main.workflow.yml index 41089279d..8c15f940d 100644 --- a/.github/workflows/main.workflow.yml +++ b/.github/workflows/main.workflow.yml @@ -17,12 +17,12 @@ jobs: - name: Linting run: bash bin/cleanup_pre_commit.sh - name: Build Docker image - run: docker-compose build + run: docker-compose build -f 
docker-compose.local.yml - name: Create Docker network run: docker network create zaken_network - name: Create TOP and Zaken Docker network run: docker network create top_and_zaak_backend_bridge - name: Start images - run: docker-compose up -d + run: docker-compose -f docker-compose.local.yml up -d - name: Run Tests run: docker-compose exec -T zaak-gateway python manage.py test diff --git a/Jenkinsfile b/Jenkinsfile deleted file mode 100644 index eb9a0d589..000000000 --- a/Jenkinsfile +++ /dev/null @@ -1,126 +0,0 @@ -#!groovy - -def tag_image_as(docker_image_url, tag) { - // Tag image, push to repo, remove local tagged image - script { - docker.image("${docker_image_url}:${env.COMMIT_HASH}").push(tag) - sh "docker rmi ${docker_image_url}:${tag} || true" - } -} - -def deploy(app_name, environment) { - // Deploys the app to the given environment - build job: 'Subtask_Openstack_Playbook', - parameters: [ - [$class: 'StringParameterValue', name: 'INVENTORY', value: environment], - [$class: 'StringParameterValue', name: 'PLAYBOOK', value: 'deploy.yml'], - [$class: 'StringParameterValue', name: 'PLAYBOOKPARAMS', value: "-e cmdb_id=app_${app_name}"], - ] -} - -def tag_and_deploy(docker_image_url, app_name, environment) { - // Tags the Docker with the environment, en deploys to the same environment - script { - tag_image_as(docker_image_url, environment) - deploy(app_name, environment) - } -} - -def build_image(docker_image_url, source) { - // Builds the image given the source, and pushes it to the Amsterdam Docker registry - script { - def image = docker.build("${docker_image_url}:${env.COMMIT_HASH}", - "--no-cache " + - "--shm-size 1G " + - "--build-arg COMMIT_HASH=${env.COMMIT_HASH} " + - "--build-arg BRANCH_NAME=${env.BRANCH_NAME} " + - " ${source}") - image.push() - tag_image_as(docker_image_url, "latest") - } -} - -def remove_image(docker_image_url) { - // delete original image built on the build server - script { - sh "docker rmi ${docker_image_url}:${env.COMMIT_HASH} || true" - } -} - -pipeline { - agent any - environment { - PRODUCTION = "production" - ACCEPTANCE = "acceptance" - - ZAKEN_IMAGE_URL = "${DOCKER_REGISTRY_NO_PROTOCOL}/fixxx/zaken" - ZAKEN_SOURCE = "./app" - ZAKEN_NAME = "zaken" - - // There is no need to deploy Redis every time. 
- REDIS_IMAGE_URL = "${DOCKER_REGISTRY_NO_PROTOCOL}/fixxx/zaken-redis" - REDIS_SOURCE = "./redis" - REDIS_NAME = "zaken-redis" - - OPEN_ZAAK_IMAGE_URL = "${DOCKER_REGISTRY_NO_PROTOCOL}/fixxx/zaken-open-zaak" - OPEN_ZAAK_SOURCE = "./open-zaak/open-zaak" - OPEN_ZAAK_NAME = "zaken-open-zaak" - - OPEN_NOTIFICATIES_IMAGE_URL = "${DOCKER_REGISTRY_NO_PROTOCOL}/fixxx/zaken-open-notificaties" - OPEN_NOTIFICATIES_SOURCE = "./open-zaak/open-notificaties" - OPEN_NOTIFICATIES_NAME = "zaken-open-notificaties" - } - - stages { - stage("Checkout") { - steps { - checkout scm - script { - env.COMMIT_HASH = sh(returnStdout: true, script: "git log -n 1 --pretty=format:'%h'").trim() - } - } - } - - stage("Build docker images") { - steps { - build_image(env.ZAKEN_IMAGE_URL, env.ZAKEN_SOURCE) - build_image(env.OPEN_ZAAK_IMAGE_URL, env.OPEN_ZAAK_SOURCE) - build_image(env.OPEN_NOTIFICATIES_IMAGE_URL, env.OPEN_NOTIFICATIES_SOURCE) - // build_image(env.REDIS_IMAGE_URL, env.REDIS_SOURCE) - } - } - - stage("Push and deploy acceptance images") { - when { - not { buildingTag() } - branch 'main' - } - steps { - tag_and_deploy(env.ZAKEN_IMAGE_URL, env.ZAKEN_NAME, env.ACCEPTANCE) - tag_and_deploy(env.OPEN_ZAAK_IMAGE_URL, env.OPEN_ZAAK_NAME, env.ACCEPTANCE) - tag_and_deploy(env.OPEN_NOTIFICATIES_IMAGE_URL, env.OPEN_NOTIFICATIES_NAME, env.ACCEPTANCE) - // tag_and_deploy(env.REDIS_IMAGE_URL, env.REDIS_NAME, env.ACCEPTANCE) - } - } - - stage("Push and deploy production images") { - // Only deploy to production if there is a tag - when { buildingTag() } - steps { - tag_and_deploy(env.ZAKEN_IMAGE_URL, env.ZAKEN_NAME, env.PRODUCTION) - // tag_and_deploy(env.OPEN_ZAAK_IMAGE_URL, env.OPEN_ZAAK_NAME, env.PRODUCTION) - // tag_and_deploy(env.OPEN_NOTIFICATIES_IMAGE_URL, env.OPEN_NOTIFICATIES_NAME, env.PRODUCTION) - // tag_and_deploy(env.REDIS_IMAGE_URL, env.REDIS_NAME, env.PRODUCTION) - } - } - } - - post { - always { - remove_image(env.ZAKEN_IMAGE_URL) - remove_image(env.OPEN_ZAAK_IMAGE_URL) - remove_image(env.OPEN_NOTIFICATIES_IMAGE_URL) - // remove_image(env.REDIS_IMAGE_URL) - } - } -} diff --git a/Makefile b/Makefile new file mode 100644 index 000000000..f3e7825ab --- /dev/null +++ b/Makefile @@ -0,0 +1,47 @@ +.PHONY: manifests deploy + +dc = docker-compose + +ENVIRONMENT ?= local +HELM_ARGS = manifests/chart \ + -f manifests/values.yaml \ + -f manifests/env/${ENVIRONMENT}.yaml \ + --set image.tag=${VERSION} + +REGISTRY ?= 127.0.0.1:5001 +REPOSITORY ?= salmagundi/zaken-backend +VERSION ?= latest + +build: + $(dc) build + +test: + echo "No tests available" + +migrate: + +push: + $(dc) push + + +manifests: + @helm template wonen $(HELM_ARGS) $(ARGS) + +deploy: manifests + helm upgrade --install wonen $(HELM_ARGS) $(ARGS) + +update-chart: + rm -rf manifests/chart + git clone --branch 1.5.2 --depth 1 git@github.com:Amsterdam/helm-application.git manifests/chart + rm -rf manifests/chart/.git + +clean: + $(dc) down -v --remove-orphans + +reset: + helm uninstall wonen + +refresh: reset build push deploy + +dev: + nohup kubycat kubycat-config.yaml > /dev/null 2>&1& diff --git a/README.md b/README.md index b675d66ff..507b5f7fc 100644 --- a/README.md +++ b/README.md @@ -45,13 +45,13 @@ First, make sure you have built the project and executed the database migrations ``` docker network create top_and_zaak_backend_bridge docker network create zaken_network -docker compose build +docker-compose -f docker-compose.local.yml build ``` Start AZA backend: ``` -docker compose up +docker-compose -f docker-compose.local.yml up ``` To create all 
necessary credentials run the following command: diff --git a/app/Dockerfile b/app/Dockerfile index 936c33a82..dc109dbbb 100644 --- a/app/Dockerfile +++ b/app/Dockerfile @@ -30,4 +30,6 @@ RUN chmod +x /app/wait-for.sh RUN chmod +x /app/celery.sh RUN chmod +x /app/deploy/docker-entrypoint.sh -CMD ["bash", "/app/deploy/docker-entrypoint.sh"] + +ENTRYPOINT ["/app/deploy/docker-entrypoint.sh"] +CMD ["uwsgi", "--ini", "/app/deploy/config.ini"] diff --git a/app/apps/addresses/models.py b/app/apps/addresses/models.py index 678a71aeb..f62aa20cc 100644 --- a/app/apps/addresses/models.py +++ b/app/apps/addresses/models.py @@ -2,10 +2,8 @@ from django.db import models from utils.api_queries_bag import ( + do_bag_search_benkagg_by_bag_id, do_bag_search_by_bag_id, - do_bag_search_nummeraanduiding_id_by_address, - do_bag_search_nummeraanduiding_id_by_bag_id, - get_bag_data_by_verblijfsobject_url, ) logger = logging.getLogger(__name__) @@ -56,7 +54,10 @@ class Address(models.Model): ) @property - def full_address(self): + def full_address(self) -> str: + """ + Retrieves a string with the full address of the object. + """ full_address = f"{self.street_name} {self.number}" if self.suffix or self.suffix_letter: full_address = f"{full_address}-{self.suffix}{self.suffix_letter}" @@ -78,10 +79,6 @@ def get_or_create_by_bag_id(bag_id): return Address.objects.get_or_create(bag_id=bag_id)[0] def get_bag_address_data(self): - # When moving the import to the beginning of the file, a Django error follows: - # ImproperlyConfigured: AUTH_USER_MODEL refers to model 'users.User' that has not been installed. - from utils.exceptions import DistrictNotFoundError - bag_search_response = do_bag_search_by_bag_id(self.bag_id) bag_search_results = bag_search_response.get("results", []) @@ -108,61 +105,46 @@ def get_bag_address_data(self): self.lng = centroid[0] self.lat = centroid[1] - verblijfsobject_url = ( - found_bag_data.get("_links", {}).get("self", {}).get("href") - ) - verblijfsobject = ( - verblijfsobject_url - and get_bag_data_by_verblijfsobject_url(verblijfsobject_url) - ) - district_name = verblijfsobject and verblijfsobject.get( - "_stadsdeel", {} - ).get("naam") - if district_name: - self.district = District.objects.get_or_create(name=district_name)[0] - else: - raise DistrictNotFoundError( - f"verblijfsobject_url: {verblijfsobject_url}, verblijfsobject: {verblijfsobject}" - ) - - def get_bag_nummeraanduiding_id(self): - nummeraanduidingen = [] - # Searching by bag_id should be performed first because it returns the fewest results. - # For example: A search for Weesperzijde 112 returns 14 results (112A, 112B, 112C etc). - response = do_bag_search_nummeraanduiding_id_by_bag_id(self.bag_id) - nummeraanduidingen = response.get("_embedded", {}).get("nummeraanduidingen", []) - - # If no nummeraanduidingen is found, try to search for BAG with address params. - if not nummeraanduidingen and self.street_name: - response = do_bag_search_nummeraanduiding_id_by_address(self) - nummeraanduidingen = response.get("_embedded", {}).get( - "nummeraanduidingen", [] - ) + def get_bag_identificatie_and_stadsdeel(self): + """ + Retrieves the identificatie(nummeraanduiding_id) and stadsdeel of an address by bag_id. + nummeraanduiding_id is needed for BRP. + stadsdeel is needed for filtering. + """ + # When moving the import to the beginning of the file, a Django error follows: + # ImproperlyConfigured: AUTH_USER_MODEL refers to model 'users.User' that has not been installed. 
+ from utils.exceptions import DistrictNotFoundError + + response = do_bag_search_benkagg_by_bag_id(self.bag_id) + adresseerbareobjecten = response.get("_embedded", {}).get( + "adresseerbareobjecten", [] + ) - # If there are multiple results, find the result with the same house number. - # TODO: What if Weesperzijde 112 and Weesperzijde 112A have the same bag_id? - found_bag_nummeraanduiding = next( + # There are adresseerbareobjecten with the same bag_id. Find the best result. + found_bag_object = next( ( - nummeraanduiding - for nummeraanduiding in nummeraanduidingen - if nummeraanduiding.get("huisnummer", None) == self.number + adresseerbareobject + for adresseerbareobject in adresseerbareobjecten + if adresseerbareobject.get("huisnummer", None) == self.number ), {}, ) - - nummeraanduiding_id = ( - found_bag_nummeraanduiding.get("_links", {}) - .get("self", {}) - .get("identificatie", "") - ) + nummeraanduiding_id = found_bag_object.get("identificatie") if nummeraanduiding_id: self.nummeraanduiding_id = nummeraanduiding_id + # Add Stadsdeel to address. + district_name = found_bag_object.get("gebiedenStadsdeelNaam") + if district_name: + self.district = District.objects.get_or_create(name=district_name)[0] + else: + raise DistrictNotFoundError(f"API benkagg bag_id: {self.bag_id}") + def update_bag_data(self): self.get_bag_address_data() # Prevent a nummeraanduiding_id error while creating a case. try: - self.get_bag_nummeraanduiding_id() + self.get_bag_identificatie_and_stadsdeel() except Exception as e: logger.error( f"Could not retrieve nummeraanduiding_id for bag_id:{self.bag_id}: {e}" diff --git a/app/apps/addresses/tests/tests_models.py b/app/apps/addresses/tests/tests_models.py index 300f20a34..d85b07282 100644 --- a/app/apps/addresses/tests/tests_models.py +++ b/app/apps/addresses/tests/tests_models.py @@ -27,7 +27,6 @@ def test_can_create_address(self): self.assertEquals(Address.objects.count(), 1) - @patch("apps.addresses.models.get_bag_data_by_verblijfsobject_url") @patch("apps.addresses.models.do_bag_search_by_bag_id") def test_can_create_address_with_bag_result_without_verblijftobject_url( self, mock_do_bag_search_id, mock_get_bag_data @@ -46,7 +45,6 @@ def test_can_create_address_with_bag_result_without_verblijftobject_url( baker.make(Address) mock_do_bag_search_id.assert_called() - @patch("apps.addresses.models.get_bag_data_by_verblijfsobject_url") @patch("apps.addresses.models.do_bag_search_by_bag_id") def test_can_create_address_with_bag_result_without_verblijftobject_stadsdeel( self, mock_do_bag_search_id, mock_get_bag_data @@ -62,11 +60,7 @@ def test_can_create_address_with_bag_result_without_verblijftobject_stadsdeel( with self.assertRaises(DistrictNotFoundError): baker.make(Address) mock_do_bag_search_id.assert_called() - mock_get_bag_data.assert_called_with( - "https://api.data.amsterdam.nl/bag/v1.1/verblijfsobject/0363010001028805/" - ) - @patch("apps.addresses.models.get_bag_data_by_verblijfsobject_url") @patch("apps.addresses.models.do_bag_search_by_bag_id") def test_can_create_address_with_bag_result( self, mock_do_bag_search_id, mock_get_bag_data @@ -82,9 +76,6 @@ def test_can_create_address_with_bag_result( baker.make(Address) mock_do_bag_search_id.assert_called() - mock_get_bag_data.assert_called_with( - "https://api.data.amsterdam.nl/bag/v1.1/verblijfsobject/0363010001028805/" - ) self.assertEquals(Address.objects.count(), 1) self.assertEquals(District.objects.count(), 1) diff --git a/app/apps/cases/views/case.py b/app/apps/cases/views/case.py index 
628e5352e..fa8bfc035 100644 --- a/app/apps/cases/views/case.py +++ b/app/apps/cases/views/case.py @@ -1,4 +1,5 @@ import io +import logging import operator from functools import reduce @@ -11,6 +12,7 @@ CaseStateType, CaseTheme, Subject, + Tag, ) from apps.cases.serializers import ( AdvertisementSerializer, @@ -63,6 +65,8 @@ from rest_framework.response import Response from utils.mimetypes import get_mimetype +logger = logging.getLogger(__name__) + class MultipleValueField(MultipleChoiceField): def __init__(self, *args, field_class, **kwargs): @@ -167,6 +171,9 @@ class CaseFilter(filters.FilterSet): to_field_name="name", ) suffix = filters.CharFilter(method="get_suffix") + tag = filters.ModelMultipleChoiceFilter( + queryset=Tag.objects.all(), method="get_tag" + ) task = filters.ModelMultipleChoiceFilter( queryset=CaseUserTask.objects.filter(completed=False), method="get_task", @@ -309,6 +316,13 @@ def get_subject(self, queryset, name, value): ).distinct() return queryset + def get_tag(self, queryset, name, value): + if value: + return queryset.filter( + tags__in=value, + ).distinct() + return queryset + def get_theme(self, queryset, name, value): if value: return queryset.filter( @@ -396,6 +410,7 @@ class StandardResultsSetPagination(EmptyPagination): OpenApiParameter("state_types__name", OpenApiTypes.STR, OpenApiParameter.QUERY), OpenApiParameter("subject", OpenApiTypes.NUMBER, OpenApiParameter.QUERY), OpenApiParameter("subject_name", OpenApiTypes.STR, OpenApiParameter.QUERY), + OpenApiParameter("tag", OpenApiTypes.NUMBER, OpenApiParameter.QUERY), OpenApiParameter("task", OpenApiTypes.STR, OpenApiParameter.QUERY), OpenApiParameter("theme", OpenApiTypes.NUMBER, OpenApiParameter.QUERY), OpenApiParameter("theme_name", OpenApiTypes.STR, OpenApiParameter.QUERY), @@ -410,6 +425,9 @@ class CaseViewSet( mixins.RetrieveModelMixin, viewsets.GenericViewSet, ): + def list(self, request, *args, **kwargs): + return super().list(request, *args, **kwargs) + permission_classes = rest_permission_classes_for_top() + [CanAccessSensitiveCases] serializer_class = CaseSerializer queryset = Case.objects.all() @@ -782,7 +800,9 @@ def download(self, request, pk): class DocumentTypeViewSet(viewsets.ViewSet): + serializer_class = DocumentTypeSerializer + def list(self, request): document_types = get_document_types() - serializer = DocumentTypeSerializer(document_types, many=True) + serializer = self.serializer_class(document_types, many=True) return Response(serializer.data) diff --git a/app/apps/fines/api_queries_belastingen.py b/app/apps/fines/api_queries_belastingen.py index f079b0327..eff6e36af 100644 --- a/app/apps/fines/api_queries_belastingen.py +++ b/app/apps/fines/api_queries_belastingen.py @@ -16,16 +16,14 @@ def get_fines(id, use_retry=True): def _get_fines_internal(): parameter = {"identificatienummer": id} header = {"authorization": f"Bearer {settings.BELASTING_API_ACCESS_TOKEN}"} - response = requests.get( url=settings.BELASTING_API_URL, headers=header, params=parameter, - verify="/usr/local/share/ca-certificates/adp_rootca.crt", + # verify="/usr/local/share/ca-certificates/adp_rootca.crt", timeout=6, ) response.raise_for_status() - return response.json() if use_retry: diff --git a/app/apps/fines/views.py b/app/apps/fines/views.py index 59cc26d64..f15097dd5 100644 --- a/app/apps/fines/views.py +++ b/app/apps/fines/views.py @@ -1,14 +1,18 @@ from apps.fines.api_queries_belastingen import get_fines from apps.fines.serializers import FineListSerializer -from drf_spectacular.utils import extend_schema 
+from drf_spectacular.utils import OpenApiParameter, extend_schema from rest_framework import status from rest_framework.response import Response from rest_framework.viewsets import ViewSet class FinesViewSet(ViewSet): + queryset = [] # Add this line to set an empty queryset + @extend_schema( - parameters=[id], + parameters=[ + OpenApiParameter(name="id", description="Bag ID", required=True, type=int) + ], description="Get permit checkmarks based on bag id", responses={200: FineListSerializer()}, ) diff --git a/app/apps/health/apps.py b/app/apps/health/apps.py index e84b8d94f..20d0182e2 100644 --- a/app/apps/health/apps.py +++ b/app/apps/health/apps.py @@ -8,8 +8,7 @@ class HealthConfig(AppConfig): def ready(self): from .health_checks import ( # OpenZaakZaken,; OpenZaakZakenAlfresco,; OpenZaakZakenCatalogus, BAGAtlasServiceCheck, - BAGNummeraanduidingenServiceCheck, - BAGVerblijfsobjectServiceCheck, + BAGBenkaggNummeraanduidingenServiceCheck, Belastingdienst, BRPServiceCheck, CeleryExecuteTask, @@ -20,8 +19,7 @@ def ready(self): ) plugin_dir.register(BAGAtlasServiceCheck) - plugin_dir.register(BAGNummeraanduidingenServiceCheck) - plugin_dir.register(BAGVerblijfsobjectServiceCheck) + plugin_dir.register(BAGBenkaggNummeraanduidingenServiceCheck) plugin_dir.register(BRPServiceCheck) plugin_dir.register(Belastingdienst) plugin_dir.register(CeleryExecuteTask) @@ -33,3 +31,5 @@ def ready(self): plugin_dir.register(PowerBrowser) plugin_dir.register(Toeristischeverhuur) # plugin_dir.register(VakantieVerhuurRegistratieCheck) + plugin_dir.register(PowerBrowser) + plugin_dir.register(DecosJoinCheck) diff --git a/app/apps/health/health_checks.py b/app/apps/health/health_checks.py index 257513648..7e11e3953 100644 --- a/app/apps/health/health_checks.py +++ b/app/apps/health/health_checks.py @@ -7,7 +7,7 @@ from health_check.backends import BaseHealthCheckBackend from health_check.exceptions import ServiceUnavailable from requests.exceptions import HTTPError, SSLError, Timeout -from utils.api_queries_bag import do_bag_search_nummeraanduiding_id_by_bag_id +from utils.api_queries_bag import do_bag_search_benkagg_by_bag_id from utils.api_queries_toeristische_verhuur import ( get_bag_vakantieverhuur_registrations, get_bsn_vakantieverhuur_registrations, @@ -16,7 +16,7 @@ ) logger = logging.getLogger(__name__) -timeout_in_sec = 10 +TIMEOUT_IN_SEC = 20 class APIServiceCheckBackend(BaseHealthCheckBackend): @@ -38,7 +38,7 @@ def check_status(self): return try: - response = requests.get(api_url, timeout=timeout_in_sec) + response = requests.get(api_url, timeout=TIMEOUT_IN_SEC) response.raise_for_status() except ConnectionRefusedError as e: logger.error(e) @@ -51,7 +51,7 @@ def check_status(self): self.add_error(ServiceUnavailable(f"Service not found. 
{api_url}")) except Timeout: self.add_error( - ServiceUnavailable(f"Exceeded timeout of {timeout_in_sec} seconds.") + ServiceUnavailable(f"Exceeded timeout of {TIMEOUT_IN_SEC} seconds") ) except SSLError as e: logger.error(e) @@ -79,16 +79,6 @@ class BAGAtlasServiceCheck(APIServiceCheckBackend): verbose_name = "BAG Atlas" -class BAGVerblijfsobjectServiceCheck(APIServiceCheckBackend): - """ - Endpoint for checking the BAG Verblijfsobject Service API Endpoint - """ - - critical_service = True - api_url = settings.BAG_API_VERBLIJFSOBJECT_URL - verbose_name = "BAG Verblijfsobject" - - class BRPServiceCheck(APIServiceCheckBackend): """ Endpoint for checking the BRP Service API Endpoint @@ -99,22 +89,25 @@ class BRPServiceCheck(APIServiceCheckBackend): verbose_name = "BRP" -class BAGNummeraanduidingenServiceCheck(BaseHealthCheckBackend): +class BAGBenkaggNummeraanduidingenServiceCheck(BaseHealthCheckBackend): """ - Endpoint for checking the BAG Nummeraanduidingen API + Endpoint for checking the BAG Benkagg adresseerbareobjecten API """ critical_service = True - verbose_name = "BAG Nummeraanduidingen" + verbose_name = "BAG Benkagg Nummeraanduidingen" def check_status(self): try: - response = do_bag_search_nummeraanduiding_id_by_bag_id( - settings.BAG_ID_AMSTEL_1 - ) + response = do_bag_search_benkagg_by_bag_id(settings.BAG_ID_AMSTEL_1) message = response.get("message") if message: self.add_error(ServiceUnavailable(f"{message}"), message) + adresseerbareobjecten = response.get("_embedded", {}).get( + "adresseerbareobjecten", [] + ) + if len(adresseerbareobjecten) == 0: + self.add_error(ServiceUnavailable("No results")) except HTTPError as e: logger.error(e) self.add_error(ServiceUnavailable(f"HTTPError {e.response.status_code}.")) diff --git a/app/apps/openzaak/apps.py b/app/apps/openzaak/apps.py index 22a4eda88..3eb019a08 100644 --- a/app/apps/openzaak/apps.py +++ b/app/apps/openzaak/apps.py @@ -1,5 +1,5 @@ from django.apps import AppConfig -from django.conf import settings +from utils.openzaak_enabled import is_openzaak_enabled class OpenzaakConfig(AppConfig): @@ -8,5 +8,5 @@ class OpenzaakConfig(AppConfig): app_label = "openzaak" def ready(self): - if settings.OPENZAAK_ENABLED: + if is_openzaak_enabled(): import apps.openzaak.signals # noqa diff --git a/app/apps/permits/api_queries_decos_join.py b/app/apps/permits/api_queries_decos_join.py index 69ed05727..e50f282ee 100644 --- a/app/apps/permits/api_queries_decos_join.py +++ b/app/apps/permits/api_queries_decos_join.py @@ -158,9 +158,6 @@ def _process_request_to_decos_join(self, url): "Authorization": f"Basic {settings.DECOS_JOIN_AUTH_BASE64}", } ) - - logger.info(url) - response = requests.get(url, **request_params) return response.json() diff --git a/app/apps/users/serializers.py b/app/apps/users/serializers.py index d3dace044..7e7533c7b 100644 --- a/app/apps/users/serializers.py +++ b/app/apps/users/serializers.py @@ -88,3 +88,7 @@ class Meta: class OIDCAuthenticateSerializer(serializers.Serializer): code = serializers.CharField(required=True) + + +class IsAuthorizedSerializer(serializers.Serializer): + is_authorized = serializers.BooleanField() diff --git a/app/apps/users/views.py b/app/apps/users/views.py index 8be7fe8ce..2739e9dc5 100644 --- a/app/apps/users/views.py +++ b/app/apps/users/views.py @@ -14,6 +14,7 @@ from .auth import AuthenticationBackend from .models import User from .serializers import ( + IsAuthorizedSerializer, OIDCAuthenticateSerializer, UserDetailSerializer, UserSerializer, @@ -56,6 +57,7 @@ def list(self, request): 
class IsAuthorizedView(APIView): permission_classes = () + serializer_class = IsAuthorizedSerializer def get(self, request): is_authorized = IsInAuthorizedRealm().has_permission(request, self) diff --git a/app/apps/workflow/serializers.py b/app/apps/workflow/serializers.py index 1ac324df3..4124d8fba 100644 --- a/app/apps/workflow/serializers.py +++ b/app/apps/workflow/serializers.py @@ -180,7 +180,10 @@ class CaseWorkflowSerializer(CaseWorkflowBaseSerializer): tasks = serializers.SerializerMethodField() information = serializers.SerializerMethodField() - def get_information(self, obj): + def get_information(self, obj) -> str: + """ + Retrieves information from the object. Value from names. + """ return obj.data.get("names", {}).get("value", "") @extend_schema_field(CaseUserTaskWorkdflowSerializer(many=True)) diff --git a/app/apps/workflow/views.py b/app/apps/workflow/views.py index 0ce24c7c5..cd12c263d 100644 --- a/app/apps/workflow/views.py +++ b/app/apps/workflow/views.py @@ -6,6 +6,7 @@ CaseStateType, CaseTheme, Subject, + Tag, ) from apps.main.filters import RelatedOrderingFilter from apps.main.pagination import EmptyPagination @@ -120,6 +121,10 @@ class CaseUserTaskFilter(filters.FilterSet): method="get_subject", to_field_name="name", ) + tag = filters.ModelMultipleChoiceFilter( + queryset=Tag.objects.all(), + method="get_tag", + ) theme = filters.ModelMultipleChoiceFilter( queryset=CaseTheme.objects.all(), method="get_theme", @@ -176,6 +181,13 @@ def get_subject(self, queryset, name, value): ).distinct() return queryset + def get_tag(self, queryset, name, value): + if value: + return queryset.filter( + case__tags__in=value, + ).distinct() + return queryset + def get_theme(self, queryset, name, value): if value: return queryset.filter( @@ -263,6 +275,7 @@ class StandardResultsSetPagination(EmptyPagination): OpenApiParameter("state_types", OpenApiTypes.NUMBER, OpenApiParameter.QUERY), OpenApiParameter("subject", OpenApiTypes.NUMBER, OpenApiParameter.QUERY), OpenApiParameter("subject_name", OpenApiTypes.STR, OpenApiParameter.QUERY), + OpenApiParameter("tag", OpenApiTypes.NUMBER, OpenApiParameter.QUERY), OpenApiParameter("theme", OpenApiTypes.NUMBER, OpenApiParameter.QUERY), OpenApiParameter("theme_name", OpenApiTypes.STR, OpenApiParameter.QUERY), OpenApiParameter("ton_ids", OpenApiTypes.NUMBER, OpenApiParameter.QUERY), diff --git a/app/config/azure_settings.py b/app/config/azure_settings.py new file mode 100644 index 000000000..eaa3f340f --- /dev/null +++ b/app/config/azure_settings.py @@ -0,0 +1,68 @@ +import os +import shlex +import subprocess +from subprocess import PIPE + +from azure.identity import DefaultAzureCredential, WorkloadIdentityCredential + + +class AzureAuth: + def __init__(self): + self._credential = None + + @property + def credential(self): + if not self._credential: + self._credential = self.get_credential() + + return self._credential + + def get_credential(self): + credential = None + federated_token_file = os.getenv("AZURE_FEDERATED_TOKEN_FILE") + if federated_token_file: + # This relies on environment variables that get injected. 
+ # AZURE_AUTHORITY_HOST: (Injected by the webhook) + # AZURE_CLIENT_ID: (Injected by the webhook) + # AZURE_TENANT_ID: (Injected by the webhook) + # AZURE_FEDERATED_TOKEN_FILE: (Injected by the webhook) + credential = WorkloadIdentityCredential() + elif os.isatty(0): + account = subprocess.run( + shlex.split("az account show"), + check=False, + stdout=PIPE, + stderr=PIPE, + ) + if account.returncode != 0: + subprocess.run( + shlex.split("az login"), + check=True, + stdout=PIPE, + ) + + credential = DefaultAzureCredential(managed_identity_client_id=None) + else: + raise Exception("cannot connect to azure") + + return credential + + @property + def db_password(self) -> object: + # return access_token.token + class DynamicString: + def __init__(self, credential, scopes) -> None: + self.credential = credential + self.scopes = scopes + + def __str__(self): + access_token = self.credential.get_token(*self.scopes) + return access_token.token + + scopes = ["https://ossrdbms-aad.database.windows.net/.default"] + return DynamicString(self.credential, scopes) + + +class Azure: + def __init__(self) -> None: + self.auth = AzureAuth() diff --git a/app/config/celery.py b/app/config/celery.py index b132b09da..1f3f3ecdf 100644 --- a/app/config/celery.py +++ b/app/config/celery.py @@ -5,6 +5,7 @@ os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings") app = Celery("proj") +app.conf.task_default_queue = "AZA" app.config_from_object("django.conf:settings", namespace="CELERY") app.autodiscover_tasks() diff --git a/app/config/settings.py b/app/config/settings.py index 453f3ce46..51184d3e8 100644 --- a/app/config/settings.py +++ b/app/config/settings.py @@ -1,20 +1,26 @@ import os +import socket from datetime import timedelta from os.path import join -import sentry_sdk from celery.schedules import crontab from dotenv import load_dotenv from keycloak_oidc.default_settings import * # noqa -from sentry_sdk.integrations.django import DjangoIntegration +from opencensus.ext.azure.trace_exporter import AzureExporter + +from .azure_settings import Azure + +azure = Azure() load_dotenv() +# config_integration.trace_integrations(["requests", "logging"]) + BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) SECRET_KEY = os.environ.get("DJANGO_SECRET_KEY") ENVIRONMENT = os.getenv("ENVIRONMENT") -DEBUG = ENVIRONMENT == "development" +DEBUG = ENVIRONMENT == "local" # Settings to improve security is_secure_environment = not DEBUG @@ -29,13 +35,7 @@ ALLOWED_HOSTS = "*" -# TODO: Configure this in the environment variables -CORS_ORIGIN_WHITELIST = ( - "https://wonen.zaken.amsterdam.nl", - "https://acc.wonen.zaken.amsterdam.nl", - "http://0.0.0.0:2999", - "http://localhost:2999", -) +CORS_ORIGIN_WHITELIST = os.environ.get("CORS_ORIGIN_WHITELIST").split(",") CORS_ORIGIN_ALLOW_ALL = False INSTALLED_APPS = ( @@ -64,7 +64,6 @@ "health_check", "health_check.db", "health_check.contrib.migrations", - "health_check.contrib.rabbitmq", "health_check.contrib.celery_ping", # Apps "apps.users", @@ -103,18 +102,32 @@ "show_fields": False, } +DATABASE_HOST = os.getenv("DATABASE_HOST", "database") +DATABASE_NAME = os.getenv("DATABASE_NAME", "dev") +DATABASE_USER = os.getenv("DATABASE_USER", "dev") +DATABASE_PASSWORD = os.getenv("DATABASE_PASSWORD", "dev") +DATABASE_PORT = os.getenv("DATABASE_PORT", "5432") +DATABASE_OPTIONS = {"sslmode": "allow", "connect_timeout": 5} + +if "azure.com" in DATABASE_HOST: + DATABASE_PASSWORD = azure.auth.db_password + DATABASE_OPTIONS["sslmode"] = "require" + DATABASES = { "default": { 
- "ENGINE": "django.db.backends.postgresql", - "NAME": os.environ.get("DATABASE_NAME"), - "USER": os.environ.get("DATABASE_USER"), - "PASSWORD": os.environ.get("DATABASE_PASSWORD"), - "HOST": os.environ.get("DATABASE_HOST"), - "PORT": os.environ.get("DATABASE_PORT"), + "ENGINE": "django.contrib.gis.db.backends.postgis", + "NAME": DATABASE_NAME, + "USER": DATABASE_USER, + "PASSWORD": DATABASE_PASSWORD, + "HOST": DATABASE_HOST, + "CONN_MAX_AGE": 60 * 5, + "PORT": DATABASE_PORT, + "OPTIONS": {"sslmode": "allow", "connect_timeout": 5}, }, } MIDDLEWARE = ( + "opencensus.ext.django.middleware.OpencensusMiddleware", "corsheaders.middleware.CorsMiddleware", "django.middleware.common.CommonMiddleware", "django.middleware.security.SecurityMiddleware", @@ -185,34 +198,85 @@ TAG_NAME = os.getenv("TAG_NAME", "default-release") -# Error logging through Sentry -sentry_sdk.init( - dsn=os.environ.get("SENTRY_DSN"), - integrations=[DjangoIntegration()], - release=TAG_NAME, -) +LOGGING_LEVEL = os.getenv("LOGGING_LEVEL", "DEBUG") LOGGING = { "version": 1, - "disable_existing_loggers": False, + "disable_existing_loggers": True, "handlers": { - "console": {"class": "logging.StreamHandler", "level": "DEBUG"}, + "console": {"class": "logging.StreamHandler", "level": LOGGING_LEVEL}, + "celery": {"level": LOGGING_LEVEL, "class": "logging.StreamHandler"}, }, + "root": {"handlers": ["console"], "level": LOGGING_LEVEL}, "loggers": { "apps": { "handlers": ["console"], - "level": "INFO", + "level": LOGGING_LEVEL, "propagate": True, }, "utils": { "handlers": ["console"], - "level": "INFO", + "level": LOGGING_LEVEL, + "propagate": True, + }, + "django": { + "handlers": ["console"], + "level": LOGGING_LEVEL, + "propagate": True, + }, + "": { + "level": LOGGING_LEVEL, + "handlers": ["console"], + "propagate": True, + }, + "celery": { + "handlers": ["celery", "console"], + "level": LOGGING_LEVEL, "propagate": True, }, - "mozilla_django_oidc": {"handlers": ["console"], "level": "DEBUG"}, + "mozilla_django_oidc": {"handlers": ["console"], "level": "INFO"}, }, } +APPLICATIONINSIGHTS_CONNECTION_STRING = os.getenv( + "APPLICATIONINSIGHTS_CONNECTION_STRING" +) + +if APPLICATIONINSIGHTS_CONNECTION_STRING: + # Only log queries when in DEBUG due to high cost + def filter_traces(envelope): + if LOGGING_LEVEL == "DEBUG": + return True + log_data = envelope.data.baseData + if "query" in log_data["name"].lower(): + return False + if log_data["name"] == "GET /": + return False + if "applicationinsights" in log_data.message.lower(): + return False + return True + + exporter = AzureExporter(connection_string=APPLICATIONINSIGHTS_CONNECTION_STRING) + exporter.add_telemetry_processor(filter_traces) + OPENCENSUS = { + "TRACE": { + "SAMPLER": "opencensus.trace.samplers.ProbabilitySampler(rate=1)", + "EXPORTER": exporter, + } + } + LOGGING["handlers"]["azure"] = { + "level": LOGGING_LEVEL, + "class": "opencensus.ext.azure.log_exporter.AzureLogHandler", + "connection_string": APPLICATIONINSIGHTS_CONNECTION_STRING, + } + + LOGGING["root"]["handlers"] = ["azure", "console"] + LOGGING["loggers"]["django"]["handlers"] = ["azure", "console"] + LOGGING["loggers"][""]["handlers"] = ["azure", "console"] + LOGGING["loggers"]["apps"]["handlers"] = ["azure", "console"] + LOGGING["loggers"]["utils"]["handlers"] = ["azure", "console"] + LOGGING["loggers"]["celery"]["handlers"] = ["azure", "console", "celery"] + """ TODO: Only a few of these settings are actually used for our current flow, but the mozilla_django_oidc OIDCAuthenticationBackend required these to 
be set. @@ -237,23 +301,23 @@ OIDC_OP_AUTHORIZATION_ENDPOINT = os.getenv( "OIDC_OP_AUTHORIZATION_ENDPOINT", - "https://iam.amsterdam.nl/auth/realms/datapunt-ad-acc/protocol/openid-connect/auth", + "https://acc.iam.amsterdam.nl/auth/realms/datapunt-ad-acc/protocol/openid-connect/auth", ) OIDC_OP_TOKEN_ENDPOINT = os.getenv( "OIDC_OP_TOKEN_ENDPOINT", - "https://iam.amsterdam.nl/auth/realms/datapunt-ad-acc/protocol/openid-connect/token", + "https://acc.iam.amsterdam.nl/auth/realms/datapunt-ad-acc/protocol/openid-connect/token", ) OIDC_OP_USER_ENDPOINT = os.getenv( "OIDC_OP_USER_ENDPOINT", - "https://iam.amsterdam.nl/auth/realms/datapunt-ad-acc/protocol/openid-connect/userinfo", + "https://acc.iam.amsterdam.nl/auth/realms/datapunt-ad-acc/protocol/openid-connect/userinfo", ) OIDC_OP_JWKS_ENDPOINT = os.getenv( "OIDC_OP_JWKS_ENDPOINT", - "https://iam.amsterdam.nl/auth/realms/datapunt-ad-acc/protocol/openid-connect/certs", + "https://acc.iam.amsterdam.nl/auth/realms/datapunt-ad-acc/protocol/openid-connect/certs", ) OIDC_OP_LOGOUT_ENDPOINT = os.getenv( "OIDC_OP_LOGOUT_ENDPOINT", - "https://iam.amsterdam.nl/auth/realms/datapunt-ad-acc/protocol/openid-connect/logout", + "https://acc.iam.amsterdam.nl/auth/realms/datapunt-ad-acc/protocol/openid-connect/logout", ) LOCAL_DEVELOPMENT_AUTHENTICATION = ( @@ -297,10 +361,10 @@ "BAG_API_NUMMERAANDUIDING_SEARCH_URL", "https://api.data.amsterdam.nl/v1/bag/nummeraanduidingen/", ) -# BAG Verblijfsobject -BAG_API_VERBLIJFSOBJECT_URL = os.getenv( - "BAG_API_VERBLIJFSOBJECT_URL", - "https://api.data.amsterdam.nl/bag/v1.1/verblijfsobject/", +# BAG benkagg for nummeraanduidingen and stadsdeel +BAG_API_BENKAGG_SEARCH_URL = os.getenv( + "BAG_API_BENKAGG_SEARCH_URL", + "https://api.data.amsterdam.nl/v1/benkagg/adresseerbareobjecten/", ) # API key for the public Amsterdam API (api.data.amsterdam.nl). # This key is NOT used for authorization, but to identify who is using the API for communication purposes. 
@@ -321,10 +385,7 @@ BRP_API_URL = "/".join( [ - os.getenv( - "BRP_API_URL", - "https://acc.hc.data.amsterdam.nl/brp", - ), + os.getenv("BRP_API_URL", "https://acc.bp.data.amsterdam.nl/brp"), "ingeschrevenpersonen", ] ) @@ -455,14 +516,29 @@ SECRET_KEY_AZA_TOP = os.getenv("SECRET_KEY_AZA_TOP") TOP_API_URL = os.getenv("TOP_API_URL") -RABBIT_MQ_URL = os.getenv("RABBIT_MQ_URL") -RABBIT_MQ_USERNAME = os.getenv("RABBIT_MQ_USERNAME") -RABBIT_MQ_PASSWORD = os.getenv("RABBIT_MQ_PASSWORD") +REDIS_HOST = os.getenv("REDIS_HOST") +REDIS_PORT = os.getenv("REDIS_PORT") +REDIS_PASSWORD = os.getenv("REDIS_PASSWORD") +REDIS_PREFIX = "rediss" if is_secure_environment else "redis" +REDIS_URL = f"{REDIS_PREFIX}://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}" + +CACHES = { + "default": { + "BACKEND": "django_redis.cache.RedisCache", + "LOCATION": REDIS_URL, + "OPTIONS": { + "CLIENT_CLASS": "django_redis.client.DefaultClient", + "SOCKET_CONNECT_TIMEOUT": 5, + "SOCKET_TIMEOUT": 5, + }, + } +} CELERY_TASK_TRACK_STARTED = True CELERY_TASK_TIME_LIMIT = 30 * 60 -CELERY_BROKER_URL = f"amqp://{RABBIT_MQ_USERNAME}:{RABBIT_MQ_PASSWORD}@{RABBIT_MQ_URL}" - +CELERY_BROKER_URL = REDIS_URL +BROKER_CONNECTION_MAX_RETRIES = None +BROKER_CONNECTION_TIMEOUT = 120 BROKER_URL = CELERY_BROKER_URL CELERY_TASK_TRACK_STARTED = True CELERY_RESULT_BACKEND = "django-db" @@ -474,20 +550,16 @@ }, } -REDIS = os.getenv("REDIS") -REDIS_URL = f"redis://{REDIS}" -CACHES = { - "default": { - "BACKEND": "django_redis.cache.RedisCache", - "LOCATION": REDIS_URL, - "OPTIONS": { - "CLIENT_CLASS": "django_redis.client.DefaultClient", - "SOCKET_CONNECT_TIMEOUT": 5, - "SOCKET_TIMEOUT": 5, - }, - } +CELERY_BROKER_TRANSPORT_OPTIONS = { + "socket_keepalive": True, + "socket_keepalive_options": { + socket.TCP_KEEPIDLE: 60, + socket.TCP_KEEPCNT: 5, + socket.TCP_KEEPINTVL: 10, + }, } + LOGOUT_REDIRECT_URL = "/admin" DEFAULT_THEME = os.getenv("DEFAULT_THEME", "Vakantieverhuur") @@ -626,7 +698,9 @@ ) DEFAULT_WORKFLOW_TIMER_DURATIONS = { + "local": timedelta(seconds=20), "development": timedelta(seconds=20), + "test": timedelta(seconds=20), "acceptance": timedelta(seconds=240), } diff --git a/app/config/urls.py b/app/config/urls.py index cdb6d5a66..1a0e0c090 100644 --- a/app/config/urls.py +++ b/app/config/urls.py @@ -38,7 +38,9 @@ from django.conf.urls import include, url from django.conf.urls.static import static from django.contrib import admin +from django.http import JsonResponse from django.urls import path +from django.views.generic import View from drf_spectacular.views import SpectacularAPIView, SpectacularSwaggerView from rest_framework.routers import DefaultRouter @@ -85,6 +87,12 @@ router.register(r"generic-tasks", GenericCompletedTaskViewSet, basename="generic-tasks") + +class MyView(View): + def get(self, request, *args, **kwargs): + return JsonResponse({}, status=204) + + urlpatterns = [ # Admin environment path("admin/download_data/", download_data), @@ -96,12 +104,6 @@ path("admin/", admin.site.urls), # API Routing path("api/v1/", include(router.urls)), - path("api/v1/schema/", SpectacularAPIView.as_view(), name="schema"), - path( - "api/v1/swagger/", - SpectacularSwaggerView.as_view(url_name="schema"), - name="swagger-ui", - ), # Authentication endpoint for exchanging an OIDC code for a token path( "api/v1/oidc-authenticate/", @@ -122,8 +124,20 @@ ), path("data-model/", include("django_spaghetti.urls")), url("health/", include("health_check.urls")), + url(regex=r"^$", view=MyView.as_view(), name="index"), ] + static(settings.MEDIA_URL, 
document_root=settings.MEDIA_ROOT) +if settings.DEBUG: + urlpatterns += [ + # Swagger/OpenAPI documentation + path("api/v1/schema/", SpectacularAPIView.as_view(), name="schema"), + path( + "api/v1/swagger/", + SpectacularSwaggerView.as_view(url_name="schema"), + name="swagger-ui", + ), + ] + # JSON handlers for errors handler500 = "rest_framework.exceptions.server_error" handler400 = "rest_framework.exceptions.bad_request" diff --git a/app/deploy/config.ini b/app/deploy/config.ini index de7905e74..24cca1ab8 100644 --- a/app/deploy/config.ini +++ b/app/deploy/config.ini @@ -25,7 +25,7 @@ worker-reload-mercy = 60 ; How long to wait before forcefully killin cheaper-algo = busyness processes = 128 ; Maximum number of workers allowed -cheaper = 8 ; Minimum number of workers allowed +cheaper = 16 ; Minimum number of workers allowed cheaper-initial = 16 ; Workers created at startup cheaper-overload = 1 ; Length of a cycle in seconds cheaper-step = 16 ; How many workers to spawn at a time @@ -35,3 +35,5 @@ cheaper-busyness-min = 20 ; Below this threshold, kill workers (if st cheaper-busyness-max = 70 ; Above this threshold, spawn new workers cheaper-busyness-backlog-alert = 16 ; Spawn emergency workers if more than this many requests are waiting in the queue cheaper-busyness-backlog-step = 2 ; How many emergency workers to create if there are too many requests in the queue +buffer-size = 32768 +lazy-apps = true diff --git a/app/deploy/docker-entrypoint.sh b/app/deploy/docker-entrypoint.sh index 1564ea138..e3094cb79 100644 --- a/app/deploy/docker-entrypoint.sh +++ b/app/deploy/docker-entrypoint.sh @@ -3,18 +3,8 @@ set -u # crash on missing env variables set -e # stop on any error set -x -until PGPASSWORD=$DATABASE_PASSWORD psql -h $DATABASE_HOST -U $DATABASE_USER -c '\q'; do - echo "Postgres is unavailable - sleeping" - sleep 1 -done -echo "Postgres is up!" 
- echo Collecting static files python manage.py collectstatic --no-input -chmod -R 777 /static - -# echo Clear tables -# python manage.py shell -c "from django.db import connection; cursor = connection.cursor(); cursor.execute('drop table if exists "django_migrations" cascade; drop table if exists "django_content_type" cascade; drop table if exists "auth_permission" cascade; drop table if exists "auth_group" cascade; drop table if exists "auth_group_permissions" cascade; drop table if exists "users_user" cascade; drop table if exists "users_user_groups" cascade; drop table if exists "users_user_user_permissions" cascade; drop table if exists "django_admin_log" cascade; drop table if exists "django_session" cascade;'); cursor.close();" echo Apply migrations python manage.py migrate --noinput @@ -25,8 +15,4 @@ python manage.py axes_reset python manage.py loaddata fixture -# echo Create root user -# python manage.py shell -c "from django.contrib.auth import get_user_model; get_user_model().objects.create_superuser('admin@admin.com', 'admin')" -celery -A config worker -l info -D -celery -A config beat -l INFO --scheduler django_celery_beat.schedulers:DatabaseScheduler --detach -exec uwsgi --ini /app/deploy/config.ini #--py-auto-reload=1 +exec "$@" diff --git a/app/requirements.txt b/app/requirements.txt index 26fe85bf8..592e679a1 100644 --- a/app/requirements.txt +++ b/app/requirements.txt @@ -1,6 +1,7 @@ amqp<6.0.0 asgiref==3.4.1 attrs==21.2.0 +azure-identity beautifulsoup4==4.10.0 billiard==3.6.4.0 cached-property==1.5.2 @@ -55,13 +56,12 @@ prompt-toolkit==3.0.19 psycopg2==2.9.1 pycparser==2.20 # pygraphviz==1.7 -PyJWT==2.1.0 +PyJWT pyOpenSSL==23.2.0 pyrsistent==0.18.0 python-crontab==2.5.1 python-dateutil==2.8.2 pytz==2022.1 -PyJWT<2.4.0 redis==3.5.3 requests==2.26.0 requests-mock==1.9.3 @@ -81,3 +81,11 @@ zgw-consumers==0.18.0 zipp==3.5.0 django_webtest==1.9.9 python-dotenv==0.19.2 +applicationinsights==0.11.10 +opencensus==0.11.2 +opencensus-context==0.1.3 +opencensus-ext-azure==1.1.9 +opencensus-ext-django==0.8.0 +opencensus-ext-logging==0.1.1 +opencensus-ext-postgresql==0.1.3 +opencensus-ext-requests==0.8.0 diff --git a/app/utils/api_queries_bag.py b/app/utils/api_queries_bag.py index 41caccaec..5f191549a 100644 --- a/app/utils/api_queries_bag.py +++ b/app/utils/api_queries_bag.py @@ -10,33 +10,13 @@ @retry(stop=stop_after_attempt(3), after=after_log(logger, logging.ERROR)) -def do_bag_search_nummeraanduiding_id_by_bag_id(bag_id): +def do_bag_search_benkagg_by_bag_id(bag_id): """ - Search BAG nummeraanduiding_id using an adresseertVerblijfsobjectId + Search BAG identificatie (nummeraanduiding_id) and stadsdeel using an adresseertVerblijfsobjectId """ address_search = requests.get( - settings.BAG_API_NUMMERAANDUIDING_SEARCH_URL, - params={"adresseertVerblijfsobject.identificatie": bag_id}, - headers=headers, - timeout=30, - ) - return address_search.json() - - -@retry(stop=stop_after_attempt(3), after=after_log(logger, logging.ERROR)) -def do_bag_search_nummeraanduiding_id_by_address(address): - """ - Search BAG nummeraanduiding_id by using an address - """ - params = {"postcode": address.postal_code, "huisnummer": address.number} - if address.suffix: - params["huisnummertoevoeging"] = address.suffix - if address.suffix_letter: - params["huisletter"] = address.suffix_letter - - address_search = requests.get( - settings.BAG_API_NUMMERAANDUIDING_SEARCH_URL, - params=params, + settings.BAG_API_BENKAGG_SEARCH_URL, + params={"adresseertVerblijfsobjectIdentificatie": bag_id}, headers=headers, 
timeout=30, ) @@ -49,20 +29,11 @@ def do_bag_search_by_bag_id(bag_id): Search BAG using a BWV 'landelijk BAG ID' """ address_search = requests.get( - settings.BAG_API_SEARCH_URL, params={"q": bag_id}, timeout=0.5 + settings.BAG_API_SEARCH_URL, params={"q": bag_id}, timeout=5 ) return address_search.json() -@retry(stop=stop_after_attempt(3), after=after_log(logger, logging.ERROR)) -def get_bag_data_by_verblijfsobject_url(verblijfsobject_url): - """ - Does a BAG Query given a URI - """ - bag_data = requests.get(verblijfsobject_url, timeout=0.5) - return bag_data.json() - - # BWV migration queries def get_bag_search_query(address): """ diff --git a/app/utils/openzaak_enabled.py b/app/utils/openzaak_enabled.py new file mode 100644 index 000000000..df176f453 --- /dev/null +++ b/app/utils/openzaak_enabled.py @@ -0,0 +1,7 @@ +from django.conf import settings + + +def is_openzaak_enabled(): + if settings.OPENZAAK_ENABLED == "True" or settings.OPENZAAK_ENABLED is True: + return True + return False diff --git a/bin/setup_credentials.sh b/bin/setup_credentials.sh index cda42f0a0..39854b0a7 100755 --- a/bin/setup_credentials.sh +++ b/bin/setup_credentials.sh @@ -1,2 +1,2 @@ # Creates a superuser for the zaak-gateway backend -echo "from django.contrib.auth import get_user_model; get_user_model().objects.create_superuser('admin@admin.com', 'admin')" | docker-compose run --rm zaak-gateway python manage.py shell +echo "from django.contrib.auth import get_user_model; get_user_model().objects.create_superuser('admin@admin.com', 'admin')" | docker-compose -f docker-compose.local.yml run --rm zaak-gateway python manage.py shell diff --git a/docker-compose.test.yml b/docker-compose.local.yml similarity index 70% rename from docker-compose.test.yml rename to docker-compose.local.yml index 45f40c844..2de3a2779 100644 --- a/docker-compose.test.yml +++ b/docker-compose.local.yml @@ -7,31 +7,33 @@ services: - top_and_zaak_backend_bridge build: app hostname: zaak-gateway - image: zaak-gateway + image: ${REGISTRY:-127.0.0.1:5001}/${REPOSITORY:-salmagundi/zaken-backend}:${VERSION:-latest} ports: - 8080:8000 - 5678:5678 depends_on: - database + - zaak-redis env_file: - .env command: bash -c "/app/wait-for.sh zaak-redis:6379 && /app/wait-for.sh database:5432 && /app/deploy/docker-entrypoint.development.sh" - volumes: - - ./app:/app stdin_open: true tty: true zaak-redis: - image: redis + build: redis environment: - REDIS_PASSWORD=password + - REDIS_MAXMEM=100mb networks: - zaken_network + ports: + - "6379:6379" database: networks: - zaken_network - image: amsterdam/postgres11 + image: amsterdam/postgres12 shm_size: "512m" ports: - "6409:5432" @@ -40,24 +42,15 @@ services: volumes: - postgresql-data:/var/lib/postgresql/data - rabbitmq: - image: rabbitmq:3.9 - networks: - - zaken_network - ports: - - 5672:5672 - env_file: - - .env - zaken_celery_worker: - image: zaak-gateway + image: ${REGISTRY:-127.0.0.1:5001}/${REPOSITORY:-salmagundi/zaken-backend}:${VERSION:-latest} container_name: zaken_celery_worker hostname: zaken_celery_worker - command: bash -c "/app/wait-for.sh zaak-redis:6379 && /app/wait-for.sh rabbitmq:5672 && /app/wait-for.sh zaak-gateway:8000 -- /app/celery.sh" + command: bash -c "/app/wait-for.sh zaak-redis:6379 && /app/wait-for.sh zaak-gateway:8000 -- /app/celery.sh" depends_on: - zaak-gateway - database - - rabbitmq + - zaak-redis env_file: - .env networks: @@ -65,14 +58,14 @@ services: restart: on-failure zaken_celery_beat: - image: zaak-gateway + image: 
${REGISTRY:-127.0.0.1:5001}/${REPOSITORY:-salmagundi/zaken-backend}:${VERSION:-latest} container_name: zaken_celery_beat hostname: zaken_celery_beat - command: sh -c "/app/wait-for.sh zaak-redis:6379 && /app/wait-for.sh rabbitmq:5672 && /app/wait-for.sh zaak-gateway:8000 -- celery -A config beat -l info --scheduler django_celery_beat.schedulers:DatabaseScheduler" + command: sh -c "/app/wait-for.sh zaak-redis:6379 && /app/wait-for.sh zaak-gateway:8000 -- celery -A config beat -l info --scheduler django_celery_beat.schedulers:DatabaseScheduler" depends_on: - zaak-gateway - database - - rabbitmq + - zaak-redis env_file: - .env networks: diff --git a/docker-compose.yml b/docker-compose.yml index 89c57e8e9..874be84b7 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,95 +1,16 @@ -version: "3.5" services: zaak-gateway: - networks: - - zaken_network - - top_and_zaak_backend_bridge build: app hostname: zaak-gateway - image: zaak-gateway + image: ${REGISTRY:-127.0.0.1:5001}/${REPOSITORY:-salmagundi/zaken-backend}:${VERSION:-latest} ports: - 8080:8000 - 5678:5678 - depends_on: - - database - - zaak-redis env_file: - .env command: bash -c "/app/wait-for.sh zaak-redis:6379 && /app/wait-for.sh database:5432 && /app/deploy/docker-entrypoint.development.sh" - volumes: - - ./app:/app + # volumes: + # - ./app:/app stdin_open: true tty: true - - zaak-redis: - build: redis - environment: - - REDIS_PASSWORD=password - - REDIS_MAXMEM=100mb - networks: - - zaken_network - ports: - - "6379:6379" - - database: - networks: - - zaken_network - image: amsterdam/postgres12 - shm_size: "512m" - ports: - - "6409:5432" - env_file: - - .env - volumes: - - postgresql-data:/var/lib/postgresql/data - - rabbitmq: - image: rabbitmq:3.9 - networks: - - zaken_network - ports: - - 5672:5672 - env_file: - - .env - - zaken_celery_worker: - image: zaak-gateway - container_name: zaken_celery_worker - hostname: zaken_celery_worker - command: bash -c "/app/wait-for.sh zaak-redis:6379 && /app/wait-for.sh rabbitmq:5672 && /app/wait-for.sh zaak-gateway:8000 -- /app/celery.sh" - depends_on: - - zaak-gateway - - database - - rabbitmq - - zaak-redis - env_file: - - .env - networks: - - zaken_network - restart: on-failure - - zaken_celery_beat: - image: zaak-gateway - container_name: zaken_celery_beat - hostname: zaken_celery_beat - command: sh -c "/app/wait-for.sh zaak-redis:6379 && /app/wait-for.sh rabbitmq:5672 && /app/wait-for.sh zaak-gateway:8000 -- celery -A config beat -l info --scheduler django_celery_beat.schedulers:DatabaseScheduler" - depends_on: - - zaak-gateway - - database - - rabbitmq - - zaak-redis - env_file: - - .env - networks: - - zaken_network - restart: on-failure - -networks: - zaken_network: - external: true - top_and_zaak_backend_bridge: - external: true - -volumes: - postgresql-data: diff --git a/e2e-tests/api/client.py b/e2e-tests/api/client.py index 292d96e4c..2489b35da 100644 --- a/e2e-tests/api/client.py +++ b/e2e-tests/api/client.py @@ -32,7 +32,7 @@ def request(self, verb, url, headers=None, json=None, task_name="SYSTEM"): if not res.ok: raise Exception( - f"Error {task_name}: status: {res.status_code} on url: {url} with data: {json}" + f"Error {task_name}: status: {res.status_code} on url: {url} with data: {json} and text:\n{res.text[:5000]}\n" ) return res.json() diff --git a/e2e-tests/fix_models.sh b/e2e-tests/fix_models.sh index a6d1bda38..f6ade8364 100755 --- a/e2e-tests/fix_models.sh +++ b/e2e-tests/fix_models.sh @@ -1,10 +1,9 @@ #!/bin/bash - -docker-compose run --rm zaak-gateway python 
manage.py shell -c " +docker-compose -f docker-compose.local.yml run --rm zaak-gateway python manage.py shell -c " from django.contrib.auth import get_user_model get_user_model().objects.get_or_create(email='local.user@dev.com', first_name='local', last_name='user')" -docker-compose run --rm zaak-gateway python manage.py shell -c " +docker-compose -f docker-compose.local.yml run --rm zaak-gateway python manage.py shell -c " from apps.users.models import User, UserGroup from django.contrib.auth.models import Permission (group, _) = UserGroup.objects.get_or_create(name='PROJECTHANDHAVER', display_name='Projecthandhaver') @@ -15,7 +14,7 @@ group.permissions.add(Permission.objects.get(name='Can read gevoelige dossiers') user = User.objects.get(email='local.user@dev.com') user.groups.add(group)" -docker-compose run --rm zaak-gateway python manage.py shell -c " +docker-compose -f docker-compose.local.yml run --rm zaak-gateway python manage.py shell -c " from django_celery_beat.models import PeriodicTask, IntervalSchedule schedule, created = IntervalSchedule.objects.get_or_create(every=10, period=IntervalSchedule.SECONDS) PeriodicTask.objects.get_or_create(interval=schedule, name='Update workflows', task='apps.workflow.tasks.task_update_workflows')" diff --git a/e2e-tests/setup_or_reset_and_start.sh b/e2e-tests/setup_or_reset_and_start.sh index f9599fe73..c4c5f08b6 100755 --- a/e2e-tests/setup_or_reset_and_start.sh +++ b/e2e-tests/setup_or_reset_and_start.sh @@ -4,7 +4,7 @@ docker-compose down docker volume rm $(docker volume ls -q) -docker-compose -f ../docker-compose.test.yml build +docker-compose -f ../docker-compose.local.yml build docker-compose run --rm zaak-gateway python manage.py migrate @@ -12,4 +12,4 @@ bash ../bin/setup_credentials.sh ./fix_models.sh -docker-compose -f ../docker-compose.test.yml up +docker-compose -f ../docker-compose.local.yml up diff --git a/e2e-tests/unittest.cfg b/e2e-tests/unittest.cfg index 7164b3257..d8425d66c 100644 --- a/e2e-tests/unittest.cfg +++ b/e2e-tests/unittest.cfg @@ -3,4 +3,4 @@ plugins = nose2.plugins.mp [multiprocess] always-on = True -processes = 12 +processes = 6
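
The broker change above drops RabbitMQ entirely and points both the django-redis cache and the Celery broker at one Redis URL built from REDIS_HOST, REDIS_PORT and REDIS_PASSWORD, with an "rediss" scheme outside local development. A minimal standalone sketch of that composition, assuming the same variable names introduced in the new .env and settings.py:

import os

# Same names as the new .env entries; defaults mirror the local values.
REDIS_HOST = os.getenv("REDIS_HOST", "zaak-redis")
REDIS_PORT = os.getenv("REDIS_PORT", "6379")
REDIS_PASSWORD = os.getenv("REDIS_PASSWORD", "password")

# settings.py switches to TLS ("rediss") whenever ENVIRONMENT is not "local".
is_secure_environment = os.getenv("ENVIRONMENT", "local") != "local"
REDIS_PREFIX = "rediss" if is_secure_environment else "redis"

# One URL feeds both CACHES["default"]["LOCATION"] and CELERY_BROKER_URL.
REDIS_URL = f"{REDIS_PREFIX}://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}"
print(REDIS_URL)  # e.g. redis://:password@zaak-redis:6379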
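
get_bag_identificatie_and_stadsdeel now reads both the nummeraanduiding id and the stadsdeel from a single benkagg response instead of two separate BAG calls. A small sketch of that lookup outside Django, assuming the response shape used in the diff (_embedded.adresseerbareobjecten items carrying huisnummer, identificatie and gebiedenStadsdeelNaam); the project additionally sends an API-key header, omitted here:

import requests

BAG_API_BENKAGG_SEARCH_URL = (
    "https://api.data.amsterdam.nl/v1/benkagg/adresseerbareobjecten/"
)


def lookup_identificatie_and_stadsdeel(bag_id: str, huisnummer: int):
    """Return (nummeraanduiding_id, stadsdeel_naam) for a verblijfsobject id."""
    response = requests.get(
        BAG_API_BENKAGG_SEARCH_URL,
        params={"adresseertVerblijfsobjectIdentificatie": bag_id},
        timeout=30,
    )
    response.raise_for_status()
    objecten = response.json().get("_embedded", {}).get("adresseerbareobjecten", [])

    # Several adresseerbare objecten can share a bag_id; match on huisnummer,
    # as the model code does before raising DistrictNotFoundError.
    match = next((o for o in objecten if o.get("huisnummer") == huisnummer), {})
    return match.get("identificatie"), match.get("gebiedenStadsdeelNaam")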
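
CORS_ORIGIN_WHITELIST now comes from the environment as a comma-separated string, and settings.py calls .split(",") on it directly, so the variable must always be present (as it now is in .env). A slightly more defensive parse is sketched below, assuming the same variable name; this is an alternative, not what the diff itself does:

import os

raw = os.environ.get("CORS_ORIGIN_WHITELIST", "")
# Strip whitespace and drop empty entries so a missing value or a trailing
# comma does not produce invalid origins.
CORS_ORIGIN_WHITELIST = [o.strip() for o in raw.split(",") if o.strip()]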
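
azure_settings.py hands Django a password object whose __str__ fetches a fresh Azure AD token, so the token is resolved when a database connection is opened rather than once at settings-import time. A minimal sketch of the same lazy-string idea, with a dummy token source standing in for the real DefaultAzureCredential call (the stand-in is hypothetical):

class DynamicString:
    """String-like object that resolves its value only when rendered."""

    def __init__(self, fetch_token):
        self.fetch_token = fetch_token

    def __str__(self):
        # Intended to be called by the database backend at connect time,
        # so the credential is requested fresh instead of being cached here.
        return self.fetch_token()


def fake_token_source():
    return "token-issued-just-now"


password = DynamicString(fake_token_source)
print(f"password resolves to: {password}")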
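
The new utils/openzaak_enabled.py helper exists because OPENZAAK_ENABLED may arrive from the environment as the string "True" rather than a boolean, and the openzaak AppConfig now consults the helper instead of the raw setting. The check is sketched below, adapted to take the value as an argument instead of reading django.conf.settings:

def is_openzaak_enabled(value) -> bool:
    # Mirrors utils/openzaak_enabled.py: accept either the boolean True
    # or the literal string "True" coming from an environment variable.
    return value == "True" or value is True


assert is_openzaak_enabled("True")
assert is_openzaak_enabled(True)
assert not is_openzaak_enabled("False")
assert not is_openzaak_enabled(None)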
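
Both CaseFilter and CaseUserTaskFilter gain a tag ModelMultipleChoiceFilter, and the new OpenApiParameter("tag", ...) entries expose it as a repeatable query parameter whose matches are de-duplicated with .distinct(). A hedged client-side sketch of calling it, assuming the cases list is served at /api/v1/cases/ and that a bearer token is required (host, path and token are illustrative, not confirmed by the diff):

import requests

response = requests.get(
    "http://localhost:8080/api/v1/cases/",
    # Repeated "tag" parameters select cases carrying any of the given tag ids.
    params=[("tag", 1), ("tag", 2), ("page", 1)],
    headers={"Authorization": "Bearer <token>"},
    timeout=10,
)
print(response.status_code)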