diff --git a/server/.env.example b/server/.env.example index 778fac167..f39d40ff2 100644 --- a/server/.env.example +++ b/server/.env.example @@ -1,32 +1,27 @@ -############################### API SETTINGS ############################# - -# Server -HOST=0.0.0.0 -PORT=5000 -DEBUG=0 -ACCESS_LOG=1 -AUTO_RELOAD=1 -WORKERS=1 -TMP_DIR=./__tmp__ -UPDATE_ON_START=0 - -# Database -DATABASE_URL=postgresql://311_user:311_pass@db:5432/311_db -DATABASE_LOG_QUERIES=0 - -# Redis -REDIS_ENABLED=1 -REDIS_URL=redis://redis:6379 -REDIS_TTL_SECONDS=3600 - -# Picklebase -PICKLEBASE_ENABLED=0 -PICKLEBASE_BATCH_SIZE=400000 - -# Picklecache -PICKLECACHE_ENABLED=0 -PICKLECACHE_TTL_SECONDS=3600 + +######################### DOCKER-COMPOSE SETTINGS ######################## + +COMPOSE_PROJECT_NAME=311_data + +# host/public configs +API_HOST_PORT=5000 +DB_HOST_PORT=5433 + +# api config +DEBUG=True +API_RESTART_POLICY=no +APP_PORT=5000 + +# db connection config +DB_HOST=db +DB_PORT=5432 +DB_USER=311_user +DB_PASS=311_pass +DB_NAME=311_db +DB_ECHO=False + +############################ 3RD PARTY TOOLS ############################## # Socrata SOCRATA_TOKEN= @@ -45,14 +40,4 @@ SLACK_ERROR_CODES=[400, 500] # Sendgrid SENDGRID_API_KEY= -######################### DOCKER-COMPOSE SETTINGS ######################## - -COMPOSE_PROJECT_NAME=311_data -DB_USER=311_user -DB_PASS=311_pass -DB_NAME=311_db -DB_HOST_PORT=5433 -API_HOST_PORT=5000 -API_RESTART_POLICY=no - -################################# OVERRIDES ############################## +############################################################################ diff --git a/server/api/.dockerignore b/server/api/.dockerignore new file mode 100644 index 000000000..8f9d89f87 --- /dev/null +++ b/server/api/.dockerignore @@ -0,0 +1,5 @@ +# ignore .git and .cache folders +Pipfile +Pipfile.lock +__pycache__ +__tmp__ diff --git a/server/api/Dockerfile b/server/api/Dockerfile index 8718ed7f7..0e19bb076 100644 --- a/server/api/Dockerfile +++ b/server/api/Dockerfile 
@@ -1,23 +1,17 @@ -FROM python:3.7-slim +FROM python:3.7 -RUN apt-get update && \ - apt-get install -yq \ - python3 \ - python3-dev \ - gcc \ - g++ \ - gfortran \ - musl-dev && \ - pip install --upgrade pip +WORKDIR /home/api -COPY requirements.txt /home/api/ +COPY requirements.txt . RUN pip install --no-cache-dir -r /home/api/requirements.txt -COPY setup.cfg /home/api/ -COPY /bin /home/api/bin/ -COPY .env* /src /home/api/src/ +COPY . . -WORKDIR /home/api +ENV APP_HOST=0.0.0.0 +# need add the src folder to sys.path for compatibility +ENV PYTHONPATH=/home/api/src + +EXPOSE 5000 -CMD python bin/api_check.py && python bin/api_start.py +CMD python code/run.py diff --git a/server/api/Pipfile b/server/api/Pipfile index db8545fc4..f35734683 100644 --- a/server/api/Pipfile +++ b/server/api/Pipfile @@ -24,6 +24,10 @@ sendgrid = "*" sodapy = "*" SQLAlchemy = "*" tabulate = "*" +gino = {extras = ["starlette"], version = "*"} +fastapi = "*" +uvicorn = "*" +gunicorn = "*" [requires] python_version = "3.7" diff --git a/server/api/Pipfile.lock b/server/api/Pipfile.lock index e62ac529a..12a7dc482 100644 --- a/server/api/Pipfile.lock +++ b/server/api/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "9c31d5fffafd7b7faebd5a67959ccc2953e1c4d86e9a1f7ed7f63160cf14963b" + "sha256": "6cf778b290d427ebf1790460fe80f042f45f68644247d5b28c4ad1654bc11f5c" }, "pipfile-spec": 6, "requires": { @@ -23,6 +23,37 @@ ], "version": "==0.5.0" }, + "asyncpg": { + "hashes": [ + "sha256:09badce47a4645cfe523cc8a182bd047d5d62af0caaea77935e6a3c9e77dc364", + "sha256:22d161618b59e4b56fb2a5cc956aa9eeb336d07cae924a5b90c9aa1c2d137f15", + "sha256:28584783dd0d21b2a0db3bfe54fb12f21425a4cc015e4419083ea99e6de0de9b", + "sha256:308b8ba32c42ea1ed84c034320678ec307296bb4faf3fbbeb9f9e20b46db99a5", + "sha256:3ade59cef35bffae6dbc6f5f3ef56e1d53c67f0a7adc3cc4c714f07568d2d717", + "sha256:4421407b07b4e22291a226d9de0bf6f3ea8158aa1c12d83bfedbf5c22e13cd55", + 
"sha256:53cb2a0eb326f61e34ef4da2db01d87ce9c0ebe396f65a295829df334e31863f", + "sha256:615c7e3adb46e1f2e3aff45e4ee9401b4f24f9f7153e5530a0753369be72a5c6", + "sha256:68f7981f65317a5d5f497ec76919b488dbe0e838f8b924e7517a680bdca0f308", + "sha256:6b7807bfedd24dd15cfb2c17c60977ce01410615ecc285268b5144a944ec97ff", + "sha256:7e51d1a012b779e0ebf0195f80d004f65d3c60cc06f0fa1cef9d3e536262abbd", + "sha256:7ee29c4707eb8fb3d3a0348ac4495e06f4afaca3ee38c3bebedc9c8b239125ff", + "sha256:823eca36108bd64a8600efe7bbf1230aa00f2defa3be42852f3b61ab40cf1226", + "sha256:8587e206d78e739ca83a40c9982e03b28f8904c95a54dc782da99e86cf768f73", + "sha256:888593b6688faa7ec1c97ff7f2ca3b5a5b8abb15478fe2a13c5012b607a28737", + "sha256:915cebc8a7693c8a5e89804fa106678dbedcc50d0270ebab0b75f16e668bd59b", + "sha256:a4c1feb285ec3807ecd5b54ab718a3d065bb55c93ebaf800670eadde31484be8", + "sha256:aa2e0cb14c01a2f58caeeca7196681b30aa22dd22c82845560b401df5e98e171", + "sha256:b1b10916c006e5c2c0dcd5dadeb38cbf61ecd20d66c50164e82f31c22c7e329d", + "sha256:dddf4d4c5e781310a36529c3c87c1746837c2d2c7ec0f2ec4e4f06450d83c50a", + "sha256:dfd491e9865e64a3e91f1587b1d88d71dde1cfb850429253a73d4d44b98c3a0f", + "sha256:e7bfb9269aeb11d78d50accf1be46823683ced99209b7199e307cdf7da849522", + "sha256:ea26604932719b3612541e606508d9d604211f56a65806ccf8c92c64104f4f8a", + "sha256:ecd5232cf64f58caac3b85103f1223fdf20e9eb43bfa053c56ef9e5dd76ab099", + "sha256:f2d1aa890ffd1ad062a38b7ff7488764b3da4b0a24e0c83d7bbb1d1a6609df15" + ], + "markers": "python_full_version >= '3.5.0'", + "version": "==0.21.0" + }, "certifi": { "hashes": [ "sha256:5930595817496dd21bb8dc35dad090f1c2cd0adfaf21204bf6732ca5d8ee34d3", @@ -37,6 +68,14 @@ ], "version": "==3.0.4" }, + "click": { + "hashes": [ + "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a", + "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", + "version": "==7.1.2" + }, 
"contextvars": { "hashes": [ "sha256:f38c908aaa59c14335eeea12abea5f443646216c4e29380d7bf34d2018e2c39e" @@ -44,6 +83,48 @@ "markers": "python_version < '3.7'", "version": "==2.4" }, + "dataclasses": { + "hashes": [ + "sha256:3459118f7ede7c8bea0fe795bff7c6c2ce287d01dd226202f7c9ebc0610a7836", + "sha256:494a6dcae3b8bcf80848eea2ef64c0cc5cd307ffc263e17cdf42f3e5420808e6" + ], + "markers": "python_version < '3.7'", + "version": "==0.7" + }, + "fastapi": { + "hashes": [ + "sha256:29c12dd0d4ac825d13c2db4762d2863281c18085ae521825829182e977fd25ac", + "sha256:d0b3f629f8d165a21ee082bf31e1697c391c1cdf940304408614b5b7c59d1fb3" + ], + "index": "pypi", + "version": "==0.61.0" + }, + "gino": { + "extras": [ + "starlette" + ], + "hashes": [ + "sha256:56df57cfdefbaf897a7c4897c265a0e91a8cca80716fb64f7d3cf6d501fdfb3d", + "sha256:fe4189e82fe9d20c4a5f03fc775fb91c168061c5176b4c95623caeef22316150" + ], + "index": "pypi", + "version": "==1.0.1" + }, + "gino-starlette": { + "hashes": [ + "sha256:a1afe419b34146449a502a5483085a60a75a46639534fff50510172b47c930fb", + "sha256:de6ec87168097a52668359c842e9e3be4d339423c7805c615377975a1a19cb6c" + ], + "version": "==0.1.1" + }, + "gunicorn": { + "hashes": [ + "sha256:1904bb2b8a43658807108d59c3f3d56c2b6121a701161de0ddf9ad140073c626", + "sha256:cd4a810dd51bf497552cf3f863b575dabd73d6ad6a91075b65936b151cbf4f9c" + ], + "index": "pypi", + "version": "==20.0.4" + }, "h11": { "hashes": [ "sha256:acca6a44cb52a32ab442b1779adf0875c443c689e9e028f8d831a3769f9c5208", @@ -67,11 +148,11 @@ }, "hstspreload": { "hashes": [ - "sha256:13cf2e9fcd064cd81d220432de9a66dd7e4f10862a03574c45e5f61fc522f312", - "sha256:5e3b6b2376c6f412086ee21cdd29cd5e0af5b28c967e5f1f026323d0f31dc84b" + "sha256:3129613419c13ea62411ec7375d79840e28004cbb6a585909ddcbeee401bea14", + "sha256:c96401eca4669340b423abd711d2d5d03ddf0685461f95e9cfe500d5e9acf3d2" ], "markers": "python_version >= '3.6'", - "version": "==2020.8.18" + "version": "==2020.8.25" }, "http3": { "hashes": [ @@ -95,6 +176,7 @@ 
"sha256:fa3cd71e31436911a44620473e873a256851e1f53dee56669dae403ba41756a4", "sha256:fea04e126014169384dee76a153d4573d90d0cbd1d12185da089f73c78390437" ], + "markers": "sys_platform != 'win32' and sys_platform != 'cygwin' and platform_python_implementation != 'PyPy'", "version": "==0.1.1" }, "httpx": { @@ -138,6 +220,14 @@ "markers": "python_version >= '3.5'", "version": "==0.14" }, + "importlib-metadata": { + "hashes": [ + "sha256:90bb658cdbbf6d1735b6341ce708fc7024a3e14e99ffdc5783edea9f9b077f83", + "sha256:dc15b2969b4ce36305c51eebe62d418ac7791e9a157911d58bfb1f9ccd8e2070" + ], + "markers": "python_version < '3.8'", + "version": "==1.7.0" + }, "multidict": { "hashes": [ "sha256:1ece5a3369835c20ed57adadc663400b5525904e53bae59ec854a5d36b39b21a", @@ -251,6 +341,29 @@ "index": "pypi", "version": "==2.8.5" }, + "pydantic": { + "hashes": [ + "sha256:1783c1d927f9e1366e0e0609ae324039b2479a1a282a98ed6a6836c9ed02002c", + "sha256:2dc946b07cf24bee4737ced0ae77e2ea6bc97489ba5a035b603bd1b40ad81f7e", + "sha256:2de562a456c4ecdc80cf1a8c3e70c666625f7d02d89a6174ecf63754c734592e", + "sha256:36dbf6f1be212ab37b5fda07667461a9219c956181aa5570a00edfb0acdfe4a1", + "sha256:3fa799f3cfff3e5f536cbd389368fc96a44bb30308f258c94ee76b73bd60531d", + "sha256:40d765fa2d31d5be8e29c1794657ad46f5ee583a565c83cea56630d3ae5878b9", + "sha256:418b84654b60e44c0cdd5384294b0e4bc1ebf42d6e873819424f3b78b8690614", + "sha256:4900b8820b687c9a3ed753684337979574df20e6ebe4227381d04b3c3c628f99", + "sha256:530d7222a2786a97bc59ee0e0ebbe23728f82974b1f1ad9a11cd966143410633", + "sha256:54122a8ed6b75fe1dd80797f8251ad2063ea348a03b77218d73ea9fe19bd4e73", + "sha256:6c3f162ba175678218629f446a947e3356415b6b09122dcb364e58c442c645a7", + "sha256:b49c86aecde15cde33835d5d6360e55f5e0067bb7143a8303bf03b872935c75b", + "sha256:b5b3489cb303d0f41ad4a7390cf606a5f2c7a94dcba20c051cd1c653694cb14d", + "sha256:cf3933c98cb5e808b62fae509f74f209730b180b1e3c3954ee3f7949e083a7df", + "sha256:eb75dc1809875d5738df14b6566ccf9fd9c0bcde4f36b72870f318f16b9f5c20", + 
"sha256:f769141ab0abfadf3305d4fcf36660e5cf568a666dd3efab7c3d4782f70946b1", + "sha256:f8af9b840a9074e08c0e6dc93101de84ba95df89b267bf7151d74c553d66833b" + ], + "markers": "python_version >= '3.6'", + "version": "==1.6.1" + }, "pysupercluster": { "hashes": [ "sha256:1e9739e9dad6126f346b5816dd6d9affc31d8482ef89fd30ff87f8412530726a" @@ -386,10 +499,13 @@ "sha256:072766c3bd09294d716b2d114d46ffc5ccf8ea0b714a4e1c48253014b771c6bb", "sha256:107d4af989831d7b091e382d192955679ec07a9209996bf8090f1f539ffc5804", "sha256:15c0bcd3c14f4086701c33a9e87e2c7ceb3bcb4a246cd88ec54a49cf2a5bd1a6", + "sha256:26c5ca9d09f0e21b8671a32f7d83caad5be1f6ff45eef5ec2f6fd0db85fc5dc0", "sha256:276936d41111a501cf4a1a0543e25449108d87e9f8c94714f7660eaea89ae5fe", "sha256:3292a28344922415f939ee7f4fc0c186f3d5a0bf02192ceabd4f1129d71b08de", "sha256:33d29ae8f1dc7c75b191bb6833f55a19c932514b9b5ce8c3ab9bc3047da5db36", "sha256:3bba2e9fbedb0511769780fe1d63007081008c5c2d7d715e91858c94dbaa260e", + "sha256:465c999ef30b1c7525f81330184121521418a67189053bcf585824d833c05b66", + "sha256:51064ee7938526bab92acd049d41a1dc797422256086b39c08bafeffb9d304c6", "sha256:5a49e8473b1ab1228302ed27365ea0fadd4bf44bc0f9e73fe38e10fdd3d6b4fc", "sha256:618db68745682f64cedc96ca93707805d1f3a031747b5a0d8e150cfd5055ae4d", "sha256:6547b27698b5b3bbfc5210233bd9523de849b2bb8a0329cd754c9308fc8a05ce", @@ -404,6 +520,7 @@ "sha256:9e865835e36dfbb1873b65e722ea627c096c11b05f796831e3a9b542926e979e", "sha256:aa0554495fe06172b550098909be8db79b5accdf6ffb59611900bea345df5eba", "sha256:b595e71c51657f9ee3235db8b53d0b57c09eee74dfb5b77edff0e46d2218dc02", + "sha256:b6ff91356354b7ff3bd208adcf875056d3d886ed7cef90c571aef2ab8a554b12", "sha256:b70bad2f1a5bd3460746c3fb3ab69e4e0eb5f59d977a23f9b66e5bdc74d97b86", "sha256:c7adb1f69a80573698c2def5ead584138ca00fff4ad9785a4b0b2bf927ba308d", "sha256:c898b3ebcc9eae7b36bd0b4bbbafce2d8076680f6868bcbacee2d39a7a9726a7", @@ -421,6 +538,14 @@ ], "version": "==1.0.0" }, + "starlette": { + "hashes": [ + 
"sha256:bd2ffe5e37fb75d014728511f8e68ebf2c80b0fa3d04ca1479f4dc752ae31ac9", + "sha256:ebe8ee08d9be96a3c9f31b2cb2a24dbdf845247b745664bd8a3f9bd0c977fdbc" + ], + "markers": "python_version >= '3.6'", + "version": "==0.13.6" + }, "tabulate": { "hashes": [ "sha256:ac64cb76d53b1231d364babcd72abbb16855adac7de6665122f97b593f1eb2ba", @@ -460,6 +585,14 @@ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'", "version": "==1.25.10" }, + "uvicorn": { + "hashes": [ + "sha256:46a83e371f37ea7ff29577d00015f02c942410288fb57def6440f2653fff1d26", + "sha256:4b70ddb4c1946e39db9f3082d53e323dfd50634b95fd83625d778729ef1730ef" + ], + "index": "pypi", + "version": "==0.11.8" + }, "uvloop": { "hashes": [ "sha256:08b109f0213af392150e2fe6f81d33261bb5ce968a288eb698aad4f46eb711bd", @@ -472,7 +605,7 @@ "sha256:e7514d7a48c063226b7d06617cbb12a14278d4323a065a8d46a7962686ce2e95", "sha256:f07909cd9fc08c52d294b1570bba92186181ca01fe3dc9ffba68955273dd7362" ], - "markers": "sys_platform != 'win32' and implementation_name == 'cpython'", + "markers": "sys_platform != 'win32' and implementation_name == 'cpython' and sys_platform != 'win32' and sys_platform != 'cygwin' and platform_python_implementation != 'PyPy'", "version": "==0.14.0" }, "websockets": { @@ -502,6 +635,14 @@ ], "markers": "python_full_version >= '3.6.1'", "version": "==8.1" + }, + "zipp": { + "hashes": [ + "sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b", + "sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96" + ], + "markers": "python_version >= '3.6'", + "version": "==3.1.0" } }, "develop": { diff --git a/server/api/README.md b/server/api/README.md new file mode 100644 index 000000000..e84c88249 --- /dev/null +++ b/server/api/README.md @@ -0,0 +1,113 @@ +# 311 API + +## Changes + +* API now uses FastAPI (uvicorn/ASGI) +* Data access uses Gino, asyncpg +* New code is in the code directory +* Entry point is code/run.py +* Legacy code 
remains in src directory +* Current API compatibility: code/lacity_data_api/routers/legacy.py + +## Running using Docker Compose + +Setting for Docker Compose are in the .env file in the server directory. +The DB_HOST setting should work if it is set to: host.docker.internal + +To start the DB and API from the server directory run: + +```bash +docker-compose up +``` + +To try the API: + +* Ensure Test API is running with welcome message at http://localhost:5000/ +* Test API using Open API (Swagger) at http://localhost:5000/docs +* Run the ReactJS app ```npm start``` from /client to make sure frontend works + +## Testing + +### Full filter (with zoom and bounds) + +```json +{ + "startDate":"01/01/2020", + "endDate":"08/27/2020", + "ncList":[ + 52, + 46, + 128, + 54, + 104, + 76, + 97, + 121, + 55 + ], + "requestTypes":[ + "Dead Animal Removal", + "Homeless Encampment", + "Single Streetlight Issue", + "Multiple Streetlight Issue", + "Feedback" + ], + "zoom":13, + "bounds":{ + "north":34.0731374116421, + "east":-118.18010330200195, + "south":33.97582290387967, + "west":-118.41201782226564 + } +} +``` + +## TODOs + +* add more pytests +* finish routes +* alembic migrations +* data seeds +* Add telemetry: +OpenTelemetry instrumentors exist for FastAPI, asyncpg, SQLAlchemy +https://opentelemetry-python.readthedocs.io/ +https://opentelemetry.lightstep.com/ + +## Odd and Ends + +To time API calls using curl: add ```--write-out '%{time_total}\n' --output /dev/null --silent``` + +For example: + +```bash +curl -X POST "http://localhost:5000/map/pins" -H "accept: application/json" -H "Content-Type: application/json" -d "{\"startDate\":\"01/01/2020\",\"endDate\":\"08/27/2020\",\"ncList\":[52,46,128,54,104,76,97,121,55],\"requestTypes\":[\"Homeless Encampment\"]}" --write-out '%{time_total}\n' --output /dev/null --silent +``` + +Here's how to go about killing any process orphaned by VS Code using port 5000 + +```bash +lsof -ti tcp:5000 | xargs kill +``` + +### Format for 
comparison reports + +{ + "startDate":"01/01/2020", + "endDate":"08/27/2020", + "requestTypes":[ + "Bulky Items" + ], + "chart":"frequency", + "set1":{ + "district":"nc", + "list":[ + 6 + ] + }, + "set2":{ + "district":"nc", + "list":[ + 9 + ] + } +} diff --git a/server/api/api.config b/server/api/api.config new file mode 100644 index 000000000..728b7406d --- /dev/null +++ b/server/api/api.config @@ -0,0 +1,35 @@ + +# Application Settings: sets picklecache enable and file location +DEBUG=True +UPDATE_ON_START=0 +USE_FILE_CACHE=True +TEMP_FOLDER=./__tmp__ +API_LEGACY_MODE=True + +# Database Settings: host.docker.internal or localhost +DB_HOST=localhost +DB_PORT=5433 +DB_PASSWORD=311_pass +DB_ECHO=True + +# Redis cache +REDIS_ENABLED=0 +REDIS_URL=redis://redis:6379 +REDIS_TTL_SECONDS=3600 + +# Socrata data source +SOCRATA_TOKEN=6b5lwk1jHSQTgx7PAVFKOOdt2 +SOCRATA_BATCH_SIZE=50000 + +# Github code source +GITHUB_TOKEN= +GITHUB_ISSUES_URL=https://api.github.com/repos/hackforla/311-data-support/issues +GITHUB_PROJECT_URL= +GITHUB_SHA=DEVELOPMENT + +# Slack error reporting +SLACK_WEBHOOK_URL= +SLACK_ERROR_CODES=[400, 500] + +# Sendgrid email +SENDGRID_API_KEY= diff --git a/server/api/code/lacity_data_api/__init__.py b/server/api/code/lacity_data_api/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/server/api/code/lacity_data_api/asgi.py b/server/api/code/lacity_data_api/asgi.py new file mode 100644 index 000000000..d842254bd --- /dev/null +++ b/server/api/code/lacity_data_api/asgi.py @@ -0,0 +1,3 @@ +from .main import get_app + +app = get_app() diff --git a/server/api/code/lacity_data_api/config.py b/server/api/code/lacity_data_api/config.py new file mode 100644 index 000000000..72de95bb1 --- /dev/null +++ b/server/api/code/lacity_data_api/config.py @@ -0,0 +1,66 @@ +import os +from sqlalchemy.engine.url import URL, make_url +from starlette.config import Config, environ +from starlette.datastructures import Secret + +config = Config("api.config") + 
+# checking for testing or debug +DEBUG = config("DEBUG", cast=bool, default=False) +TESTING = config("TESTING", cast=bool, default=False) + +# getting database configuration +DB_DRIVER = config("DB_DRIVER", default="postgresql") +DB_HOST = config("DB_HOST", default="localhost") +DB_PORT = config("DB_PORT", cast=int, default=5432) +DB_USER = config("DB_USER", default="311_user") +DB_PASSWORD = config("DB_PASSWORD", cast=Secret, default=None) +DB_DATABASE = config("DB_DATABASE", default="311_db") + +if TESTING: + if DB_DATABASE: + DB_DATABASE += "_test" + +DB_DSN = config( + "DB_DSN", + cast=make_url, + default=URL( + drivername=DB_DRIVER, + username=DB_USER, + password=DB_PASSWORD, + host=DB_HOST, + port=DB_PORT, + database=DB_DATABASE, + ), +) + +DB_POOL_MIN_SIZE = config("DB_POOL_MIN_SIZE", cast=int, default=1) +DB_POOL_MAX_SIZE = config("DB_POOL_MAX_SIZE", cast=int, default=16) +DB_ECHO = config("DB_ECHO", cast=bool, default=True) +DB_SSL = config("DB_SSL", default=None) +DB_USE_CONNECTION_FOR_REQUEST = config( + "DB_USE_CONNECTION_FOR_REQUEST", cast=bool, default=True +) +DB_RETRY_LIMIT = config("DB_RETRY_LIMIT", cast=int, default=32) +DB_RETRY_INTERVAL = config("DB_RETRY_INTERVAL", cast=int, default=1) + +# check whether running in legacy mode +API_LEGACY_MODE = config('API_LEGACY_MODE', cast=bool, default=True) + +# TODO: figure out how to remove dependency on DATABASE_URL from services +# the legacy code needs these created as environment settings +if API_LEGACY_MODE: + environ['DATABASE_URL'] = str(DB_DSN) + environ['TMP_DIR'] = config('TEMP_FOLDER') + environ['PICKLECACHE_ENABLED'] = config('USE_FILE_CACHE') + +# print out debug information +if DEBUG: + print("\n\033[93mLA City Data API server starting with DEBUG mode ENABLED\033[0m") + print("\nEnvironment variables after executing config.py file:") + for k, v in sorted(os.environ.items()): + print(f'\033[92m{k}\033[0m: {v}') + print(f"\n\033[93mDatabase\033[0m: {DB_DSN}\n") + +# create empty cache object 
to populate at runtime +cache = {} diff --git a/server/api/code/lacity_data_api/main.py b/server/api/code/lacity_data_api/main.py new file mode 100644 index 000000000..a74c12a85 --- /dev/null +++ b/server/api/code/lacity_data_api/main.py @@ -0,0 +1,43 @@ +import logging + +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware +from fastapi.middleware.gzip import GZipMiddleware + +from .config import API_LEGACY_MODE # important to load config first +from .models import db +from .routers import ( + index, legacy, councils, regions, request_types, service_requests, shim +) + + +logger = logging.getLogger(__name__) + + +def get_app(): + app = FastAPI(title="LA City 311 Data API") + + db.init_app(app) + app.include_router(index.router) + + if API_LEGACY_MODE: + app.include_router(legacy.router) + else: + app.include_router(shim.router) + + app.include_router(councils.router, prefix="/councils") + app.include_router(regions.router, prefix="/regions") + app.include_router(request_types.router, prefix="/types") + app.include_router(service_requests.router, prefix="/requests") + + app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], + ) + + app.add_middleware(GZipMiddleware) + + return app diff --git a/server/api/code/lacity_data_api/models/__init__.py b/server/api/code/lacity_data_api/models/__init__.py new file mode 100644 index 000000000..a8638934f --- /dev/null +++ b/server/api/code/lacity_data_api/models/__init__.py @@ -0,0 +1,14 @@ +from gino.ext.starlette import Gino + +from .. 
import config + +db = Gino( + dsn=config.DB_DSN, + pool_min_size=config.DB_POOL_MIN_SIZE, + pool_max_size=config.DB_POOL_MAX_SIZE, + echo=config.DB_ECHO, + ssl=config.DB_SSL, + use_connection_for_request=config.DB_USE_CONNECTION_FOR_REQUEST, + retry_limit=config.DB_RETRY_LIMIT, + retry_interval=config.DB_RETRY_INTERVAL, +) diff --git a/server/api/code/lacity_data_api/models/clusters.py b/server/api/code/lacity_data_api/models/clusters.py new file mode 100644 index 000000000..8630485fe --- /dev/null +++ b/server/api/code/lacity_data_api/models/clusters.py @@ -0,0 +1,305 @@ +from typing import List +import datetime + +import numpy +import pysupercluster +from sqlalchemy import and_ + +from .service_request import ServiceRequest +from .region import Region +from . import db +from ..config import cache + + +DEFAULT_CITY_ZOOM = 11 # a click on a city point zooms from 10 to 12 +DEFAULT_REGION_ZOOM = 12 # a click on a city point zooms from 10 to 12 +DEFAULT_COUNCIL_ZOOM = 13 # a click on a council point zooms to 14 +DEFAULT_LATITUDE = 34.0522 +DEFAULT_LONGITUDE = -118.2437 + + +class Cluster: + def __init__(self, + count: int, + expansion_zoom: int, + id: int, + latitude: float, + longitude: float): + self.count = count + self.expansion_zoom = expansion_zoom + self.id = id + self.latitude = latitude + self.longitude = longitude + + +async def get_clusters_for_city( + start_date: datetime.date, + end_date: datetime.date, + type_ids: List[int], + zoom_current: int +) -> List[Cluster]: + """ + Cluster pins for the entire city + + Args: + start_date (date): beginning of date range service was requested + end_date (date): end of date range service was requested + type_ids (List[int]): the request type ids to match on + + Returns: + cluster: a cluster object + """ + + result = await (db.select([db.func.count()]) + .where(and_( + ServiceRequest.created_date >= start_date, + ServiceRequest.created_date <= end_date, + ServiceRequest.type_id.in_(type_ids) + )) + ).gino.scalar() + + 
# zoom_next = (zoom_current + 1) or DEFAULT_CITY_ZOOM + + cluster_list = [] + cluster_list.append(Cluster( + count=result, + expansion_zoom=DEFAULT_CITY_ZOOM, + id=0, + latitude=DEFAULT_LATITUDE, + longitude=DEFAULT_LONGITUDE + )) + + return cluster_list + + +async def get_clusters_for_regions( + start_date: datetime.date, + end_date: datetime.date, + type_ids: List[int], + council_ids: List[int], + zoom_current: int +) -> List[Cluster]: + """ + Cluster service request pins by council regions + + Args: + start_date (date): beginning of date range service was requested + end_date (date): end of date range service was requested + type_ids (List[int]): the request type ids to match on + council_ids (List[int]): the council ids to match on + + Returns: + cluster: a list of cluster objects + """ + + # TODO: CACHE 'region-reqs:start-end-types-councils' + result = await ( + db.select( + [ + ServiceRequest.region_id, + db.func.count() + ] + ).where( + and_( + ServiceRequest.created_date >= start_date, + ServiceRequest.created_date <= end_date, + ServiceRequest.type_id.in_(type_ids), + ServiceRequest.council_id.in_(council_ids), + ) + ).group_by( + ServiceRequest.region_id + ).gino.all() + ) + + cluster_list = [] + + for row in result: + region = await Region.get(row[0]) + cluster_list.append(Cluster( + count=row[1], + expansion_zoom=DEFAULT_REGION_ZOOM, + id=region.region_id, + latitude=region.latitude, + longitude=region.longitude + )) + + return cluster_list + + +async def get_clusters_for_councils( + start_date: datetime.date, + end_date: datetime.date, + type_ids: List[int], + council_ids: List[int], + zoom_current: int +) -> List[Cluster]: + """ + Cluster service request pins by council + + Args: + start_date (date): beginning of date range service was requested + end_date (date): end of date range service was requested + type_ids (List[int]): the request type ids to match on + council_ids (List[int]): the council ids to match on + + Returns: + cluster: a list of 
cluster objects + """ + + # TODO: CACHE 'council-reqs:start-end-types-councils' + result = await ( + db.select( + [ + ServiceRequest.council_id, + db.func.count() + ] + ).where( + and_( + ServiceRequest.created_date >= start_date, + ServiceRequest.created_date <= end_date, + ServiceRequest.type_id.in_(type_ids), + ServiceRequest.council_id.in_(council_ids), + ) + ).group_by( + ServiceRequest.council_id + ).gino.all() + ) + + # zoom_next = (zoom_current + 1) or DEFAULT_COUNCIL_ZOOM + cluster_list = [] + + # TODO: replace this with a caching solution + # returns dictionary with council id as key and name, lat, long + # council_result = await db.all(Council.query) + # councils = [ + # (i.council_id, [i.council_name, i.latitude, i.longitude]) + # for i in council_result + # ] + councils_dict = cache.get("councils_dict") + + for row in result: + council = councils_dict.get(row[0]) + cluster_list.append(Cluster( + count=row[1], + expansion_zoom=DEFAULT_COUNCIL_ZOOM, + id=row[0], + latitude=council[1], + longitude=council[2] + )) + + return cluster_list + + +async def get_points( + start_date: datetime.date, + end_date: datetime.date, + type_ids: List[int], + council_ids: List[int] +) -> List[List[int]]: + """ + Get filtered geospacial points for service requests for the entire city + + Args: + start_date (date): beginning of date range service was requested + end_date (date): end of date range service was requested + type_ids (List[int]): the request type ids to match + council_ids: (List[int]): the council ids to match + + Returns: + a list of latitude and logitude pairs of service request locations + """ + + result = await ( + db.select( + [ + ServiceRequest.latitude, + ServiceRequest.longitude + ] + ).where( + and_( + ServiceRequest.created_date >= start_date, + ServiceRequest.created_date <= end_date, + ServiceRequest.type_id.in_(type_ids), + ServiceRequest.council_id.in_(council_ids) + ) + ).gino.all() + ) + + point_list = [] + for row in result: + 
point_list.append([row.latitude, row.longitude]) + + return point_list + + +async def get_clusters_for_bounds( + start_date: datetime.date, + end_date: datetime.date, + type_ids: List[int], + council_ids: List[int], + zoom_current: int, + bounds +) -> List[Cluster]: + """ + Cluster pins for the entire city + + Args: + start_date (date): beginning of date range service was requested + end_date (date): end of date range service was requested + type_ids (List[int]): the request type ids to match on + + Returns: + a JSON object either representing a cluster or a pin for a request + """ + + result = await ( + db.select( + [ + ServiceRequest.request_id, + ServiceRequest.latitude, + ServiceRequest.longitude, + ServiceRequest.type_id + ] + ).where( + and_( + ServiceRequest.created_date >= start_date, + ServiceRequest.created_date <= end_date, + ServiceRequest.type_id.in_(type_ids), + ServiceRequest.council_id.in_(council_ids), + ServiceRequest.latitude < bounds.north, + ServiceRequest.latitude > bounds.south, + ServiceRequest.longitude > bounds.west, + ServiceRequest.longitude < bounds.east + ) + ).gino.all() + ) + + # TODO: clean this up. 
goes in [longitude, latitude] format + points = [[row.longitude, row.latitude] for row in result] + + index = pysupercluster.SuperCluster( + numpy.array(points), + min_zoom=0, + max_zoom=17, + radius=200, + extent=512 + ) + + cluster_list = index.getClusters( + top_left=(bounds.west, bounds.north), + bottom_right=(bounds.east, bounds.south), + zoom=zoom_current + ) + + # TODO: replace this with a proper caching solution + types_dict = cache.get("types_dict") + + for item in cluster_list: + # change single item clusters into points + if item['count'] == 1: + pin = result[item['id']] # cluster id matches the result row + item['srnumber'] = "1-" + str(pin.request_id) + item['requesttype'] = types_dict[pin.type_id] + del item['expansion_zoom'] + + return cluster_list diff --git a/server/api/code/lacity_data_api/models/council.py b/server/api/code/lacity_data_api/models/council.py new file mode 100644 index 000000000..b43cd8db6 --- /dev/null +++ b/server/api/code/lacity_data_api/models/council.py @@ -0,0 +1,20 @@ +from . import db + + +class Council(db.Model): + __tablename__ = "councils" + + council_id = db.Column(db.SmallInteger, primary_key=True) + council_name = db.Column(db.String) + region_id = db.Column(db.SmallInteger) + latitude = db.Column(db.Float) + longitude = db.Column(db.Float) + + +async def get_councils_dict(): + result = await db.all(Council.query) + councils_dict = [ + (i.council_id, (i.council_name, i.latitude, i.longitude)) + for i in result + ] + return dict(councils_dict) diff --git a/server/api/code/lacity_data_api/models/region.py b/server/api/code/lacity_data_api/models/region.py new file mode 100644 index 000000000..b0af10bfd --- /dev/null +++ b/server/api/code/lacity_data_api/models/region.py @@ -0,0 +1,19 @@ +from . 
import db + + +class Region(db.Model): + __tablename__ = 'regions' + + region_id = db.Column(db.SmallInteger, primary_key=True) + region_name = db.Column(db.String) + latitude = db.Column(db.Float) + longitude = db.Column(db.Float) + + +async def get_regions_dict(): + result = await db.all(Region.query) + regions_dict = [ + (i.region_id, (i.region_name, i.latitude, i.longitude)) + for i in result + ] + return dict(regions_dict) diff --git a/server/api/code/lacity_data_api/models/request_type.py b/server/api/code/lacity_data_api/models/request_type.py new file mode 100644 index 000000000..808a4022e --- /dev/null +++ b/server/api/code/lacity_data_api/models/request_type.py @@ -0,0 +1,38 @@ +from typing import List +import functools + +from . import db + + +class RequestType(db.Model): + __tablename__ = 'request_types' + + type_id = db.Column(db.SmallInteger, primary_key=True) + type_name = db.Column(db.String) + + +@functools.lru_cache(maxsize=1) +async def get_types_dict(): + result = await db.all(RequestType.query) + types_dict = [(i.type_id, i.type_name) for i in result] + return dict(types_dict) + + +async def get_types_by_str_list(str_list: List[str]) -> List[RequestType]: + '''Get a list of RequestTypes from their type_names''' + result = await db.all( + RequestType.query.where( + RequestType.type_name.in_(str_list) + ) + ) + return result + + +async def get_types_by_int_list(int_list: List[int]) -> List[RequestType]: + '''Get a list of RequestTypes from their type_names''' + result = await db.all( + RequestType.query.where( + RequestType.type_id.in_(int_list) + ) + ) + return result diff --git a/server/api/code/lacity_data_api/models/service_request.py b/server/api/code/lacity_data_api/models/service_request.py new file mode 100644 index 000000000..457000ac3 --- /dev/null +++ b/server/api/code/lacity_data_api/models/service_request.py @@ -0,0 +1,27 @@ +from typing import List + +from . 
class ServiceRequest(db.Model):
    """Gino model for the service_requests materialized view.

    request_id is the srnumber with its leading "1-" removed.
    """
    __tablename__ = 'service_requests'

    request_id = db.Column(db.Integer, primary_key=True)
    created_date = db.Column(db.Date)
    closed_date = db.Column(db.Date)  # NULL while the request is open
    type_id = db.Column(db.SmallInteger)
    council_id = db.Column(db.SmallInteger)
    region_id = db.Column(db.SmallInteger)
    address = db.Column(db.String)
    latitude = db.Column(db.Float)
    longitude = db.Column(db.Float)


async def get_open_requests() -> List[ServiceRequest]:
    '''Get all service requests that do not have a closed_date yet.'''
    # Docstring fixed: the original was copy-pasted from
    # request_type.py and described a different function.
    # .is_(None) is the idiomatic SQLAlchemy NULL test and avoids
    # the flake8-suppressed `== None` comparison.
    return await db.all(
        ServiceRequest.query.where(
            ServiceRequest.closed_date.is_(None)
        )
    )
@dataclass
class Set:
    """One side of a comparison: a district kind and its district ids.

    district must be "nc" (neighborhood council) or "cc" (city council).
    """
    district: str
    list: List[int]

    @validator('district')
    def district_is_valid(cls, v):
        assert v in ['cc', 'nc'], 'district must be either "nc" or "cc".'
        return v

    def __getitem__(self, item):
        # Allow dict-style access (e.g. set1['district']) for legacy
        # callers. First parameter renamed from the misleading `cls`:
        # this is an instance method, not a classmethod.
        return getattr(self, item)
def _to_epoch(value):
    """Convert a datetime to integer epoch seconds; pass falsy values through."""
    return int(value.timestamp()) if value else value


@router.get("/servicerequest/{srnumber}")
async def get_service_request_by_string(srnumber: str):
    """Look up a single service request by its srnumber.

    Date fields are converted to epoch seconds for app compatibility.
    """
    result = requests.item_query(srnumber)
    # NOTE: the original used strftime('%s'), a non-portable glibc
    # extension (unsupported on Windows); datetime.timestamp() is the
    # portable equivalent for local-time naive datetimes.
    for field in ('createddate', 'closeddate', 'updateddate', 'servicedate'):
        if result.get(field):
            result[field] = _to_epoch(result[field])
    return result
@router.post("/map/pins", response_model=Pins)
async def get_pins(filter: Filter):
    """Legacy pins endpoint: forward the filter to the map service."""
    return await map.pins(
        startDate=filter.startDate,
        endDate=filter.endDate,
        requestTypes=filter.requestTypes,
        ncList=filter.ncList
    )
class RegionModel(BaseModel):
    """Pydantic schema used to serialize Region ORM rows."""
    region_id: int
    region_name: str
    latitude: float
    longitude: float

    class Config:
        orm_mode = True


Items = List[RegionModel]


@router.get("/", response_model=Items)
async def get_all_regions():
    """Return every region."""
    return await db.all(Region.query)


@router.get("/{id}")
async def get_region(id: int):
    """Return a single region by id; responds 404 when it does not exist."""
    region = await Region.get_or_404(id)
    return region.to_dict()
@router.get("/", response_model=Items)
async def get_all_service_requests(skip: int = 0, limit: int = 100):
    """Paginated list of service requests.

    Streams rows through a server-side cursor: skips the first `skip`
    rows, then returns the next `limit` rows.
    """
    async with db.transaction():
        cursor = await ServiceRequest.query.gino.iterate()
        if skip > 0:
            await cursor.forward(skip)  # skip the first `skip` rows
        result = await cursor.many(limit)  # then fetch the next `limit` rows

    return result
@router.get("/status/api")
async def shim_get_api_status():
    """Legacy-compatible API status endpoint.

    Warms the in-process cache and returns placeholder build metadata.
    """
    now = datetime.datetime.now()
    last_pulled = datetime.datetime.now()

    await build_cache()

    # SELECT last_pulled FROM metadata
    return {
        'currentTime': now,
        'gitSha': "DEVELOPMENT",
        'version': "0.1.1",
        'lastPulled': last_pulled
    }
+ """) +async def shim_get_service_request(srnumber: str): + id = int(srnumber[2:]) + result = await service_request.ServiceRequest.get_or_404(id) + return result.to_dict() + + +# TODO: return format is slightly different than current +@router.post("/open-requests") +async def get_open_requests(): + result = await service_request.get_open_requests() + + requests_list = [] + + types_dict = await request_type.get_types_dict() + + for i in result: + requests_list.append({ + 'srnumber': f"1-{i.request_id}", + 'requesttype': types_dict.get(i.type_id), + 'latitude': i.latitude, + 'longitude': i.longitude + }) + + return requests_list + + +@router.post("/map/clusters") +async def get_clusters(filter: Filter): + # convert type names to type ids + request_types = await request_type.get_types_by_str_list(filter.requestTypes) + type_ids = [i.type_id for i in request_types] + + result = await clusters.get_clusters_for_bounds( + filter.startDate, + filter.endDate, + type_ids, + filter.ncList, + filter.zoom, + filter.bounds + ) + + return result + + +# TODO: tries clustering by district and NC first +@router.post("/new/clusters") +async def shim_get_clusters(filter: Filter): + # have to convert the funky date formats + start_date = datetime.datetime.strptime(filter.startDate, '%m/%d/%Y') + end_date = datetime.datetime.strptime(filter.endDate, '%m/%d/%Y') + + # convert type names to type ids + request_types = await request_type.get_types_by_str_list(filter.requestTypes) + type_ids = [i.type_id for i in request_types] + + zoom = filter.zoom or 10 + + if zoom < 12: + # get region clusters + result = await clusters.get_clusters_for_regions( + start_date, + end_date, + type_ids, + filter.ncList, + filter.zoom + ) + elif zoom < 14: + # get council clusters + result = await clusters.get_clusters_for_councils( + start_date, + end_date, + type_ids, + filter.ncList, + filter.zoom + ) + else: + # use pysupercluster to cluster viewing area + result = await clusters.get_clusters_for_bounds( 
@router.post("/map/heat")
async def shim_get_heatmap(filter: Filter):
    """Heatmap shim: return raw points matching the filter."""
    # Translate request-type names into their integer ids.
    matching_types = await request_type.get_types_by_str_list(filter.requestTypes)
    type_ids = [t.type_id for t in matching_types]

    return await clusters.get_points(
        filter.startDate,
        filter.endDate,
        type_ids,
        filter.ncList
    )
council.get_councils_dict() + if cache.get('regions_dict') is None: + cache['regions_dict'] = await region.get_regions_dict() diff --git a/server/api/code/run.py b/server/api/code/run.py new file mode 100644 index 000000000..ddf794423 --- /dev/null +++ b/server/api/code/run.py @@ -0,0 +1,10 @@ +import os + +if __name__ == "__main__": + import uvicorn + + uvicorn.run( + "lacity_data_api.asgi:app", + host=os.getenv("APP_HOST", "127.0.0.1"), + port=int(os.getenv("APP_PORT", "5000")), + ) diff --git a/server/api/requirements.txt b/server/api/requirements.txt index 54286f51d..33bb3d8e1 100644 --- a/server/api/requirements.txt +++ b/server/api/requirements.txt @@ -1,12 +1,24 @@ aiofiles==0.5.0 +alembic==1.4.2 +asyncpg==0.21.0 attrs==20.1.0 certifi==2020.6.20 chardet==3.0.4 +click==7.1.2 +click-plugins==1.1.1 +cligj==0.5.0 +fastapi==0.61.1 +Fiona==1.8.13.post1 flake8==3.8.3 +geopandas==0.8.1 +gino==1.0.1 +gino-sanic==0.1.0 +gino-starlette==0.1.1 +gunicorn==20.0.4 h11==0.8.1 h2==3.2.0 hpack==3.0.0 -hstspreload==2020.8.18 +hstspreload==2020.8.25 http3==0.6.7 httptools==0.1.1 httpx==0.11.1 @@ -15,9 +27,12 @@ idna==2.10 immutables==0.14 importlib-metadata==1.7.0 iniconfig==1.0.1 +Mako==1.1.3 +MarkupSafe==1.1.1 mccabe==0.6.1 -more-itertools==8.4.0 +more-itertools==8.5.0 multidict==4.7.6 +munch==2.5.0 numpy==1.19.1 packaging==20.4 pandas==1.1.1 @@ -25,13 +40,16 @@ pluggy==0.13.1 psycopg2-binary==2.8.5 py==1.9.0 pycodestyle==2.6.0 +pydantic==1.6.1 pyflakes==2.2.0 pyparsing==2.4.7 +pyproj==2.6.1.post1 pysupercluster==0.7.6 pytest==6.0.1 pytest-asyncio==0.14.0 python-dateutil==2.8.1 python-dotenv==0.14.0 +python-editor==1.0.4 python-http-client==3.3.1 pytz==2020.1 redis==3.5.3 @@ -43,15 +61,18 @@ sanic-compress==0.1.1 Sanic-Cors==0.10.0.post3 Sanic-Plugins-Framework==0.9.3 sendgrid==6.4.6 +Shapely==1.7.1 six==1.15.0 sniffio==1.1.0 sodapy==2.1.0 SQLAlchemy==1.3.19 starkbank-ecdsa==1.0.0 +starlette==0.13.6 tabulate==0.8.7 toml==0.10.1 ujson==3.1.0 urllib3==1.25.10 +uvicorn==0.11.8 
uvloop==0.14.0 websockets==8.1 zipp==3.1.0 diff --git a/server/api/src/db/conn.py b/server/api/src/db/conn.py index 97b35be63..eacbce994 100644 --- a/server/api/src/db/conn.py +++ b/server/api/src/db/conn.py @@ -8,7 +8,7 @@ def get_engine(url): - ATTEMPTS = 5 + ATTEMPTS = 3 DELAY = 3 def fail(message): @@ -29,7 +29,7 @@ def fail(message): engine.connect() except Exception: if attempt < ATTEMPTS: - log(f'Could not connect to DB, retrying in {DELAY}') + log(f'Could not connect to DB ({engine.url}), retrying in {DELAY}') time.sleep(DELAY) attempt += 1 continue diff --git a/server/api/src/db/requests/views.py b/server/api/src/db/requests/views.py index 9680cdba3..929eb7ec9 100644 --- a/server/api/src/db/requests/views.py +++ b/server/api/src/db/requests/views.py @@ -66,6 +66,77 @@ def create(): """) + # TODO: this is a potential new refactor of the database not currently in API + exec_sql(""" + + CREATE MATERIALIZED VIEW regions AS + WITH join_table AS ( + select distinct apc + from requests + where apc is not null + ) SELECT + ROW_NUMBER () OVER (order by apc) as region_id, + left(apc, -4) as region_name + FROM join_table; + + CREATE UNIQUE INDEX ON regions(region_id); + + ------------------------------------------ + + CREATE MATERIALIZED VIEW councils AS + SELECT DISTINCT on (nc) + nc as council_id, + ncname as council_name, + region_id + FROM requests + LEFT JOIN regions ON left(requests.apc, -4) = regions.region_name + WHERE requests.nc is not null + ORDER BY nc, createddate desc; + + CREATE UNIQUE INDEX ON councils(council_id); + CREATE INDEX ON councils(region_id); + + --add shortname, geometry, centroid, d and w websites + + ------------------------------------------ + + CREATE MATERIALIZED VIEW request_types AS + WITH join_table AS ( + select distinct requesttype + from requests + ) SELECT + ROW_NUMBER () OVER (order by requesttype) as type_id, + requesttype as type_name + FROM join_table; + + CREATE UNIQUE INDEX ON request_types(type_id); + + --colors: 
primary/alt, abbreviations + + ------------------------------------------ + + CREATE MATERIALIZED VIEW service_requests AS + SELECT right(requests.srnumber::VARCHAR(12), -2)::INTEGER as request_id, + requests.createddate::DATE as created_date, + requests.closeddate::DATE as closed_date, + request_types.type_id as type_id, + requests.nc::SMALLINT as council_id, + requests.address::VARCHAR(100), + requests.latitude, + requests.longitude + FROM requests, request_types + WHERE + requests.latitude IS NOT NULL + AND requests.longitude IS NOT NULL + AND type_name = requests.requesttype; + + CREATE UNIQUE INDEX ON service_requests(request_id); + CREATE INDEX ON service_requests(created_date); + CREATE INDEX ON service_requests(type_id); + CREATE INDEX ON service_requests(council_id); + + """) + def refresh(): log('\nRefreshing views') @@ -73,5 +144,5 @@ def refresh(): REFRESH MATERIALIZED VIEW CONCURRENTLY map; REFRESH MATERIALIZED VIEW CONCURRENTLY vis; REFRESH MATERIALIZED VIEW CONCURRENTLY open_requests; - + REFRESH MATERIALIZED VIEW CONCURRENTLY service_requests; """) diff --git a/server/api/src/settings.py b/server/api/src/settings.py index 6dd328b9d..7445a7a2e 100644 --- a/server/api/src/settings.py +++ b/server/api/src/settings.py @@ -1,6 +1,6 @@ from utils.parse_env import env, to -from dotenv import load_dotenv -load_dotenv() +# from dotenv import load_dotenv +# load_dotenv() class Version: diff --git a/server/api/tests/__init__.py b/server/api/tests/__init__.py index 9cd84a4c4..fbdbc51ca 100644 --- a/server/api/tests/__init__.py +++ b/server/api/tests/__init__.py @@ -2,6 +2,14 @@ import sys from os.path import join, dirname +if os.getenv("DATABASE_URL") is None: + os.environ["DATABASE_URL"] = "postgresql://311_user:311_pass@localhost:5433/311_db" + +os.environ["PICKLEBASE_ENABLED"] = "False" +os.environ["PICKLECACHE_ENABLED"] = "False" +os.environ["TMP_DIR"] = "./__tmp__" + +print(os.environ) sys.path.append(join(dirname(__file__), '../src')) diff --git 
a/server/docker-compose.yml b/server/docker-compose.yml index fdbbd20cb..360abdfe3 100644 --- a/server/docker-compose.yml +++ b/server/docker-compose.yml @@ -5,6 +5,7 @@ services: container_name: 311-postgres image: postgres restart: always + env_file: .env environment: POSTGRES_USER: ${DB_USER} POSTGRES_PASSWORD: ${DB_PASS} @@ -15,50 +16,28 @@ services: volumes: - backend_data:/var/lib/postgresql/data - redis: - container_name: 311-redis - build: ./redis - restart: always - expose: - - 6379 + # redis: + # container_name: 311-redis + # build: ./redis + # restart: always + # expose: + # - 6379 api: container_name: 311-api build: ./api restart: ${API_RESTART_POLICY} env_file: .env + environment: + DATABASE_URL: postgresql://${DB_USER}:${DB_PASS}@db:5432/${DB_NAME} ports: - - target: ${PORT} + - target: ${APP_PORT} published: ${API_HOST_PORT} volumes: - ./api/:/home/api depends_on: - db - - redis - - adminer: - container_name: postgres-dashboard - image: adminer - ports: - - 8080:8080 - - rebrow: - container_name: redis-dashboard - image: marian/rebrow - links: - - redis:redis - ports: - - 5001:5001 - - # jupyter: - # container_name: 311-notebook - # image: jupyter/scipy-notebook - # environment: - # DATABASE_URL: postgresql://${DB_USER}:${DB_PASS}@db:5432/${DB_NAME} - # ports: - # - 8888:8888 - # volumes: - # - ../:/home/jovyan/work + # - redis volumes: backend_data: diff --git a/server/postman/collections/311-CI.postman_collection.json b/server/postman/collections/311-CI.postman_collection.json index 5845e9679..445bf487e 100644 --- a/server/postman/collections/311-CI.postman_collection.json +++ b/server/postman/collections/311-CI.postman_collection.json @@ -560,8 +560,8 @@ "script": { "id": "eaf88891-878e-4ab6-b1bf-b03101326461", "exec": [ - "pm.test(\"Status code is 400\", function () {", - " pm.response.to.have.status(400);", + "pm.test(\"Status code is 400 or 422\", function () {", + " pm.expect(pm.response.code).to.be.oneOf([400, 422]);", "});" ], "type": 
"text/javascript" @@ -608,8 +608,8 @@ "script": { "id": "7b817388-1582-4ae6-a528-55cd117e6e85", "exec": [ - "pm.test(\"Status code is 400\", function () {", - " pm.response.to.have.status(400);", + "pm.test(\"Status code is 400 or 422\", function () {", + " pm.expect(pm.response.code).to.be.oneOf([400, 422]);", "});" ], "type": "text/javascript" @@ -655,8 +655,8 @@ "script": { "id": "b9124b20-d2fc-4207-bf57-43be22772f69", "exec": [ - "pm.test(\"Status code is 400\", function () {", - " pm.response.to.have.status(400);", + "pm.test(\"Status code is 400 or 422\", function () {", + " pm.expect(pm.response.code).to.be.oneOf([400, 422]);", "});", "" ], @@ -704,10 +704,9 @@ "script": { "id": "4bfd4f4b-3f04-4208-909b-273fec1fff04", "exec": [ - "pm.test(\"Status code is 400\", function () {", - " pm.response.to.have.status(400);", - "});", - "" + "pm.test(\"Status code is 400 or 422\", function () {", + " pm.expect(pm.response.code).to.be.oneOf([400, 422]);", + "});" ], "type": "text/javascript" }