From 8f7b6abbdeadf0bb9dc1bc8a0f08076e6c5d2e11 Mon Sep 17 00:00:00 2001 From: Matthew Webster Date: Thu, 27 Aug 2020 13:04:08 -0700 Subject: [PATCH 01/16] First set of API refactor changes --- server/api/Pipfile | 4 + server/api/Pipfile.lock | 151 +++++++++++++++++- server/api/code/311_data_api/__init__.py | 0 server/api/code/311_data_api/asgi.py | 3 + server/api/code/311_data_api/config.py | 33 ++++ server/api/code/311_data_api/main.py | 36 +++++ .../api/code/311_data_api/models/__init__.py | 14 ++ .../api/code/311_data_api/models/council.py | 9 ++ server/api/code/311_data_api/models/region.py | 8 + .../code/311_data_api/models/request_type.py | 8 + .../311_data_api/models/service_request.py | 14 ++ .../api/code/311_data_api/routers/__init__.py | 0 .../api/code/311_data_api/routers/councils.py | 31 ++++ server/api/code/311_data_api/routers/index.py | 95 +++++++++++ .../api/code/311_data_api/routers/regions.py | 31 ++++ .../311_data_api/routers/request_types.py | 31 ++++ .../311_data_api/routers/service_requests.py | 44 +++++ server/api/code/run.py | 13 ++ server/api/src/db/requests/views.py | 73 ++++++++- 19 files changed, 592 insertions(+), 6 deletions(-) create mode 100644 server/api/code/311_data_api/__init__.py create mode 100644 server/api/code/311_data_api/asgi.py create mode 100644 server/api/code/311_data_api/config.py create mode 100644 server/api/code/311_data_api/main.py create mode 100644 server/api/code/311_data_api/models/__init__.py create mode 100644 server/api/code/311_data_api/models/council.py create mode 100644 server/api/code/311_data_api/models/region.py create mode 100644 server/api/code/311_data_api/models/request_type.py create mode 100644 server/api/code/311_data_api/models/service_request.py create mode 100644 server/api/code/311_data_api/routers/__init__.py create mode 100644 server/api/code/311_data_api/routers/councils.py create mode 100644 server/api/code/311_data_api/routers/index.py create mode 100644 
server/api/code/311_data_api/routers/regions.py create mode 100644 server/api/code/311_data_api/routers/request_types.py create mode 100644 server/api/code/311_data_api/routers/service_requests.py create mode 100644 server/api/code/run.py diff --git a/server/api/Pipfile b/server/api/Pipfile index db8545fc4..f35734683 100644 --- a/server/api/Pipfile +++ b/server/api/Pipfile @@ -24,6 +24,10 @@ sendgrid = "*" sodapy = "*" SQLAlchemy = "*" tabulate = "*" +gino = {extras = ["starlette"], version = "*"} +fastapi = "*" +uvicorn = "*" +gunicorn = "*" [requires] python_version = "3.7" diff --git a/server/api/Pipfile.lock b/server/api/Pipfile.lock index e62ac529a..12a7dc482 100644 --- a/server/api/Pipfile.lock +++ b/server/api/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "9c31d5fffafd7b7faebd5a67959ccc2953e1c4d86e9a1f7ed7f63160cf14963b" + "sha256": "6cf778b290d427ebf1790460fe80f042f45f68644247d5b28c4ad1654bc11f5c" }, "pipfile-spec": 6, "requires": { @@ -23,6 +23,37 @@ ], "version": "==0.5.0" }, + "asyncpg": { + "hashes": [ + "sha256:09badce47a4645cfe523cc8a182bd047d5d62af0caaea77935e6a3c9e77dc364", + "sha256:22d161618b59e4b56fb2a5cc956aa9eeb336d07cae924a5b90c9aa1c2d137f15", + "sha256:28584783dd0d21b2a0db3bfe54fb12f21425a4cc015e4419083ea99e6de0de9b", + "sha256:308b8ba32c42ea1ed84c034320678ec307296bb4faf3fbbeb9f9e20b46db99a5", + "sha256:3ade59cef35bffae6dbc6f5f3ef56e1d53c67f0a7adc3cc4c714f07568d2d717", + "sha256:4421407b07b4e22291a226d9de0bf6f3ea8158aa1c12d83bfedbf5c22e13cd55", + "sha256:53cb2a0eb326f61e34ef4da2db01d87ce9c0ebe396f65a295829df334e31863f", + "sha256:615c7e3adb46e1f2e3aff45e4ee9401b4f24f9f7153e5530a0753369be72a5c6", + "sha256:68f7981f65317a5d5f497ec76919b488dbe0e838f8b924e7517a680bdca0f308", + "sha256:6b7807bfedd24dd15cfb2c17c60977ce01410615ecc285268b5144a944ec97ff", + "sha256:7e51d1a012b779e0ebf0195f80d004f65d3c60cc06f0fa1cef9d3e536262abbd", + "sha256:7ee29c4707eb8fb3d3a0348ac4495e06f4afaca3ee38c3bebedc9c8b239125ff", + 
"sha256:823eca36108bd64a8600efe7bbf1230aa00f2defa3be42852f3b61ab40cf1226", + "sha256:8587e206d78e739ca83a40c9982e03b28f8904c95a54dc782da99e86cf768f73", + "sha256:888593b6688faa7ec1c97ff7f2ca3b5a5b8abb15478fe2a13c5012b607a28737", + "sha256:915cebc8a7693c8a5e89804fa106678dbedcc50d0270ebab0b75f16e668bd59b", + "sha256:a4c1feb285ec3807ecd5b54ab718a3d065bb55c93ebaf800670eadde31484be8", + "sha256:aa2e0cb14c01a2f58caeeca7196681b30aa22dd22c82845560b401df5e98e171", + "sha256:b1b10916c006e5c2c0dcd5dadeb38cbf61ecd20d66c50164e82f31c22c7e329d", + "sha256:dddf4d4c5e781310a36529c3c87c1746837c2d2c7ec0f2ec4e4f06450d83c50a", + "sha256:dfd491e9865e64a3e91f1587b1d88d71dde1cfb850429253a73d4d44b98c3a0f", + "sha256:e7bfb9269aeb11d78d50accf1be46823683ced99209b7199e307cdf7da849522", + "sha256:ea26604932719b3612541e606508d9d604211f56a65806ccf8c92c64104f4f8a", + "sha256:ecd5232cf64f58caac3b85103f1223fdf20e9eb43bfa053c56ef9e5dd76ab099", + "sha256:f2d1aa890ffd1ad062a38b7ff7488764b3da4b0a24e0c83d7bbb1d1a6609df15" + ], + "markers": "python_full_version >= '3.5.0'", + "version": "==0.21.0" + }, "certifi": { "hashes": [ "sha256:5930595817496dd21bb8dc35dad090f1c2cd0adfaf21204bf6732ca5d8ee34d3", @@ -37,6 +68,14 @@ ], "version": "==3.0.4" }, + "click": { + "hashes": [ + "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a", + "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", + "version": "==7.1.2" + }, "contextvars": { "hashes": [ "sha256:f38c908aaa59c14335eeea12abea5f443646216c4e29380d7bf34d2018e2c39e" @@ -44,6 +83,48 @@ "markers": "python_version < '3.7'", "version": "==2.4" }, + "dataclasses": { + "hashes": [ + "sha256:3459118f7ede7c8bea0fe795bff7c6c2ce287d01dd226202f7c9ebc0610a7836", + "sha256:494a6dcae3b8bcf80848eea2ef64c0cc5cd307ffc263e17cdf42f3e5420808e6" + ], + "markers": "python_version < '3.7'", + "version": "==0.7" + }, + "fastapi": { + "hashes": [ + 
"sha256:29c12dd0d4ac825d13c2db4762d2863281c18085ae521825829182e977fd25ac", + "sha256:d0b3f629f8d165a21ee082bf31e1697c391c1cdf940304408614b5b7c59d1fb3" + ], + "index": "pypi", + "version": "==0.61.0" + }, + "gino": { + "extras": [ + "starlette" + ], + "hashes": [ + "sha256:56df57cfdefbaf897a7c4897c265a0e91a8cca80716fb64f7d3cf6d501fdfb3d", + "sha256:fe4189e82fe9d20c4a5f03fc775fb91c168061c5176b4c95623caeef22316150" + ], + "index": "pypi", + "version": "==1.0.1" + }, + "gino-starlette": { + "hashes": [ + "sha256:a1afe419b34146449a502a5483085a60a75a46639534fff50510172b47c930fb", + "sha256:de6ec87168097a52668359c842e9e3be4d339423c7805c615377975a1a19cb6c" + ], + "version": "==0.1.1" + }, + "gunicorn": { + "hashes": [ + "sha256:1904bb2b8a43658807108d59c3f3d56c2b6121a701161de0ddf9ad140073c626", + "sha256:cd4a810dd51bf497552cf3f863b575dabd73d6ad6a91075b65936b151cbf4f9c" + ], + "index": "pypi", + "version": "==20.0.4" + }, "h11": { "hashes": [ "sha256:acca6a44cb52a32ab442b1779adf0875c443c689e9e028f8d831a3769f9c5208", @@ -67,11 +148,11 @@ }, "hstspreload": { "hashes": [ - "sha256:13cf2e9fcd064cd81d220432de9a66dd7e4f10862a03574c45e5f61fc522f312", - "sha256:5e3b6b2376c6f412086ee21cdd29cd5e0af5b28c967e5f1f026323d0f31dc84b" + "sha256:3129613419c13ea62411ec7375d79840e28004cbb6a585909ddcbeee401bea14", + "sha256:c96401eca4669340b423abd711d2d5d03ddf0685461f95e9cfe500d5e9acf3d2" ], "markers": "python_version >= '3.6'", - "version": "==2020.8.18" + "version": "==2020.8.25" }, "http3": { "hashes": [ @@ -95,6 +176,7 @@ "sha256:fa3cd71e31436911a44620473e873a256851e1f53dee56669dae403ba41756a4", "sha256:fea04e126014169384dee76a153d4573d90d0cbd1d12185da089f73c78390437" ], + "markers": "sys_platform != 'win32' and sys_platform != 'cygwin' and platform_python_implementation != 'PyPy'", "version": "==0.1.1" }, "httpx": { @@ -138,6 +220,14 @@ "markers": "python_version >= '3.5'", "version": "==0.14" }, + "importlib-metadata": { + "hashes": [ + 
"sha256:90bb658cdbbf6d1735b6341ce708fc7024a3e14e99ffdc5783edea9f9b077f83", + "sha256:dc15b2969b4ce36305c51eebe62d418ac7791e9a157911d58bfb1f9ccd8e2070" + ], + "markers": "python_version < '3.8'", + "version": "==1.7.0" + }, "multidict": { "hashes": [ "sha256:1ece5a3369835c20ed57adadc663400b5525904e53bae59ec854a5d36b39b21a", @@ -251,6 +341,29 @@ "index": "pypi", "version": "==2.8.5" }, + "pydantic": { + "hashes": [ + "sha256:1783c1d927f9e1366e0e0609ae324039b2479a1a282a98ed6a6836c9ed02002c", + "sha256:2dc946b07cf24bee4737ced0ae77e2ea6bc97489ba5a035b603bd1b40ad81f7e", + "sha256:2de562a456c4ecdc80cf1a8c3e70c666625f7d02d89a6174ecf63754c734592e", + "sha256:36dbf6f1be212ab37b5fda07667461a9219c956181aa5570a00edfb0acdfe4a1", + "sha256:3fa799f3cfff3e5f536cbd389368fc96a44bb30308f258c94ee76b73bd60531d", + "sha256:40d765fa2d31d5be8e29c1794657ad46f5ee583a565c83cea56630d3ae5878b9", + "sha256:418b84654b60e44c0cdd5384294b0e4bc1ebf42d6e873819424f3b78b8690614", + "sha256:4900b8820b687c9a3ed753684337979574df20e6ebe4227381d04b3c3c628f99", + "sha256:530d7222a2786a97bc59ee0e0ebbe23728f82974b1f1ad9a11cd966143410633", + "sha256:54122a8ed6b75fe1dd80797f8251ad2063ea348a03b77218d73ea9fe19bd4e73", + "sha256:6c3f162ba175678218629f446a947e3356415b6b09122dcb364e58c442c645a7", + "sha256:b49c86aecde15cde33835d5d6360e55f5e0067bb7143a8303bf03b872935c75b", + "sha256:b5b3489cb303d0f41ad4a7390cf606a5f2c7a94dcba20c051cd1c653694cb14d", + "sha256:cf3933c98cb5e808b62fae509f74f209730b180b1e3c3954ee3f7949e083a7df", + "sha256:eb75dc1809875d5738df14b6566ccf9fd9c0bcde4f36b72870f318f16b9f5c20", + "sha256:f769141ab0abfadf3305d4fcf36660e5cf568a666dd3efab7c3d4782f70946b1", + "sha256:f8af9b840a9074e08c0e6dc93101de84ba95df89b267bf7151d74c553d66833b" + ], + "markers": "python_version >= '3.6'", + "version": "==1.6.1" + }, "pysupercluster": { "hashes": [ "sha256:1e9739e9dad6126f346b5816dd6d9affc31d8482ef89fd30ff87f8412530726a" @@ -386,10 +499,13 @@ 
"sha256:072766c3bd09294d716b2d114d46ffc5ccf8ea0b714a4e1c48253014b771c6bb", "sha256:107d4af989831d7b091e382d192955679ec07a9209996bf8090f1f539ffc5804", "sha256:15c0bcd3c14f4086701c33a9e87e2c7ceb3bcb4a246cd88ec54a49cf2a5bd1a6", + "sha256:26c5ca9d09f0e21b8671a32f7d83caad5be1f6ff45eef5ec2f6fd0db85fc5dc0", "sha256:276936d41111a501cf4a1a0543e25449108d87e9f8c94714f7660eaea89ae5fe", "sha256:3292a28344922415f939ee7f4fc0c186f3d5a0bf02192ceabd4f1129d71b08de", "sha256:33d29ae8f1dc7c75b191bb6833f55a19c932514b9b5ce8c3ab9bc3047da5db36", "sha256:3bba2e9fbedb0511769780fe1d63007081008c5c2d7d715e91858c94dbaa260e", + "sha256:465c999ef30b1c7525f81330184121521418a67189053bcf585824d833c05b66", + "sha256:51064ee7938526bab92acd049d41a1dc797422256086b39c08bafeffb9d304c6", "sha256:5a49e8473b1ab1228302ed27365ea0fadd4bf44bc0f9e73fe38e10fdd3d6b4fc", "sha256:618db68745682f64cedc96ca93707805d1f3a031747b5a0d8e150cfd5055ae4d", "sha256:6547b27698b5b3bbfc5210233bd9523de849b2bb8a0329cd754c9308fc8a05ce", @@ -404,6 +520,7 @@ "sha256:9e865835e36dfbb1873b65e722ea627c096c11b05f796831e3a9b542926e979e", "sha256:aa0554495fe06172b550098909be8db79b5accdf6ffb59611900bea345df5eba", "sha256:b595e71c51657f9ee3235db8b53d0b57c09eee74dfb5b77edff0e46d2218dc02", + "sha256:b6ff91356354b7ff3bd208adcf875056d3d886ed7cef90c571aef2ab8a554b12", "sha256:b70bad2f1a5bd3460746c3fb3ab69e4e0eb5f59d977a23f9b66e5bdc74d97b86", "sha256:c7adb1f69a80573698c2def5ead584138ca00fff4ad9785a4b0b2bf927ba308d", "sha256:c898b3ebcc9eae7b36bd0b4bbbafce2d8076680f6868bcbacee2d39a7a9726a7", @@ -421,6 +538,14 @@ ], "version": "==1.0.0" }, + "starlette": { + "hashes": [ + "sha256:bd2ffe5e37fb75d014728511f8e68ebf2c80b0fa3d04ca1479f4dc752ae31ac9", + "sha256:ebe8ee08d9be96a3c9f31b2cb2a24dbdf845247b745664bd8a3f9bd0c977fdbc" + ], + "markers": "python_version >= '3.6'", + "version": "==0.13.6" + }, "tabulate": { "hashes": [ "sha256:ac64cb76d53b1231d364babcd72abbb16855adac7de6665122f97b593f1eb2ba", @@ -460,6 +585,14 @@ "markers": "python_version >= '2.7' and 
python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'", "version": "==1.25.10" }, + "uvicorn": { + "hashes": [ + "sha256:46a83e371f37ea7ff29577d00015f02c942410288fb57def6440f2653fff1d26", + "sha256:4b70ddb4c1946e39db9f3082d53e323dfd50634b95fd83625d778729ef1730ef" + ], + "index": "pypi", + "version": "==0.11.8" + }, "uvloop": { "hashes": [ "sha256:08b109f0213af392150e2fe6f81d33261bb5ce968a288eb698aad4f46eb711bd", @@ -472,7 +605,7 @@ "sha256:e7514d7a48c063226b7d06617cbb12a14278d4323a065a8d46a7962686ce2e95", "sha256:f07909cd9fc08c52d294b1570bba92186181ca01fe3dc9ffba68955273dd7362" ], - "markers": "sys_platform != 'win32' and implementation_name == 'cpython'", + "markers": "sys_platform != 'win32' and implementation_name == 'cpython' and sys_platform != 'win32' and sys_platform != 'cygwin' and platform_python_implementation != 'PyPy'", "version": "==0.14.0" }, "websockets": { @@ -502,6 +635,14 @@ ], "markers": "python_full_version >= '3.6.1'", "version": "==8.1" + }, + "zipp": { + "hashes": [ + "sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b", + "sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96" + ], + "markers": "python_version >= '3.6'", + "version": "==3.1.0" } }, "develop": { diff --git a/server/api/code/311_data_api/__init__.py b/server/api/code/311_data_api/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/server/api/code/311_data_api/asgi.py b/server/api/code/311_data_api/asgi.py new file mode 100644 index 000000000..d842254bd --- /dev/null +++ b/server/api/code/311_data_api/asgi.py @@ -0,0 +1,3 @@ +from .main import get_app + +app = get_app() diff --git a/server/api/code/311_data_api/config.py b/server/api/code/311_data_api/config.py new file mode 100644 index 000000000..a912e8e09 --- /dev/null +++ b/server/api/code/311_data_api/config.py @@ -0,0 +1,33 @@ +from sqlalchemy.engine.url import URL, make_url +from starlette.config import Config +from starlette.datastructures 
import Secret + +config = Config(".env") + +DB_DRIVER = config("DB_DRIVER", default="postgresql") +DB_HOST = config("DB_HOST", default=None) +DB_PORT = config("DB_PORT", cast=int, default=None) +DB_USER = config("DB_USER", default=None) +DB_PASSWORD = config("DB_PASSWORD", cast=Secret, default=None) +DB_DATABASE = config("DB_DATABASE", default=None) +DB_DSN = config( + "DB_DSN", + cast=make_url, + default=URL( + drivername=DB_DRIVER, + username=DB_USER, + password=DB_PASSWORD, + host=DB_HOST, + port=DB_PORT, + database=DB_DATABASE, + ), +) +DB_POOL_MIN_SIZE = config("DB_POOL_MIN_SIZE", cast=int, default=1) +DB_POOL_MAX_SIZE = config("DB_POOL_MAX_SIZE", cast=int, default=16) +DB_ECHO = config("DB_ECHO", cast=bool, default=False) +DB_SSL = config("DB_SSL", default=None) +DB_USE_CONNECTION_FOR_REQUEST = config( + "DB_USE_CONNECTION_FOR_REQUEST", cast=bool, default=True +) +DB_RETRY_LIMIT = config("DB_RETRY_LIMIT", cast=int, default=32) +DB_RETRY_INTERVAL = config("DB_RETRY_INTERVAL", cast=int, default=1) diff --git a/server/api/code/311_data_api/main.py b/server/api/code/311_data_api/main.py new file mode 100644 index 000000000..b8150088c --- /dev/null +++ b/server/api/code/311_data_api/main.py @@ -0,0 +1,36 @@ +import logging + +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware + +from .routers import index, councils, regions, request_types, service_requests +from .models import db + +try: + from importlib.metadata import entry_points +except ImportError: # pragma: no cover + from importlib_metadata import entry_points + +logger = logging.getLogger(__name__) + + +def get_app(): + app = FastAPI(title="311 Data API") + + db.init_app(app) + + app.include_router(index.router) + app.include_router(councils.router, prefix="/councils") + app.include_router(regions.router, prefix="/regions") + app.include_router(request_types.router, prefix="/types") + app.include_router(service_requests.router, prefix="/requests") + + app.add_middleware( + 
CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], + ) + + return app diff --git a/server/api/code/311_data_api/models/__init__.py b/server/api/code/311_data_api/models/__init__.py new file mode 100644 index 000000000..a8638934f --- /dev/null +++ b/server/api/code/311_data_api/models/__init__.py @@ -0,0 +1,14 @@ +from gino.ext.starlette import Gino + +from .. import config + +db = Gino( + dsn=config.DB_DSN, + pool_min_size=config.DB_POOL_MIN_SIZE, + pool_max_size=config.DB_POOL_MAX_SIZE, + echo=config.DB_ECHO, + ssl=config.DB_SSL, + use_connection_for_request=config.DB_USE_CONNECTION_FOR_REQUEST, + retry_limit=config.DB_RETRY_LIMIT, + retry_interval=config.DB_RETRY_INTERVAL, +) diff --git a/server/api/code/311_data_api/models/council.py b/server/api/code/311_data_api/models/council.py new file mode 100644 index 000000000..ba0dfc3d9 --- /dev/null +++ b/server/api/code/311_data_api/models/council.py @@ -0,0 +1,9 @@ +from . import db + + +class Council(db.Model): + __tablename__ = "councils" + + council_id = db.Column(db.SmallInteger, primary_key=True) + council_name = db.Column(db.String) + region_id = db.Column(db.SmallInteger) diff --git a/server/api/code/311_data_api/models/region.py b/server/api/code/311_data_api/models/region.py new file mode 100644 index 000000000..b7d7024ff --- /dev/null +++ b/server/api/code/311_data_api/models/region.py @@ -0,0 +1,8 @@ +from . import db + + +class Region(db.Model): + __tablename__ = 'regions' + + region_id = db.Column(db.SmallInteger, primary_key=True) + region_name = db.Column(db.String) diff --git a/server/api/code/311_data_api/models/request_type.py b/server/api/code/311_data_api/models/request_type.py new file mode 100644 index 000000000..0451aeb02 --- /dev/null +++ b/server/api/code/311_data_api/models/request_type.py @@ -0,0 +1,8 @@ +from . 
import db + + +class RequestType(db.Model): + __tablename__ = 'request_types' + + type_id = db.Column(db.SmallInteger, primary_key=True) + type_name = db.Column(db.String) diff --git a/server/api/code/311_data_api/models/service_request.py b/server/api/code/311_data_api/models/service_request.py new file mode 100644 index 000000000..2aaf6c433 --- /dev/null +++ b/server/api/code/311_data_api/models/service_request.py @@ -0,0 +1,14 @@ +from . import db + + +class ServiceRequest(db.Model): + __tablename__ = 'service_requests' + + request_id = db.Column(db.Integer, primary_key=True) + created_date = db.Column(db.Date) + closed_date = db.Column(db.Date) + type_id = db.Column(db.SmallInteger) + council_id = db.Column(db.SmallInteger) + address = db.Column(db.String) + latitude = db.Column(db.Float) + longitude = db.Column(db.Float) diff --git a/server/api/code/311_data_api/routers/__init__.py b/server/api/code/311_data_api/routers/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/server/api/code/311_data_api/routers/councils.py b/server/api/code/311_data_api/routers/councils.py new file mode 100644 index 000000000..1916a2185 --- /dev/null +++ b/server/api/code/311_data_api/routers/councils.py @@ -0,0 +1,31 @@ +from typing import List + +from fastapi import APIRouter +from pydantic import BaseModel + +from ..models.council import Council +from ..models import db + +router = APIRouter() + + +class CouncilModel(BaseModel): + council_id: int + council_name: str + + class Config: + orm_mode = True + + +Items = List[CouncilModel] + + +@router.get("/", response_model=Items) +async def index(): + return await db.all(Council.query) + + +@router.get("/{cid}") +async def get_council(cid: int): + council = await Council.get_or_404(cid) + return council.to_dict() diff --git a/server/api/code/311_data_api/routers/index.py b/server/api/code/311_data_api/routers/index.py new file mode 100644 index 000000000..212d217b3 --- /dev/null +++ 
b/server/api/code/311_data_api/routers/index.py @@ -0,0 +1,95 @@ +from typing import List, Optional +import datetime + +from fastapi import responses +from fastapi import APIRouter +from pydantic import BaseModel + +from services import status, map, visualizations, requests + +router = APIRouter() + + +class Bounds(BaseModel): + north: float + south: float + east: float + west: float + + +class Filter(BaseModel): + startDate: str + endDate: str + ncList: List[int] + requestTypes: List[str] + zoom: Optional[int] = None + bounds: Optional[Bounds] = None + + +@router.get("/") +async def index(): + return {"message": "Hello, new index!"} + + +@router.get("/status/api") +async def status_api(): + result = await status.api() + return result + + +@router.get("/status/db") +async def status_db(): + result = await status.database() + return result + + +@router.get("/status/system") +async def status_system(): + result = await status.system() + return result + + +@router.get("/servicerequest/{srnumber}") +async def get_service_request_by_string(srnumber: str): + # result = await get_service_request(int(srnumber[2:])) + result = requests.item_query(srnumber) + return result + + +@router.post("/map/clusters") +async def get_clusters(filter: Filter): + start_time = datetime.datetime.strptime(filter.startDate, '%m/%d/%Y') + end_time = datetime.datetime.strptime(filter.endDate, '%m/%d/%Y') + + result = await map.pin_clusters(start_time, + end_time, + filter.requestTypes, + filter.ncList, + filter.zoom, + dict(filter.bounds) + ) + return result + + +@router.post("/map/heat") +async def get_heatmap(filter: Filter): + start_time = datetime.datetime.strptime(filter.startDate, '%m/%d/%Y') + end_time = datetime.datetime.strptime(filter.endDate, '%m/%d/%Y') + + result = await map.heatmap(startDate=start_time, + endDate=end_time, + requestTypes=filter.requestTypes, + ncList=filter.ncList) + return responses.JSONResponse(result.tolist()) + + +@router.post("/visualizations") +async def 
get_visualizations(filter: Filter): + start_time = datetime.datetime.strptime(filter.startDate, '%m/%d/%Y') + end_time = datetime.datetime.strptime(filter.endDate, '%m/%d/%Y') + + result = await visualizations.visualizations(startDate=start_time, + endDate=end_time, + requestTypes=filter.requestTypes, + ncList=filter.ncList) + return result diff --git a/server/api/code/311_data_api/routers/regions.py b/server/api/code/311_data_api/routers/regions.py new file mode 100644 index 000000000..72e41091e --- /dev/null +++ b/server/api/code/311_data_api/routers/regions.py @@ -0,0 +1,31 @@ +from typing import List + +from fastapi import APIRouter +from pydantic import BaseModel + +from ..models.region import Region +from ..models import db + +router = APIRouter() + + +class RegionModel(BaseModel): + region_id: int + region_name: str + + class Config: + orm_mode = True + + +Items = List[RegionModel] + + +@router.get("/", response_model=Items) +async def index(): + return await db.all(Region.query) + + +@router.get("/{rid}") +async def get_region(rid: int): + region = await Region.get_or_404(rid) + return region.to_dict() diff --git a/server/api/code/311_data_api/routers/request_types.py b/server/api/code/311_data_api/routers/request_types.py new file mode 100644 index 000000000..451674ddc --- /dev/null +++ b/server/api/code/311_data_api/routers/request_types.py @@ -0,0 +1,31 @@ +from typing import List + +from fastapi import APIRouter +from pydantic import BaseModel + +from ..models.request_type import RequestType +from ..models import db + +router = APIRouter() + + +class RequestTypeModel(BaseModel): + type_id: int + type_name: str + + class Config: + orm_mode = True + + +Items = List[RequestTypeModel] + + +@router.get("/", response_model=Items) +async def index(): + return await db.all(RequestType.query) + + +@router.get("/{tid}") +async def get_request_type(tid: int): + request_type = await RequestType.get_or_404(tid) + return request_type.to_dict() diff --git 
a/server/api/code/311_data_api/routers/service_requests.py b/server/api/code/311_data_api/routers/service_requests.py new file mode 100644 index 000000000..8dc7ae9ae --- /dev/null +++ b/server/api/code/311_data_api/routers/service_requests.py @@ -0,0 +1,44 @@ +from typing import List +import datetime + +from fastapi import APIRouter +from pydantic import BaseModel + +from ..models.service_request import ServiceRequest +from ..models import db + +router = APIRouter() + + +class ServiceRequestModel(BaseModel): + request_id: int + council_id: int + type_id: int + created_date: datetime.date + closed_date: datetime.date + address: str + latitude: float + longitude: float + + class Config: + orm_mode = True + + +Items = List[ServiceRequestModel] + + +@router.get("/", response_model=Items) +async def index(skip: int = 0, limit: int = 100): + async with db.transaction(): + cursor = await ServiceRequest.query.gino.iterate() + if skip > 0: + await cursor.forward(skip) # skip 80 rows + result = await cursor.many(limit) # and retrieve next 10 rows + + return result + + +@router.get("/{srid}") +async def get_service_request(srid: int): + svcreq = await ServiceRequest.get_or_404(srid) + return svcreq.to_dict() diff --git a/server/api/code/run.py b/server/api/code/run.py new file mode 100644 index 000000000..f7a39afd3 --- /dev/null +++ b/server/api/code/run.py @@ -0,0 +1,13 @@ +import os + +if __name__ == "__main__": + import uvicorn + import sys + + print(sys.path) + + uvicorn.run( + "311_data_api.asgi:app", + host=os.getenv("APP_HOST", "127.0.0.1"), + port=int(os.getenv("APP_PORT", "5000")), + ) diff --git a/server/api/src/db/requests/views.py b/server/api/src/db/requests/views.py index 9680cdba3..929eb7ec9 100644 --- a/server/api/src/db/requests/views.py +++ b/server/api/src/db/requests/views.py @@ -66,6 +66,77 @@ def create(): """) + # TODO: this is a potential new refactor of the database not currently in API + exec_sql(""" + + CREATE MATERIALIZED VIEW regions AS + WITH 
join_table AS ( + select distinct apc + from requests + where apc is not null + ) SELECT + ROW_NUMBER () OVER (order by apc) as region_id, + left(apc, -4) as region_name + FROM join_table; + + CREATE UNIQUE INDEX ON regions(region_id); + + ------------------------------------------ + + CREATE MATERIALIZED VIEW councils AS + SELECT DISTINCT on (nc) + nc as council_id, + ncname as council_name, + region_id + FROM requests + LEFT JOIN regions ON left(requests.apc, -4) = regions.region_name + WHERE requests.nc is not null + ORDER BY nc, createddate desc; + + CREATE UNIQUE INDEX ON councils(council_id); + CREATE INDEX ON councils(region_id); + + --add shortname, geometry, centroid, d and w websites + + ------------------------------------------ + + CREATE MATERIALIZED VIEW request_types AS + WITH join_table AS ( + select distinct requesttype + from requests + ) SELECT + ROW_NUMBER () OVER (order by requesttype) as type_id, + requesttype as type_name + FROM join_table; + + CREATE UNIQUE INDEX ON request_types(type_id); + + --colors: primary/alt, abbreviations + + ------------------------------------------ + + CREATE MATERIALIZED VIEW service_requests AS + SELECT right(requests.srnumber::VARCHAR(12), -2)::INTEGER as request_id, + requests.createddate::DATE as created_date, + requests.closeddate::DATE as closed_date, + request_types.type_id as type_id, + requests.nc::SMALLINT as council_id, + requests.address::VARCHAR(100), + requests.latitude, + requests.longitude + FROM requests, request_types + WHERE + requests.latitude IS NOT NULL + AND requests.longitude IS NOT NULL + AND type_name = requests.requesttype; + + CREATE UNIQUE INDEX ON service_requests(request_id); + CREATE INDEX ON service_requests(created_date); + CREATE INDEX ON service_requests(type_id); + CREATE INDEX ON service_requests(council_id); + + """) + def refresh(): log('\nRefreshing views') @@ -73,5 +144,5 @@ def refresh(): REFRESH MATERIALIZED VIEW CONCURRENTLY map; REFRESH MATERIALIZED VIEW CONCURRENTLY 
vis; REFRESH MATERIALIZED VIEW CONCURRENTLY open_requests; - + REFRESH MATERIALIZED VIEW CONCURRENTLY service_requests; """) From d4c65facbd528d36037af62ef6cc73d57d820387 Mon Sep 17 00:00:00 2001 From: Matthew Webster Date: Thu, 27 Aug 2020 17:04:16 -0700 Subject: [PATCH 02/16] added pins route and Gzip --- server/api/code/311_data_api/main.py | 5 +- server/api/code/311_data_api/routers/index.py | 22 +++ .../311_data_api/routers/service_requests.py | 60 ++++++++- .../code/311_data_api/routers/utilities.py | 41 ++++++ server/api/code/README.md | 127 ++++++++++++++++++ server/api/requirements.txt | 12 +- 6 files changed, 263 insertions(+), 4 deletions(-) create mode 100644 server/api/code/311_data_api/routers/utilities.py create mode 100644 server/api/code/README.md diff --git a/server/api/code/311_data_api/main.py b/server/api/code/311_data_api/main.py index b8150088c..6a4a8b983 100644 --- a/server/api/code/311_data_api/main.py +++ b/server/api/code/311_data_api/main.py @@ -2,6 +2,7 @@ from fastapi import FastAPI from fastapi.middleware.cors import CORSMiddleware +from fastapi.middleware.gzip import GZipMiddleware from .routers import index, councils, regions, request_types, service_requests from .models import db @@ -32,5 +33,7 @@ def get_app(): allow_methods=["*"], allow_headers=["*"], ) - + + app.add_middleware(GZipMiddleware) + return app diff --git a/server/api/code/311_data_api/routers/index.py b/server/api/code/311_data_api/routers/index.py index 212d217b3..bc818daa1 100644 --- a/server/api/code/311_data_api/routers/index.py +++ b/server/api/code/311_data_api/routers/index.py @@ -83,6 +83,28 @@ async def get_heatmap(filter: Filter): return responses.JSONResponse(result.tolist()) +class Pin(BaseModel): + srnumber: str + requesttype: str + latitude: float + longitude: float + + +Pins = List[Pin] + + +@router.post("/map/pins", response_model=Pins) +async def get_pins(filter: Filter): + start_time = datetime.datetime.strptime(filter.startDate, '%m/%d/%Y') + 
end_time = datetime.datetime.strptime(filter.endDate, '%m/%d/%Y') + + result = await map.pins(startDate=start_time, + endDate=end_time, + requestTypes=filter.requestTypes, + ncList=filter.ncList) + return result + + @router.post("/visualizations") async def get_visualizations(filter: Filter): start_time = datetime.datetime.strptime(filter.startDate, '%m/%d/%Y') diff --git a/server/api/code/311_data_api/routers/service_requests.py b/server/api/code/311_data_api/routers/service_requests.py index 8dc7ae9ae..96dd74c7c 100644 --- a/server/api/code/311_data_api/routers/service_requests.py +++ b/server/api/code/311_data_api/routers/service_requests.py @@ -1,11 +1,13 @@ -from typing import List +from typing import List, Optional import datetime +from sqlalchemy import sql from fastapi import APIRouter from pydantic import BaseModel from ..models.service_request import ServiceRequest from ..models import db +from .utilities import get_clusters_for_pins router = APIRouter() @@ -15,7 +17,7 @@ class ServiceRequestModel(BaseModel): council_id: int type_id: int created_date: datetime.date - closed_date: datetime.date + closed_date: Optional[datetime.date] address: str latitude: float longitude: float @@ -42,3 +44,57 @@ async def index(skip: int = 0, limit: int = 100): async def get_service_request(srid: int): svcreq = await ServiceRequest.get_or_404(srid) return svcreq.to_dict() + + +class Bounds(BaseModel): + north: float + south: float + east: float + west: float + + +class Filter(BaseModel): + startDate: datetime.date + endDate: datetime.date + ncList: List[int] + requestTypes: List[int] + zoom: Optional[int] = None + bounds: Optional[Bounds] = None + + +class Pin(BaseModel): + request_id: int + type_id: int + latitude: float + longitude: float + + +Pins = List[Pin] + + +@router.post("/pins", response_model=Items) +async def get_service_request_pins(filter: Filter): + result = await ServiceRequest.query.where( + sql.and_( + ServiceRequest.created_date >= filter.startDate, + 
ServiceRequest.created_date <= filter.endDate, + ServiceRequest.type_id.in_(filter.requestTypes), + ServiceRequest.council_id.in_(filter.ncList) + ) + ).gino.all() + return result + + +@router.post("/clusters", response_model=Items) +async def get_service_request_clusters(filter: Filter): + result = await ServiceRequest.query.where( + sql.and_( + ServiceRequest.created_date >= filter.startDate, + ServiceRequest.created_date <= filter.endDate, + ServiceRequest.type_id.in_(filter.requestTypes), + ServiceRequest.council_id.in_(filter.ncList) + ) + ).gino.all() + + clusters = get_clusters_for_pins(result, filter.zoom, filter.bounds, options={}) + return clusters diff --git a/server/api/code/311_data_api/routers/utilities.py b/server/api/code/311_data_api/routers/utilities.py new file mode 100644 index 000000000..d7523b432 --- /dev/null +++ b/server/api/code/311_data_api/routers/utilities.py @@ -0,0 +1,41 @@ +import pysupercluster + + +def get_clusters_for_pins(pins, zoom, bounds, options): + """ + Cluster pins into aggregate values using pysupercluster + based on filters and user view + """ + if len(pins) == 0: + return [] + + min_zoom = options.get('min_zoom', 0) + max_zoom = options.get('max_zoom', 17) + radius = options.get('radius', 200) + extent = options.get('extent', 512) + + index = pysupercluster.SuperCluster( + pins[['longitude', 'latitude']].to_numpy(), + min_zoom=min_zoom, + max_zoom=max_zoom, + radius=radius, + extent=extent) + + north = bounds.get('north', 90) + south = bounds.get('south', -90) + west = bounds.get('west', -180) + east = bounds.get('east', 180) + + clusters = index.getClusters( + top_left=(west, north), + bottom_right=(east, south), + zoom=zoom) + + for cluster in clusters: + if cluster['count'] == 1: + pin = pins.iloc[cluster['id']] + cluster['srnumber'] = pin['srnumber'] + cluster['requesttype'] = pin['requesttype'] + del cluster['expansion_zoom'] + + return clusters diff --git a/server/api/code/README.md b/server/api/code/README.md new 
file mode 100644 index 000000000..fd13adf26 --- /dev/null +++ b/server/api/code/README.md @@ -0,0 +1,127 @@ + +To try it: + +Start new server from API directory with python code/run +Ensure Test API is running with welcome message at http://localhost:5000/ +Test API using OpenAPI at http://localhost:5000/docs +Run the ReactJS app to make sure frontend works + + +TODO: +* add some pytests +* finish routes +* alembic migrations +* data seeds + + +add ```--write-out '%{time_total}\n' --output /dev/null --silent``` to curls + +```bash +curl -X POST "http://localhost:5000/map/pins" -H "accept: application/json" -H "Content-Type: application/json" -d "{\"startDate\":\"01/01/2020\",\"endDate\":\"08/27/2020\",\"ncList\":[52,46,128,54,104,76,97,121,55],\"requestTypes\":[\"Homeless Encampment\"]}" --write-out '%{time_total}\n' --output /dev/null --silent +``` + +killing +```bash +lsof -i :5000 +| kill -9 + +``` + +### Full filter OLD FORMAT +```json +{ + "startDate":"01/01/2020", + "endDate":"08/27/2020", + "ncList":[ + 52, + 46, + 128, + 54, + 104, + 76, + 97, + 121, + 55 + ], + "requestTypes":[ + "Dead Animal Removal", + "Homeless Encampment", + "Single Streetlight Issue", + "Multiple Streetlight Issue", + "Feedback", + "Bulky Items", + "Electronic Waste", + "Metal/Household Appliances", + "Graffiti Removal", + "Illegal Dumping Pickup", + "Other" + ], + "zoom":13, + "bounds":{ + "north":34.0731374116421, + "east":-118.18010330200195, + "south":33.97582290387967, + "west":-118.41201782226564 + } +} +``` + + +### Full filter: NEW FORMAT +```json +{ + "startDate":"2020-01-01", + "endDate":"2020-08-27", + "ncList":[ + 52, + 46, + 128, + 54, + 104, + 76, + 97, + 121, + 55 + ], + "requestTypes":[ + 1, + 2, + 3, + 4, + 5, + 6, + 7, + 8, + 9, + 10, + 11, + 12 + ], + "zoom":13, + "bounds":{ + "north":34.0731374116421, + "east":-118.18010330200195, + "south":33.97582290387967, + "west":-118.41201782226564 + } +} +``` + + +# Old format + +{ + "startDate": "01/01/2020", + "endDate": 
"08/27/2020", + "ncList": [52, 46, 128, 54, 104, 76, 97, 121, 55], + "requestTypes": ["Homeless Encampment"] +} + +# New format + +{ + "startDate": "2020-01-01", + "endDate": "2020-08-27", + "ncList": [52, 46, 128, 54, 104, 76, 97, 121, 55], + "requestTypes": [6] +} diff --git a/server/api/requirements.txt b/server/api/requirements.txt index 54286f51d..f56530d12 100644 --- a/server/api/requirements.txt +++ b/server/api/requirements.txt @@ -1,12 +1,19 @@ aiofiles==0.5.0 +asyncpg==0.21.0 attrs==20.1.0 certifi==2020.6.20 chardet==3.0.4 +click==7.1.2 +fastapi==0.61.0 flake8==3.8.3 +gino==1.0.1 +gino-sanic==0.1.0 +gino-starlette==0.1.1 +gunicorn==20.0.4 h11==0.8.1 h2==3.2.0 hpack==3.0.0 -hstspreload==2020.8.18 +hstspreload==2020.8.25 http3==0.6.7 httptools==0.1.1 httpx==0.11.1 @@ -25,6 +32,7 @@ pluggy==0.13.1 psycopg2-binary==2.8.5 py==1.9.0 pycodestyle==2.6.0 +pydantic==1.6.1 pyflakes==2.2.0 pyparsing==2.4.7 pysupercluster==0.7.6 @@ -48,10 +56,12 @@ sniffio==1.1.0 sodapy==2.1.0 SQLAlchemy==1.3.19 starkbank-ecdsa==1.0.0 +starlette==0.13.6 tabulate==0.8.7 toml==0.10.1 ujson==3.1.0 urllib3==1.25.10 +uvicorn==0.11.8 uvloop==0.14.0 websockets==8.1 zipp==3.1.0 From 63cc3b78e6241a3d60fdbdddc1fc5f30ee050ebd Mon Sep 17 00:00:00 2001 From: Matthew Webster Date: Tue, 1 Sep 2020 13:17:01 -0700 Subject: [PATCH 03/16] first draft of legacy endpoints --- .../code/311_data_api/models/request_type.py | 8 - .../code/311_data_api/routers/utilities.py | 41 --- .../__init__.py | 0 .../{311_data_api => lacity_data_api}/asgi.py | 0 .../config.py | 16 +- .../{311_data_api => lacity_data_api}/main.py | 12 +- .../models/__init__.py | 0 .../code/lacity_data_api/models/clusters.py | 248 ++++++++++++++++++ .../models/council.py | 2 + .../models/region.py | 2 + .../lacity_data_api/models/request_type.py | 38 +++ .../models/service_request.py | 0 .../routers/__init__.py | 0 .../routers/councils.py | 10 +- .../api/code/lacity_data_api/routers/index.py | 8 + .../routers/legacy.py} | 96 +++++-- 
.../routers/regions.py | 9 +- .../routers/request_types.py | 0 .../routers/service_requests.py | 53 +++- .../api/code/lacity_data_api/routers/shim.py | 89 +++++++ server/api/code/run.py | 2 +- 21 files changed, 543 insertions(+), 91 deletions(-) delete mode 100644 server/api/code/311_data_api/models/request_type.py delete mode 100644 server/api/code/311_data_api/routers/utilities.py rename server/api/code/{311_data_api => lacity_data_api}/__init__.py (100%) rename server/api/code/{311_data_api => lacity_data_api}/asgi.py (100%) rename server/api/code/{311_data_api => lacity_data_api}/config.py (78%) rename server/api/code/{311_data_api => lacity_data_api}/main.py (75%) rename server/api/code/{311_data_api => lacity_data_api}/models/__init__.py (100%) create mode 100644 server/api/code/lacity_data_api/models/clusters.py rename server/api/code/{311_data_api => lacity_data_api}/models/council.py (75%) rename server/api/code/{311_data_api => lacity_data_api}/models/region.py (71%) create mode 100644 server/api/code/lacity_data_api/models/request_type.py rename server/api/code/{311_data_api => lacity_data_api}/models/service_request.py (100%) rename server/api/code/{311_data_api => lacity_data_api}/routers/__init__.py (100%) rename server/api/code/{311_data_api => lacity_data_api}/routers/councils.py (69%) create mode 100644 server/api/code/lacity_data_api/routers/index.py rename server/api/code/{311_data_api/routers/index.py => lacity_data_api/routers/legacy.py} (63%) rename server/api/code/{311_data_api => lacity_data_api}/routers/regions.py (71%) rename server/api/code/{311_data_api => lacity_data_api}/routers/request_types.py (100%) rename server/api/code/{311_data_api => lacity_data_api}/routers/service_requests.py (65%) create mode 100644 server/api/code/lacity_data_api/routers/shim.py diff --git a/server/api/code/311_data_api/models/request_type.py b/server/api/code/311_data_api/models/request_type.py deleted file mode 100644 index 0451aeb02..000000000 --- 
a/server/api/code/311_data_api/models/request_type.py +++ /dev/null @@ -1,8 +0,0 @@ -from . import db - - -class RequestType(db.Model): - __tablename__ = 'request_types' - - type_id = db.Column(db.SmallInteger, primary_key=True) - type_name = db.Column(db.String) diff --git a/server/api/code/311_data_api/routers/utilities.py b/server/api/code/311_data_api/routers/utilities.py deleted file mode 100644 index d7523b432..000000000 --- a/server/api/code/311_data_api/routers/utilities.py +++ /dev/null @@ -1,41 +0,0 @@ -import pysupercluster - - -def get_clusters_for_pins(pins, zoom, bounds, options): - """ - Cluster pins into aggregate values using pysupercluster - based on filters and user view - """ - if len(pins) == 0: - return [] - - min_zoom = options.get('min_zoom', 0) - max_zoom = options.get('max_zoom', 17) - radius = options.get('radius', 200) - extent = options.get('extent', 512) - - index = pysupercluster.SuperCluster( - pins[['longitude', 'latitude']].to_numpy(), - min_zoom=min_zoom, - max_zoom=max_zoom, - radius=radius, - extent=extent) - - north = bounds.get('north', 90) - south = bounds.get('south', -90) - west = bounds.get('west', -180) - east = bounds.get('east', 180) - - clusters = index.getClusters( - top_left=(west, north), - bottom_right=(east, south), - zoom=zoom) - - for cluster in clusters: - if cluster['count'] == 1: - pin = pins.iloc[cluster['id']] - cluster['srnumber'] = pin['srnumber'] - cluster['requesttype'] = pin['requesttype'] - del cluster['expansion_zoom'] - - return clusters diff --git a/server/api/code/311_data_api/__init__.py b/server/api/code/lacity_data_api/__init__.py similarity index 100% rename from server/api/code/311_data_api/__init__.py rename to server/api/code/lacity_data_api/__init__.py diff --git a/server/api/code/311_data_api/asgi.py b/server/api/code/lacity_data_api/asgi.py similarity index 100% rename from server/api/code/311_data_api/asgi.py rename to server/api/code/lacity_data_api/asgi.py diff --git 
a/server/api/code/311_data_api/config.py b/server/api/code/lacity_data_api/config.py similarity index 78% rename from server/api/code/311_data_api/config.py rename to server/api/code/lacity_data_api/config.py index a912e8e09..6d95e57c8 100644 --- a/server/api/code/311_data_api/config.py +++ b/server/api/code/lacity_data_api/config.py @@ -4,12 +4,19 @@ config = Config(".env") +TESTING = config("TESTING", cast=bool, default=False) + DB_DRIVER = config("DB_DRIVER", default="postgresql") DB_HOST = config("DB_HOST", default=None) DB_PORT = config("DB_PORT", cast=int, default=None) DB_USER = config("DB_USER", default=None) DB_PASSWORD = config("DB_PASSWORD", cast=Secret, default=None) DB_DATABASE = config("DB_DATABASE", default=None) + +if TESTING: + if DB_DATABASE: + DB_DATABASE += "_test" + DB_DSN = config( "DB_DSN", cast=make_url, @@ -22,12 +29,19 @@ database=DB_DATABASE, ), ) + +if TESTING: + if DB_DSN: + DB_DSN += "_test" + DB_POOL_MIN_SIZE = config("DB_POOL_MIN_SIZE", cast=int, default=1) DB_POOL_MAX_SIZE = config("DB_POOL_MAX_SIZE", cast=int, default=16) -DB_ECHO = config("DB_ECHO", cast=bool, default=False) +DB_ECHO = config("DB_ECHO", cast=bool, default=True) DB_SSL = config("DB_SSL", default=None) DB_USE_CONNECTION_FOR_REQUEST = config( "DB_USE_CONNECTION_FOR_REQUEST", cast=bool, default=True ) DB_RETRY_LIMIT = config("DB_RETRY_LIMIT", cast=int, default=32) DB_RETRY_INTERVAL = config("DB_RETRY_INTERVAL", cast=int, default=1) + +API_LEGACY_MODE = config('API_LEGACY_MODE', cast=bool, default=True) diff --git a/server/api/code/311_data_api/main.py b/server/api/code/lacity_data_api/main.py similarity index 75% rename from server/api/code/311_data_api/main.py rename to server/api/code/lacity_data_api/main.py index 6a4a8b983..389b61021 100644 --- a/server/api/code/311_data_api/main.py +++ b/server/api/code/lacity_data_api/main.py @@ -4,8 +4,9 @@ from fastapi.middleware.cors import CORSMiddleware from fastapi.middleware.gzip import GZipMiddleware -from .routers import 
index, councils, regions, request_types, service_requests +from .routers import index, legacy, councils, regions, request_types, service_requests, shim from .models import db +from .config import API_LEGACY_MODE try: from importlib.metadata import entry_points @@ -16,11 +17,16 @@ def get_app(): - app = FastAPI(title="311 Data API") + app = FastAPI(title="LA City 311 Data API") db.init_app(app) - app.include_router(index.router) + + if API_LEGACY_MODE: + app.include_router(legacy.router) + else: + app.include_router(shim.router) + app.include_router(councils.router, prefix="/councils") app.include_router(regions.router, prefix="/regions") app.include_router(request_types.router, prefix="/types") diff --git a/server/api/code/311_data_api/models/__init__.py b/server/api/code/lacity_data_api/models/__init__.py similarity index 100% rename from server/api/code/311_data_api/models/__init__.py rename to server/api/code/lacity_data_api/models/__init__.py diff --git a/server/api/code/lacity_data_api/models/clusters.py b/server/api/code/lacity_data_api/models/clusters.py new file mode 100644 index 000000000..02c7d206e --- /dev/null +++ b/server/api/code/lacity_data_api/models/clusters.py @@ -0,0 +1,248 @@ +from typing import List +import datetime + +import numpy +import pysupercluster +from sqlalchemy import and_ + +from .service_request import ServiceRequest +from .request_type import RequestType, get_types_dict +from .council import Council +from . 
from dataclasses import dataclass


@dataclass
class Cluster:
    """
    Value object for one rendered map cluster.

    Attributes:
        count: number of service requests aggregated into this cluster
        expansion_zoom: zoom level the map should jump to when clicked
        id: cluster identifier (0 for the single city-wide cluster,
            or a council id for council-level clusters)
        latitude: cluster center latitude
        longitude: cluster center longitude
    """
    count: int
    expansion_zoom: int
    id: int
    latitude: float
    longitude: float
async def get_clusters_for_bounds(
    start_date: datetime.date,
    end_date: datetime.date,
    type_ids: List[int],
    council_ids: List[int],
    zoom_current: int,
    bounds
) -> List[Cluster]:
    """
    Cluster service-request pins that fall inside a map bounding box.

    Args:
        start_date (date): beginning of date range service was requested
        end_date (date): end of date range service was requested
        type_ids (List[int]): the request type ids to match on
        council_ids (List[int]): the council ids to match on
        zoom_current (int): current map zoom level to cluster at
        bounds: object with north/south/east/west viewport limits

    Returns:
        a list of dicts, each representing either a cluster or an
        individual pin for a single request
    """
    result = await (
        db.select(
            [
                ServiceRequest.request_id,
                ServiceRequest.latitude,
                ServiceRequest.longitude,
                ServiceRequest.type_id
            ]
        ).where(
            and_(
                ServiceRequest.created_date >= start_date,
                ServiceRequest.created_date <= end_date,
                ServiceRequest.type_id.in_(type_ids),
                ServiceRequest.council_id.in_(council_ids),
                ServiceRequest.latitude < bounds.north,
                ServiceRequest.latitude > bounds.south,
                ServiceRequest.longitude > bounds.west,
                ServiceRequest.longitude < bounds.east
            )
        ).gino.all()
    )

    # FIX: guard the empty case before building the index — the removed
    # utilities.py implementation returned [] for zero pins, but this
    # version passed a zero-length array straight to pysupercluster.
    if not result:
        return []

    # pysupercluster expects points in [longitude, latitude] order
    points = [[row[2], row[1]] for row in result]

    index = pysupercluster.SuperCluster(
        numpy.array(points),
        min_zoom=0,
        max_zoom=17,
        radius=200,
        extent=512
    )

    cluster_list = index.getClusters(
        top_left=(bounds.west, bounds.north),
        bottom_right=(bounds.east, bounds.south),
        zoom=zoom_current
    )

    types_dict = await get_types_dict()

    for item in cluster_list:
        # change single item clusters into points
        if item['count'] == 1:
            pin = result[item['id']]  # cluster id matches the result row
            item['srnumber'] = "1-" + str(pin[0])
            item['requesttype'] = types_dict[pin[3]]
            del item['expansion_zoom']

    return cluster_list
db.Column(db.SmallInteger, primary_key=True) council_name = db.Column(db.String) region_id = db.Column(db.SmallInteger) + latitude = db.Column(db.Float) + longitude = db.Column(db.Float) diff --git a/server/api/code/311_data_api/models/region.py b/server/api/code/lacity_data_api/models/region.py similarity index 71% rename from server/api/code/311_data_api/models/region.py rename to server/api/code/lacity_data_api/models/region.py index b7d7024ff..ecb14a261 100644 --- a/server/api/code/311_data_api/models/region.py +++ b/server/api/code/lacity_data_api/models/region.py @@ -6,3 +6,5 @@ class Region(db.Model): region_id = db.Column(db.SmallInteger, primary_key=True) region_name = db.Column(db.String) + latitude = db.Column(db.Float) + longitude = db.Column(db.Float) diff --git a/server/api/code/lacity_data_api/models/request_type.py b/server/api/code/lacity_data_api/models/request_type.py new file mode 100644 index 000000000..808a4022e --- /dev/null +++ b/server/api/code/lacity_data_api/models/request_type.py @@ -0,0 +1,38 @@ +from typing import List +import functools + +from . 
class RequestType(db.Model):
    """Reference table of 311 request types (e.g. 'Bulky Items')."""
    __tablename__ = 'request_types'

    type_id = db.Column(db.SmallInteger, primary_key=True)
    type_name = db.Column(db.String)


# Module-level cache for get_types_dict: request types are static
# reference data, so one DB round-trip per process is enough.
_types_dict_cache = None


async def get_types_dict():
    """
    Return a {type_id: type_name} dict, cached after the first call.

    FIX: the previous @functools.lru_cache(maxsize=1) decorator is broken
    on an async function — it caches the coroutine object itself, so the
    second call awaits an already-awaited coroutine and raises
    RuntimeError. Cache the resolved dict instead.
    """
    global _types_dict_cache
    if _types_dict_cache is None:
        result = await db.all(RequestType.query)
        _types_dict_cache = {row.type_id: row.type_name for row in result}
    return _types_dict_cache


async def get_types_by_str_list(str_list: List[str]) -> List[RequestType]:
    '''Get a list of RequestTypes from their type_names'''
    result = await db.all(
        RequestType.query.where(
            RequestType.type_name.in_(str_list)
        )
    )
    return result


async def get_types_by_int_list(int_list: List[int]) -> List[RequestType]:
    '''Get a list of RequestTypes from their type_ids'''
    result = await db.all(
        RequestType.query.where(
            RequestType.type_id.in_(int_list)
        )
    )
    return result
+25,11 @@ class Config: @router.get("/", response_model=Items) async def index(): - return await db.all(Council.query) + result = await db.all(Council.query) + return result @router.get("/{cid}") async def get_council(cid: int): - council = await Council.get_or_404(cid) - return council.to_dict() + result = await Council.get_or_404(cid) + return result.to_dict() diff --git a/server/api/code/lacity_data_api/routers/index.py b/server/api/code/lacity_data_api/routers/index.py new file mode 100644 index 000000000..d26837768 --- /dev/null +++ b/server/api/code/lacity_data_api/routers/index.py @@ -0,0 +1,8 @@ +from fastapi import APIRouter + +router = APIRouter() + + +@router.get("/") +async def index(): + return {"message": "Hello, new index!"} diff --git a/server/api/code/311_data_api/routers/index.py b/server/api/code/lacity_data_api/routers/legacy.py similarity index 63% rename from server/api/code/311_data_api/routers/index.py rename to server/api/code/lacity_data_api/routers/legacy.py index bc818daa1..56a2c4775 100644 --- a/server/api/code/311_data_api/routers/index.py +++ b/server/api/code/lacity_data_api/routers/legacy.py @@ -5,10 +5,15 @@ from fastapi import APIRouter from pydantic import BaseModel -from services import status, map, visualizations, requests +from services import status, map, visualizations, requests, comparison, github, email router = APIRouter() +""" +These are router classes that implement the existing API design and +use the legacy services code from src as-is. 
+""" + class Bounds(BaseModel): north: float @@ -26,12 +31,46 @@ class Filter(BaseModel): bounds: Optional[Bounds] = None -@router.get("/") -async def index(): - return {"message": "Hello, new index!"} +class Pin(BaseModel): + srnumber: str + requesttype: str + latitude: float + longitude: float + + +Pins = List[Pin] + + +class Cluster(BaseModel): + count: int + expansion_zoom: Optional[int] + id: int + latitude: float + longitude: float -@router.get("/status/api") +Clusters = List[Cluster] + + +class Set(dict): + district: str + list: List[int] + + +class Comparison(BaseModel): + startDate: str + endDate: str + requestTypes: List[str] + set1: Set + set2: Set + + +class Feedback(BaseModel): + title: str + body: str + + +@router.get("/status/api", description="Provides the status of backend systems") async def status_api(): result = await status.api() return result @@ -43,7 +82,7 @@ async def status_db(): return result -@router.get("/status/system") +@router.get("/status/sys") async def status_system(): result = await status.system() return result @@ -51,11 +90,17 @@ async def status_system(): @router.get("/servicerequest/{srnumber}") async def get_service_request_by_string(srnumber: str): - # result = await get_service_request(int(srnumber[2:])) result = requests.item_query(srnumber) return result +@router.post("/open-requests") +async def get_open_requests(): + result = await requests.open_requests() + return result + + +# NOTE: can't apply response filter here since it sometimes returns a single point @router.post("/map/clusters") async def get_clusters(filter: Filter): start_time = datetime.datetime.strptime(filter.startDate, '%m/%d/%Y') @@ -83,16 +128,6 @@ async def get_heatmap(filter: Filter): return responses.JSONResponse(result.tolist()) -class Pin(BaseModel): - srnumber: str - requesttype: str - latitude: float - longitude: float - - -Pins = List[Pin] - - @router.post("/map/pins", response_model=Pins) async def get_pins(filter: Filter): start_time = 
datetime.datetime.strptime(filter.startDate, '%m/%d/%Y') @@ -115,3 +150,30 @@ async def get_visualizations(filter: Filter): requestTypes=filter.requestTypes, ncList=filter.ncList) return result + + +@router.post("/comparison/frequency") +async def get_comparison_frequency(comp_filter: Comparison): + result = await comparison.freq_comparison(**dict(comp_filter)) + return result + + +@router.post("/comparison/timetoclose") +async def get_comparison_time_to_close(comp_filter: Comparison): + result = await comparison.ttc_comparison(**dict(comp_filter)) + return result + + +@router.post("/comparison/counts") +async def get_comparison_counts(comp_filter: Comparison): + result = await comparison.counts_comparison(**dict(comp_filter)) + return result + + +@router.post("/feedback") +async def get_feedback(feedback: Feedback): + id, number = await github.create_issue(feedback.title, feedback.body) + await github.add_issue_to_project(id) + await email.respond_to_feedback(feedback.body, number) + + return {'success': True} diff --git a/server/api/code/311_data_api/routers/regions.py b/server/api/code/lacity_data_api/routers/regions.py similarity index 71% rename from server/api/code/311_data_api/routers/regions.py rename to server/api/code/lacity_data_api/routers/regions.py index 72e41091e..9e244b5c3 100644 --- a/server/api/code/311_data_api/routers/regions.py +++ b/server/api/code/lacity_data_api/routers/regions.py @@ -12,6 +12,8 @@ class RegionModel(BaseModel): region_id: int region_name: str + latitude: float + longitude: float class Config: orm_mode = True @@ -22,10 +24,11 @@ class Config: @router.get("/", response_model=Items) async def index(): - return await db.all(Region.query) + result = await db.all(Region.query) + return result @router.get("/{rid}") async def get_region(rid: int): - region = await Region.get_or_404(rid) - return region.to_dict() + result = await Region.get_or_404(rid) + return result.to_dict() diff --git 
# TODO: implement conditional cluster logic based on zoom
@router.post("/clusters", response_model=Clusters)
async def get_service_request_clusters(filter: Filter):
    """Return city-wide clusters of service requests matching the filter."""
    # FIX: get_clusters_for_city takes zoom_current as a required 4th
    # positional argument; the previous 3-argument call raised TypeError.
    result = await clusters.get_clusters_for_city(
        filter.startDate,
        filter.endDate,
        filter.requestTypes,
        filter.zoom
    )

    return result
+""" + + +class Bounds(BaseModel): + north: float + south: float + east: float + west: float + + +class Filter(BaseModel): + startDate: str + endDate: str + ncList: List[int] + requestTypes: List[str] + zoom: Optional[int] = None + bounds: Optional[Bounds] = None + + +@router.post("/new/clusters") +async def get_new_clusters(filter: Filter): + # have to convert the funky date formats + start_date = datetime.datetime.strptime(filter.startDate, '%m/%d/%Y') + end_date = datetime.datetime.strptime(filter.endDate, '%m/%d/%Y') + + # convert type names to type ids + request_types = await request_type.get_types_by_str_list(filter.requestTypes) + type_ids = [i.type_id for i in request_types] + + zoom = filter.zoom or 10 + + if zoom < 11: + # get city clusters + result = await clusters.get_clusters_for_city( + start_date, + end_date, + type_ids, + filter.zoom + ) + elif zoom < 14: + result = await clusters.get_clusters_for_councils( + start_date, + end_date, + type_ids, + filter.ncList, + filter.zoom + ) + else: + result = await clusters.get_clusters_for_bounds( + start_date, + end_date, + type_ids, + filter.ncList, + filter.zoom, + filter.bounds + ) + + return result + + +@router.post("/new/heat") +async def get_new_heatmap(filter: Filter): + start_time = datetime.datetime.strptime(filter.startDate, '%m/%d/%Y') + end_time = datetime.datetime.strptime(filter.endDate, '%m/%d/%Y') + + # convert type names to type ids + request_types = await request_type.get_types_by_str_list(filter.requestTypes) + type_ids = [i.type_id for i in request_types] + + result = await clusters.get_points( + start_time, + end_time, + type_ids, + filter.ncList + ) + return result diff --git a/server/api/code/run.py b/server/api/code/run.py index f7a39afd3..539220e53 100644 --- a/server/api/code/run.py +++ b/server/api/code/run.py @@ -7,7 +7,7 @@ print(sys.path) uvicorn.run( - "311_data_api.asgi:app", + "lacity_data_api.asgi:app", host=os.getenv("APP_HOST", "127.0.0.1"), port=int(os.getenv("APP_PORT", 
"5000")), ) From 8886ca718c4060d2b83fc55a4a1f3410e9c4a3f7 Mon Sep 17 00:00:00 2001 From: Matthew Webster Date: Tue, 1 Sep 2020 13:45:38 -0700 Subject: [PATCH 04/16] fixing lint issues from flake8 --- server/api/code/lacity_data_api/main.py | 8 +++----- .../code/lacity_data_api/models/clusters.py | 6 +++--- .../routers/service_requests.py | 19 +------------------ 3 files changed, 7 insertions(+), 26 deletions(-) diff --git a/server/api/code/lacity_data_api/main.py b/server/api/code/lacity_data_api/main.py index 389b61021..2ec0c586c 100644 --- a/server/api/code/lacity_data_api/main.py +++ b/server/api/code/lacity_data_api/main.py @@ -4,14 +4,12 @@ from fastapi.middleware.cors import CORSMiddleware from fastapi.middleware.gzip import GZipMiddleware -from .routers import index, legacy, councils, regions, request_types, service_requests, shim +from .routers import ( + index, legacy, councils, regions, request_types, service_requests, shim +) from .models import db from .config import API_LEGACY_MODE -try: - from importlib.metadata import entry_points -except ImportError: # pragma: no cover - from importlib_metadata import entry_points logger = logging.getLogger(__name__) diff --git a/server/api/code/lacity_data_api/models/clusters.py b/server/api/code/lacity_data_api/models/clusters.py index 02c7d206e..c2867f707 100644 --- a/server/api/code/lacity_data_api/models/clusters.py +++ b/server/api/code/lacity_data_api/models/clusters.py @@ -6,7 +6,7 @@ from sqlalchemy import and_ from .service_request import ServiceRequest -from .request_type import RequestType, get_types_dict +from .request_type import get_types_dict from .council import Council from . 
import db @@ -35,7 +35,7 @@ async def get_clusters_for_city( start_date: datetime.date, end_date: datetime.date, type_ids: List[int], - zoom_current: int + zoom_current: int ) -> List[Cluster]: """ Cluster pins for the entire city @@ -71,7 +71,7 @@ async def get_clusters_for_city( return cluster_list -# TODO: same as above by group by region of each council +# TODO: same as above by group by region of each council def get_clusters_for_regions(pins, zoom, bounds, options): """ Cluster pins by region diff --git a/server/api/code/lacity_data_api/routers/service_requests.py b/server/api/code/lacity_data_api/routers/service_requests.py index ebf29a24f..678da9c1d 100644 --- a/server/api/code/lacity_data_api/routers/service_requests.py +++ b/server/api/code/lacity_data_api/routers/service_requests.py @@ -95,6 +95,7 @@ async def get_service_request_pins(filter: Filter): return result +# TODO: implement conditional cluster logic based on zoom @router.post("/clusters", response_model=Clusters) async def get_service_request_clusters(filter: Filter): @@ -104,22 +105,4 @@ async def get_service_request_clusters(filter: Filter): filter.requestTypes ) - # result = await ServiceRequest.query() - # .where( - # sql.and_( - # ServiceRequest.created_date >= filter.startDate, - # ServiceRequest.created_date <= filter.endDate, - # ServiceRequest.type_id.in_(filter.requestTypes), - # ServiceRequest.council_id.in_(filter.ncList) - # ) - # ).gino.all() - - # # council - - # # street - # if filter.zoom > 9: - # cluster_result = clusters.get_clusters_for_regions(result, filter.zoom, filter.bounds, options={}) - # else: - # cluster_result = clusters.get_clusters_for_pins(result, filter.zoom, filter.bounds, options={}) - return result From 35628fff902746de1f1d58a30e3368b8cc0c6407 Mon Sep 17 00:00:00 2001 From: Matthew Webster Date: Tue, 1 Sep 2020 23:28:16 -0700 Subject: [PATCH 05/16] added support for deploying new API in Docker --- server/api/.dockerignore | 5 ++++ server/api/.env.example | 
49 +++++++++++++++++++++++++++++++++++++ server/api/Dockerfile | 26 ++++++++------------ server/api/requirements.txt | 15 ++++++++++-- server/docker-compose.yml | 25 +------------------ 5 files changed, 78 insertions(+), 42 deletions(-) create mode 100644 server/api/.dockerignore create mode 100644 server/api/.env.example diff --git a/server/api/.dockerignore b/server/api/.dockerignore new file mode 100644 index 000000000..8f9d89f87 --- /dev/null +++ b/server/api/.dockerignore @@ -0,0 +1,5 @@ +# ignore .git and .cache folders +Pipfile +Pipfile.lock +__pycache__ +__tmp__ diff --git a/server/api/.env.example b/server/api/.env.example new file mode 100644 index 000000000..4e0236137 --- /dev/null +++ b/server/api/.env.example @@ -0,0 +1,49 @@ +# including 2 paths to src: tools set relative to Workspace Dir, debugpy relative to api Dir +PYTHONPATH=./server/api/src:./server/api/code:./src + +# Set debug +DEBUG=True +UPDATE_ON_START=0 #TODO: Replace this +USE_FILE_CACHE=True # enables picklecache +TEMP_FOLDER=./__tmp__ # location of picklecache files + +# Application +API_HOST=127.0.0.1 +API_PORT=5000 +API_LEGACY_MODE=True + +# Database (for dev set either to host.docker.internal or localhost) +DB_DSN=postgresql://311_user:311_pass@host.docker.internal:5433/311_db +DB_ECHO=True + +# Redis cache +REDIS_ENABLED=0 +REDIS_URL=redis://redis:6379 +REDIS_TTL_SECONDS=3600 + +# Socrata data source +SOCRATA_TOKEN=6b5lwk1jHSQTgx7PAVFKOOdt2 +SOCRATA_BATCH_SIZE=50000 + +# Github code source +GITHUB_TOKEN= +GITHUB_ISSUES_URL=https://api.github.com/repos/hackforla/311-data-support/issues +GITHUB_PROJECT_URL= +GITHUB_SHA=DEVELOPMENT + +# Slack error reporting +SLACK_WEBHOOK_URL= +SLACK_ERROR_CODES=[400, 500] + +# Sendgrid email +SENDGRID_API_KEY= + +# DOCKER +PORT=5000 +COMPOSE_PROJECT_NAME=311_data +DB_USER=311_user +DB_PASS=311_pass +DB_NAME=311_db +DB_HOST_PORT=5433 +API_HOST_PORT=5000 +API_RESTART_POLICY=no diff --git a/server/api/Dockerfile b/server/api/Dockerfile index 
8718ed7f7..0e19bb076 100644 --- a/server/api/Dockerfile +++ b/server/api/Dockerfile @@ -1,23 +1,17 @@ -FROM python:3.7-slim +FROM python:3.7 -RUN apt-get update && \ - apt-get install -yq \ - python3 \ - python3-dev \ - gcc \ - g++ \ - gfortran \ - musl-dev && \ - pip install --upgrade pip +WORKDIR /home/api -COPY requirements.txt /home/api/ +COPY requirements.txt . RUN pip install --no-cache-dir -r /home/api/requirements.txt -COPY setup.cfg /home/api/ -COPY /bin /home/api/bin/ -COPY .env* /src /home/api/src/ +COPY . . -WORKDIR /home/api +ENV APP_HOST=0.0.0.0 +# need add the src folder to sys.path for compatibility +ENV PYTHONPATH=/home/api/src + +EXPOSE 5000 -CMD python bin/api_check.py && python bin/api_start.py +CMD python code/run.py diff --git a/server/api/requirements.txt b/server/api/requirements.txt index f56530d12..33bb3d8e1 100644 --- a/server/api/requirements.txt +++ b/server/api/requirements.txt @@ -1,11 +1,16 @@ aiofiles==0.5.0 +alembic==1.4.2 asyncpg==0.21.0 attrs==20.1.0 certifi==2020.6.20 chardet==3.0.4 click==7.1.2 -fastapi==0.61.0 +click-plugins==1.1.1 +cligj==0.5.0 +fastapi==0.61.1 +Fiona==1.8.13.post1 flake8==3.8.3 +geopandas==0.8.1 gino==1.0.1 gino-sanic==0.1.0 gino-starlette==0.1.1 @@ -22,9 +27,12 @@ idna==2.10 immutables==0.14 importlib-metadata==1.7.0 iniconfig==1.0.1 +Mako==1.1.3 +MarkupSafe==1.1.1 mccabe==0.6.1 -more-itertools==8.4.0 +more-itertools==8.5.0 multidict==4.7.6 +munch==2.5.0 numpy==1.19.1 packaging==20.4 pandas==1.1.1 @@ -35,11 +43,13 @@ pycodestyle==2.6.0 pydantic==1.6.1 pyflakes==2.2.0 pyparsing==2.4.7 +pyproj==2.6.1.post1 pysupercluster==0.7.6 pytest==6.0.1 pytest-asyncio==0.14.0 python-dateutil==2.8.1 python-dotenv==0.14.0 +python-editor==1.0.4 python-http-client==3.3.1 pytz==2020.1 redis==3.5.3 @@ -51,6 +61,7 @@ sanic-compress==0.1.1 Sanic-Cors==0.10.0.post3 Sanic-Plugins-Framework==0.9.3 sendgrid==6.4.6 +Shapely==1.7.1 six==1.15.0 sniffio==1.1.0 sodapy==2.1.0 diff --git a/server/docker-compose.yml 
b/server/docker-compose.yml index fdbbd20cb..99d2ead52 100644 --- a/server/docker-compose.yml +++ b/server/docker-compose.yml @@ -5,6 +5,7 @@ services: container_name: 311-postgres image: postgres restart: always + env_file: .env environment: POSTGRES_USER: ${DB_USER} POSTGRES_PASSWORD: ${DB_PASS} @@ -36,29 +37,5 @@ services: - db - redis - adminer: - container_name: postgres-dashboard - image: adminer - ports: - - 8080:8080 - - rebrow: - container_name: redis-dashboard - image: marian/rebrow - links: - - redis:redis - ports: - - 5001:5001 - - # jupyter: - # container_name: 311-notebook - # image: jupyter/scipy-notebook - # environment: - # DATABASE_URL: postgresql://${DB_USER}:${DB_PASS}@db:5432/${DB_NAME} - # ports: - # - 8888:8888 - # volumes: - # - ../:/home/jovyan/work - volumes: backend_data: From bd4ef1ef29865581f394fdf3b963633190ae99c0 Mon Sep 17 00:00:00 2001 From: Matthew Webster Date: Wed, 2 Sep 2020 14:03:43 -0700 Subject: [PATCH 06/16] new date parsing and config changes --- server/api/code/lacity_data_api/config.py | 23 +++++- server/api/code/lacity_data_api/main.py | 4 +- .../code/lacity_data_api/routers/legacy.py | 71 ++++++++++--------- server/api/code/run.py | 3 - server/api/src/db/conn.py | 2 +- server/api/src/settings.py | 4 +- 6 files changed, 65 insertions(+), 42 deletions(-) diff --git a/server/api/code/lacity_data_api/config.py b/server/api/code/lacity_data_api/config.py index 6d95e57c8..87dfbcaf0 100644 --- a/server/api/code/lacity_data_api/config.py +++ b/server/api/code/lacity_data_api/config.py @@ -1,11 +1,15 @@ +import os from sqlalchemy.engine.url import URL, make_url -from starlette.config import Config +from starlette.config import Config, environ from starlette.datastructures import Secret config = Config(".env") +# checking for testing or debug +DEBUG = config("DEBUG", cast=bool, default=False) TESTING = config("TESTING", cast=bool, default=False) +# getting database configuration DB_DRIVER = config("DB_DRIVER", 
default="postgresql") DB_HOST = config("DB_HOST", default=None) DB_PORT = config("DB_PORT", cast=int, default=None) @@ -44,4 +48,19 @@ DB_RETRY_LIMIT = config("DB_RETRY_LIMIT", cast=int, default=32) DB_RETRY_INTERVAL = config("DB_RETRY_INTERVAL", cast=int, default=1) -API_LEGACY_MODE = config('API_LEGACY_MODE', cast=bool, default=True) +# check whether running in legacy mode +API_LEGACY_MODE = config('API_LEGACY_MODE', cast=bool, default=False) + +# the legacy code needs these created as environment settings +if API_LEGACY_MODE: + environ['DATABASE_URL'] = str(DB_DSN) + environ['TMP_DIR'] = config('TEMP_FOLDER') + environ['PICKLECACHE_ENABLED'] = config('USE_FILE_CACHE') + +# print out debug information +if DEBUG: + print("\n\033[93mLA City Data API server starting with DEBUG mode ENABLED\033[0m") + print("\nEnvironment variables after executing config.py file:") + for k, v in sorted(os.environ.items()): + print(f'\033[92m{k}\033[0m: {v}') + print(f"\n\033[93mDatabase\033[0m: {DB_DSN}\n") diff --git a/server/api/code/lacity_data_api/main.py b/server/api/code/lacity_data_api/main.py index 2ec0c586c..a74c12a85 100644 --- a/server/api/code/lacity_data_api/main.py +++ b/server/api/code/lacity_data_api/main.py @@ -4,11 +4,11 @@ from fastapi.middleware.cors import CORSMiddleware from fastapi.middleware.gzip import GZipMiddleware +from .config import API_LEGACY_MODE # important to load config first +from .models import db from .routers import ( index, legacy, councils, regions, request_types, service_requests, shim ) -from .models import db -from .config import API_LEGACY_MODE logger = logging.getLogger(__name__) diff --git a/server/api/code/lacity_data_api/routers/legacy.py b/server/api/code/lacity_data_api/routers/legacy.py index 56a2c4775..756e02911 100644 --- a/server/api/code/lacity_data_api/routers/legacy.py +++ b/server/api/code/lacity_data_api/routers/legacy.py @@ -3,7 +3,7 @@ from fastapi import responses from fastapi import APIRouter -from pydantic import 
BaseModel +from pydantic import BaseModel, validator from services import status, map, visualizations, requests, comparison, github, email @@ -30,6 +30,18 @@ class Filter(BaseModel): zoom: Optional[int] = None bounds: Optional[Bounds] = None + @validator('startDate', 'endDate') + def parse_date(cls, v): + if isinstance(v, str): + try: + v = datetime.datetime.strptime(v, '%m/%d/%Y') + except ValueError: + try: + v = datetime.datetime.strptime(v, '%Y-%m-%d') + except ValueError: + pass + return v + class Pin(BaseModel): srnumber: str @@ -103,52 +115,47 @@ async def get_open_requests(): # NOTE: can't apply response filter here since it sometimes returns a single point @router.post("/map/clusters") async def get_clusters(filter: Filter): - start_time = datetime.datetime.strptime(filter.startDate, '%m/%d/%Y') - end_time = datetime.datetime.strptime(filter.endDate, '%m/%d/%Y') - - result = await map.pin_clusters(start_time, - end_time, - filter.requestTypes, - filter.ncList, - filter.zoom, - dict(filter.bounds) - ) + result = await map.pin_clusters( + filter.startDate, + filter.endDate, + filter.requestTypes, + filter.ncList, + filter.zoom, + dict(filter.bounds) + ) return result @router.post("/map/heat") async def get_heatmap(filter: Filter): - start_time = datetime.datetime.strptime(filter.startDate, '%m/%d/%Y') - end_time = datetime.datetime.strptime(filter.endDate, '%m/%d/%Y') - - result = await map.heatmap(startDate=start_time, - endDate=end_time, - requestTypes=filter.requestTypes, - ncList=filter.ncList) + result = await map.heatmap( + startDate=filter.startDate, + endDate=filter.endDate, + requestTypes=filter.requestTypes, + ncList=filter.ncList + ) return responses.JSONResponse(result.tolist()) @router.post("/map/pins", response_model=Pins) async def get_pins(filter: Filter): - start_time = datetime.datetime.strptime(filter.startDate, '%m/%d/%Y') - end_time = datetime.datetime.strptime(filter.endDate, '%m/%d/%Y') - - result = await map.pins(startDate=start_time, 
- endDate=end_time, - requestTypes=filter.requestTypes, - ncList=filter.ncList) + result = await map.pins( + startDate=filter.startDate, + endDate=filter.endDate, + requestTypes=filter.requestTypes, + ncList=filter.ncList + ) return result @router.post("/visualizations") async def get_visualizations(filter: Filter): - start_time = datetime.datetime.strptime(filter.startDate, '%m/%d/%Y') - end_time = datetime.datetime.strptime(filter.endDate, '%m/%d/%Y') - - result = await visualizations.visualizations(startDate=start_time, - endDate=end_time, - requestTypes=filter.requestTypes, - ncList=filter.ncList) + result = await visualizations.visualizations( + startDate=filter.startDate, + endDate=filter.endDate, + requestTypes=filter.requestTypes, + ncList=filter.ncList + ) return result diff --git a/server/api/code/run.py b/server/api/code/run.py index 539220e53..ddf794423 100644 --- a/server/api/code/run.py +++ b/server/api/code/run.py @@ -2,9 +2,6 @@ if __name__ == "__main__": import uvicorn - import sys - - print(sys.path) uvicorn.run( "lacity_data_api.asgi:app", diff --git a/server/api/src/db/conn.py b/server/api/src/db/conn.py index 97b35be63..a9be14950 100644 --- a/server/api/src/db/conn.py +++ b/server/api/src/db/conn.py @@ -29,7 +29,7 @@ def fail(message): engine.connect() except Exception: if attempt < ATTEMPTS: - log(f'Could not connect to DB, retrying in {DELAY}') + log(f'Could not connect to DB ({engine.url}), retrying in {DELAY}') time.sleep(DELAY) attempt += 1 continue diff --git a/server/api/src/settings.py b/server/api/src/settings.py index 6dd328b9d..7445a7a2e 100644 --- a/server/api/src/settings.py +++ b/server/api/src/settings.py @@ -1,6 +1,6 @@ from utils.parse_env import env, to -from dotenv import load_dotenv -load_dotenv() +# from dotenv import load_dotenv +# load_dotenv() class Version: From cc995e0442cc00ec410c8b46091869506d5bd88b Mon Sep 17 00:00:00 2001 From: Matthew Webster Date: Wed, 2 Sep 2020 22:04:10 -0700 Subject: [PATCH 07/16] more docker 
and config changes plus README --- server/.env.example | 61 +++------ server/api/.env.example | 49 ------- server/api/README.md | 113 ++++++++++++++++ server/api/api.config | 35 +++++ server/api/code/README.md | 127 ------------------ server/api/code/lacity_data_api/config.py | 14 +- .../code/lacity_data_api/routers/councils.py | 8 +- .../code/lacity_data_api/routers/legacy.py | 37 +++-- .../code/lacity_data_api/routers/regions.py | 8 +- .../lacity_data_api/routers/request_types.py | 8 +- .../routers/service_requests.py | 11 +- server/api/src/db/conn.py | 2 +- server/docker-compose.yml | 16 +-- 13 files changed, 227 insertions(+), 262 deletions(-) delete mode 100644 server/api/.env.example create mode 100644 server/api/README.md create mode 100644 server/api/api.config delete mode 100644 server/api/code/README.md diff --git a/server/.env.example b/server/.env.example index 778fac167..a7d561e21 100644 --- a/server/.env.example +++ b/server/.env.example @@ -1,32 +1,25 @@ -############################### API SETTINGS ############################# - -# Server -HOST=0.0.0.0 -PORT=5000 -DEBUG=0 -ACCESS_LOG=1 -AUTO_RELOAD=1 -WORKERS=1 -TMP_DIR=./__tmp__ -UPDATE_ON_START=0 - -# Database -DATABASE_URL=postgresql://311_user:311_pass@db:5432/311_db -DATABASE_LOG_QUERIES=0 - -# Redis -REDIS_ENABLED=1 -REDIS_URL=redis://redis:6379 -REDIS_TTL_SECONDS=3600 - -# Picklebase -PICKLEBASE_ENABLED=0 -PICKLEBASE_BATCH_SIZE=400000 - -# Picklecache -PICKLECACHE_ENABLED=0 -PICKLECACHE_TTL_SECONDS=3600 + +######################### DOCKER-COMPOSE SETTINGS ######################## + +# host/public configs +COMPOSE_PROJECT_NAME=311_data +API_HOST_PORT=5000 +DB_HOST_PORT=5433 + +# api config +DEBUG=True +API_RESTART_POLICY=no +APP_PORT=5000 + +# db config +DB_HOST=host.docker.internal +DB_USER=311_user +DB_PASS=311_pass +DB_NAME=311_db +DB_ECHO=False + +############################ 3RD PARTY TOOLS ############################## # Socrata SOCRATA_TOKEN= @@ -45,14 +38,4 @@ SLACK_ERROR_CODES=[400, 
500] # Sendgrid SENDGRID_API_KEY= -######################### DOCKER-COMPOSE SETTINGS ######################## - -COMPOSE_PROJECT_NAME=311_data -DB_USER=311_user -DB_PASS=311_pass -DB_NAME=311_db -DB_HOST_PORT=5433 -API_HOST_PORT=5000 -API_RESTART_POLICY=no - -################################# OVERRIDES ############################## +############################################################################ diff --git a/server/api/.env.example b/server/api/.env.example deleted file mode 100644 index 4e0236137..000000000 --- a/server/api/.env.example +++ /dev/null @@ -1,49 +0,0 @@ -# including 2 paths to src: tools set relative to Workspace Dir, debugpy relative to api Dir -PYTHONPATH=./server/api/src:./server/api/code:./src - -# Set debug -DEBUG=True -UPDATE_ON_START=0 #TODO: Replace this -USE_FILE_CACHE=True # enables picklecache -TEMP_FOLDER=./__tmp__ # location of picklecache files - -# Application -API_HOST=127.0.0.1 -API_PORT=5000 -API_LEGACY_MODE=True - -# Database (for dev set either to host.docker.internal or localhost) -DB_DSN=postgresql://311_user:311_pass@host.docker.internal:5433/311_db -DB_ECHO=True - -# Redis cache -REDIS_ENABLED=0 -REDIS_URL=redis://redis:6379 -REDIS_TTL_SECONDS=3600 - -# Socrata data source -SOCRATA_TOKEN=6b5lwk1jHSQTgx7PAVFKOOdt2 -SOCRATA_BATCH_SIZE=50000 - -# Github code source -GITHUB_TOKEN= -GITHUB_ISSUES_URL=https://api.github.com/repos/hackforla/311-data-support/issues -GITHUB_PROJECT_URL= -GITHUB_SHA=DEVELOPMENT - -# Slack error reporting -SLACK_WEBHOOK_URL= -SLACK_ERROR_CODES=[400, 500] - -# Sendgrid email -SENDGRID_API_KEY= - -# DOCKER -PORT=5000 -COMPOSE_PROJECT_NAME=311_data -DB_USER=311_user -DB_PASS=311_pass -DB_NAME=311_db -DB_HOST_PORT=5433 -API_HOST_PORT=5000 -API_RESTART_POLICY=no diff --git a/server/api/README.md b/server/api/README.md new file mode 100644 index 000000000..e84c88249 --- /dev/null +++ b/server/api/README.md @@ -0,0 +1,113 @@ +# 311 API + +## Changes + +* API now uses FastAPI (uvicorn/ASGI) +* Data 
access uses Gino, asyncpg +* New code is in the code directory +* Entry point is code/run.py +* Legacy code remains in src directory +* Current API compatibility: code/lacity_data_api/routers/legacy.py + +## Running using Docker Compose + +Setting for Docker Compose are in the .env file in the server directory. +The DB_HOST setting should work if it is set to: host.docker.internal + +To start the DB and API from the server directory run: + +```bash +docker-compose up +``` + +To try the API: + +* Ensure Test API is running with welcome message at http://localhost:5000/ +* Test API using Open API (Swagger) at http://localhost:5000/docs +* Run the ReactJS app ```npm start``` from /client to make sure frontend works + +## Testing + +### Full filter (with zoom and bounds) + +```json +{ + "startDate":"01/01/2020", + "endDate":"08/27/2020", + "ncList":[ + 52, + 46, + 128, + 54, + 104, + 76, + 97, + 121, + 55 + ], + "requestTypes":[ + "Dead Animal Removal", + "Homeless Encampment", + "Single Streetlight Issue", + "Multiple Streetlight Issue", + "Feedback" + ], + "zoom":13, + "bounds":{ + "north":34.0731374116421, + "east":-118.18010330200195, + "south":33.97582290387967, + "west":-118.41201782226564 + } +} +``` + +## TODOs + +* add more pytests +* finish routes +* alembic migrations +* data seeds +* Add telemetry: +OpenTelemetry instrumentors exist for FastAPI, asyncpg, SQLAlchemy +https://opentelemetry-python.readthedocs.io/ +https://opentelemetry.lightstep.com/ + +## Odd and Ends + +To time API calls using curl: add ```--write-out '%{time_total}\n' --output /dev/null --silent``` + +For example: + +```bash +curl -X POST "http://localhost:5000/map/pins" -H "accept: application/json" -H "Content-Type: application/json" -d "{\"startDate\":\"01/01/2020\",\"endDate\":\"08/27/2020\",\"ncList\":[52,46,128,54,104,76,97,121,55],\"requestTypes\":[\"Homeless Encampment\"]}" --write-out '%{time_total}\n' --output /dev/null --silent +``` + +Here's how to go about killing any process 
orphaned by VS Code using port 5000 + +```bash +lsof -ti tcp:5000 | xargs kill +``` + +### Format for comparison reports + +{ + "startDate":"01/01/2020", + "endDate":"08/27/2020", + "requestTypes":[ + "Bulky Items" + ], + "chart":"frequency", + "set1":{ + "district":"nc", + "list":[ + 6 + ] + }, + "set2":{ + "district":"nc", + "list":[ + 9 + ] + } +} diff --git a/server/api/api.config b/server/api/api.config new file mode 100644 index 000000000..728b7406d --- /dev/null +++ b/server/api/api.config @@ -0,0 +1,35 @@ + +# Application Settings: sets picklecache enable and file location +DEBUG=True +UPDATE_ON_START=0 +USE_FILE_CACHE=True +TEMP_FOLDER=./__tmp__ +API_LEGACY_MODE=True + +# Database Settings: host.docker.internal or localhost +DB_HOST=localhost +DB_PORT=5433 +DB_PASSWORD=311_pass +DB_ECHO=True + +# Redis cache +REDIS_ENABLED=0 +REDIS_URL=redis://redis:6379 +REDIS_TTL_SECONDS=3600 + +# Socrata data source +SOCRATA_TOKEN=6b5lwk1jHSQTgx7PAVFKOOdt2 +SOCRATA_BATCH_SIZE=50000 + +# Github code source +GITHUB_TOKEN= +GITHUB_ISSUES_URL=https://api.github.com/repos/hackforla/311-data-support/issues +GITHUB_PROJECT_URL= +GITHUB_SHA=DEVELOPMENT + +# Slack error reporting +SLACK_WEBHOOK_URL= +SLACK_ERROR_CODES=[400, 500] + +# Sendgrid email +SENDGRID_API_KEY= diff --git a/server/api/code/README.md b/server/api/code/README.md deleted file mode 100644 index fd13adf26..000000000 --- a/server/api/code/README.md +++ /dev/null @@ -1,127 +0,0 @@ - -To try it: - -Start new server from API directory with python code/run -Ensure Test API is running with welcome message at http://localhost:5000/ -Test API using OpenAPI at http://localhost:5000/docs -Run the ReactJS app to make sure frontend works - - -TODO: -* add some pytests -* finish routes -* alembic migrations -* data seeds - - -add ```--write-out '%{time_total}\n' --output /dev/null --silent``` to curls - -```bash -curl -X POST "http://localhost:5000/map/pins" -H "accept: application/json" -H "Content-Type: application/json" 
-d "{\"startDate\":\"01/01/2020\",\"endDate\":\"08/27/2020\",\"ncList\":[52,46,128,54,104,76,97,121,55],\"requestTypes\":[\"Homeless Encampment\"]}" --write-out '%{time_total}\n' --output /dev/null --silent -``` - -killing -```bash -lsof -i :5000 -| kill -9 - -``` - -### Full filter OLD FORMAT -```json -{ - "startDate":"01/01/2020", - "endDate":"08/27/2020", - "ncList":[ - 52, - 46, - 128, - 54, - 104, - 76, - 97, - 121, - 55 - ], - "requestTypes":[ - "Dead Animal Removal", - "Homeless Encampment", - "Single Streetlight Issue", - "Multiple Streetlight Issue", - "Feedback", - "Bulky Items", - "Electronic Waste", - "Metal/Household Appliances", - "Graffiti Removal", - "Illegal Dumping Pickup", - "Other" - ], - "zoom":13, - "bounds":{ - "north":34.0731374116421, - "east":-118.18010330200195, - "south":33.97582290387967, - "west":-118.41201782226564 - } -} -``` - - -### Full filter: NEW FORMAT -```json -{ - "startDate":"2020-01-01", - "endDate":"2020-08-27", - "ncList":[ - 52, - 46, - 128, - 54, - 104, - 76, - 97, - 121, - 55 - ], - "requestTypes":[ - 1, - 2, - 3, - 4, - 5, - 6, - 7, - 8, - 9, - 10, - 11, - 12 - ], - "zoom":13, - "bounds":{ - "north":34.0731374116421, - "east":-118.18010330200195, - "south":33.97582290387967, - "west":-118.41201782226564 - } -} -``` - - -# Old format - -{ - "startDate": "01/01/2020", - "endDate": "08/27/2020", - "ncList": [52, 46, 128, 54, 104, 76, 97, 121, 55], - "requestTypes": ["Homeless Encampment"] -} - -# New format - -{ - "startDate": "2020-01-01", - "endDate": "2020-08-27", - "ncList": [52, 46, 128, 54, 104, 76, 97, 121, 55], - "requestTypes": [6] -} diff --git a/server/api/code/lacity_data_api/config.py b/server/api/code/lacity_data_api/config.py index 87dfbcaf0..893a3dd0a 100644 --- a/server/api/code/lacity_data_api/config.py +++ b/server/api/code/lacity_data_api/config.py @@ -3,7 +3,7 @@ from starlette.config import Config, environ from starlette.datastructures import Secret -config = Config(".env") +config = 
Config("api.config") # checking for testing or debug DEBUG = config("DEBUG", cast=bool, default=False) @@ -11,11 +11,11 @@ # getting database configuration DB_DRIVER = config("DB_DRIVER", default="postgresql") -DB_HOST = config("DB_HOST", default=None) -DB_PORT = config("DB_PORT", cast=int, default=None) -DB_USER = config("DB_USER", default=None) +DB_HOST = config("DB_HOST", default="localhost") +DB_PORT = config("DB_PORT", cast=int, default=5432) +DB_USER = config("DB_USER", default="311_user") DB_PASSWORD = config("DB_PASSWORD", cast=Secret, default=None) -DB_DATABASE = config("DB_DATABASE", default=None) +DB_DATABASE = config("DB_DATABASE", default="311_db") if TESTING: if DB_DATABASE: @@ -34,10 +34,6 @@ ), ) -if TESTING: - if DB_DSN: - DB_DSN += "_test" - DB_POOL_MIN_SIZE = config("DB_POOL_MIN_SIZE", cast=int, default=1) DB_POOL_MAX_SIZE = config("DB_POOL_MAX_SIZE", cast=int, default=16) DB_ECHO = config("DB_ECHO", cast=bool, default=True) diff --git a/server/api/code/lacity_data_api/routers/councils.py b/server/api/code/lacity_data_api/routers/councils.py index b5df1f058..81ec99122 100644 --- a/server/api/code/lacity_data_api/routers/councils.py +++ b/server/api/code/lacity_data_api/routers/councils.py @@ -24,12 +24,12 @@ class Config: @router.get("/", response_model=Items) -async def index(): +async def get_all_councils(): result = await db.all(Council.query) return result -@router.get("/{cid}") -async def get_council(cid: int): - result = await Council.get_or_404(cid) +@router.get("/{id}") +async def get_council(id: int): + result = await Council.get_or_404(id) return result.to_dict() diff --git a/server/api/code/lacity_data_api/routers/legacy.py b/server/api/code/lacity_data_api/routers/legacy.py index 756e02911..cea2418e0 100644 --- a/server/api/code/lacity_data_api/routers/legacy.py +++ b/server/api/code/lacity_data_api/routers/legacy.py @@ -1,5 +1,6 @@ from typing import List, Optional import datetime +from enum import Enum from fastapi import responses 
from fastapi import APIRouter @@ -82,27 +83,37 @@ class Feedback(BaseModel): body: str -@router.get("/status/api", description="Provides the status of backend systems") -async def status_api(): - result = await status.api() - return result - - -@router.get("/status/db") -async def status_db(): - result = await status.database() - return result +class StatusTypes(str, Enum): + api = "api" + database = "db" + system = "sys" -@router.get("/status/sys") -async def status_system(): - result = await status.system() +@router.get("/status/{status_type}", + description="Provides the status of backend systems") +async def status_check(status_type: StatusTypes): + if status_type == StatusTypes.api: + result = await status.api() + if status_type == StatusTypes.database: + result = await status.database() + if status_type == StatusTypes.system: + result = await status.system() return result @router.get("/servicerequest/{srnumber}") async def get_service_request_by_string(srnumber: str): result = requests.item_query(srnumber) + # TODO: clean this up with 3.8 syntax + # convert createddate and closeddate to epochs for app compatibility + if (result['createddate']): + result['createddate'] = int(result['createddate'].strftime('%s')) + if (result['closeddate']): + result['closeddate'] = int(result['closeddate'].strftime('%s')) + if (result['updateddate']): + result['updateddate'] = int(result['updateddate'].strftime('%s')) + if (result['servicedate']): + result['servicedate'] = int(result['servicedate'].strftime('%s')) return result diff --git a/server/api/code/lacity_data_api/routers/regions.py b/server/api/code/lacity_data_api/routers/regions.py index 9e244b5c3..0e93e229c 100644 --- a/server/api/code/lacity_data_api/routers/regions.py +++ b/server/api/code/lacity_data_api/routers/regions.py @@ -23,12 +23,12 @@ class Config: @router.get("/", response_model=Items) -async def index(): +async def get_all_regions(): result = await db.all(Region.query) return result 
-@router.get("/{rid}") -async def get_region(rid: int): - result = await Region.get_or_404(rid) +@router.get("/{id}") +async def get_region(id: int): + result = await Region.get_or_404(id) return result.to_dict() diff --git a/server/api/code/lacity_data_api/routers/request_types.py b/server/api/code/lacity_data_api/routers/request_types.py index 451674ddc..6b2516415 100644 --- a/server/api/code/lacity_data_api/routers/request_types.py +++ b/server/api/code/lacity_data_api/routers/request_types.py @@ -21,11 +21,11 @@ class Config: @router.get("/", response_model=Items) -async def index(): +async def get_all_request_types(): return await db.all(RequestType.query) -@router.get("/{tid}") -async def get_request_type(tid: int): - request_type = await RequestType.get_or_404(tid) +@router.get("/{id}") +async def get_request_type(id: int): + request_type = await RequestType.get_or_404(id) return request_type.to_dict() diff --git a/server/api/code/lacity_data_api/routers/service_requests.py b/server/api/code/lacity_data_api/routers/service_requests.py index 678da9c1d..cb51bff6b 100644 --- a/server/api/code/lacity_data_api/routers/service_requests.py +++ b/server/api/code/lacity_data_api/routers/service_requests.py @@ -29,7 +29,7 @@ class Config: @router.get("/", response_model=Items) -async def index(skip: int = 0, limit: int = 100): +async def get_all_service_requests(skip: int = 0, limit: int = 100): async with db.transaction(): cursor = await ServiceRequest.query.gino.iterate() if skip > 0: @@ -39,9 +39,12 @@ async def index(skip: int = 0, limit: int = 100): return result -@router.get("/{srid}") -async def get_service_request(srid: int): - svcreq = await ServiceRequest.get_or_404(srid) +@router.get("/{id}", description=""" + The service request ID is the integer created from the srnumber + when the initial "1-" is removed. 
+ """) +async def get_service_request(id: int): + svcreq = await ServiceRequest.get_or_404(id) return svcreq.to_dict() diff --git a/server/api/src/db/conn.py b/server/api/src/db/conn.py index a9be14950..eacbce994 100644 --- a/server/api/src/db/conn.py +++ b/server/api/src/db/conn.py @@ -8,7 +8,7 @@ def get_engine(url): - ATTEMPTS = 5 + ATTEMPTS = 3 DELAY = 3 def fail(message): diff --git a/server/docker-compose.yml b/server/docker-compose.yml index 99d2ead52..62411d961 100644 --- a/server/docker-compose.yml +++ b/server/docker-compose.yml @@ -16,12 +16,12 @@ services: volumes: - backend_data:/var/lib/postgresql/data - redis: - container_name: 311-redis - build: ./redis - restart: always - expose: - - 6379 + # redis: + # container_name: 311-redis + # build: ./redis + # restart: always + # expose: + # - 6379 api: container_name: 311-api @@ -29,13 +29,13 @@ services: restart: ${API_RESTART_POLICY} env_file: .env ports: - - target: ${PORT} + - target: ${APP_PORT} published: ${API_HOST_PORT} volumes: - ./api/:/home/api depends_on: - db - - redis + # - redis volumes: backend_data: From 58a340a5a987f98cfa2f821eae91744be77e9e7f Mon Sep 17 00:00:00 2001 From: Matthew Webster Date: Wed, 2 Sep 2020 22:23:49 -0700 Subject: [PATCH 08/16] default LEGACY_MODE to True --- server/api/code/lacity_data_api/config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/api/code/lacity_data_api/config.py b/server/api/code/lacity_data_api/config.py index 893a3dd0a..5d15fdd17 100644 --- a/server/api/code/lacity_data_api/config.py +++ b/server/api/code/lacity_data_api/config.py @@ -45,7 +45,7 @@ DB_RETRY_INTERVAL = config("DB_RETRY_INTERVAL", cast=int, default=1) # check whether running in legacy mode -API_LEGACY_MODE = config('API_LEGACY_MODE', cast=bool, default=False) +API_LEGACY_MODE = config('API_LEGACY_MODE', cast=bool, default=True) # the legacy code needs these created as environment settings if API_LEGACY_MODE: From d1b5daf301f345bb5c058d07430cd8c91c07d93e Mon 
Sep 17 00:00:00 2001 From: Matthew Webster Date: Wed, 2 Sep 2020 22:41:28 -0700 Subject: [PATCH 09/16] DB seed needs database URL --- server/docker-compose.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/server/docker-compose.yml b/server/docker-compose.yml index 62411d961..6c1cfcc52 100644 --- a/server/docker-compose.yml +++ b/server/docker-compose.yml @@ -28,6 +28,8 @@ services: build: ./api restart: ${API_RESTART_POLICY} env_file: .env + environment: + - DATABASE_URL: postgresql://${DB_USER}:${DB_PASS}@${DB_HOST}:5432/${DB_NAME} ports: - target: ${APP_PORT} published: ${API_HOST_PORT} From e53dbab9d49f04402abd1ced019493477d44872d Mon Sep 17 00:00:00 2001 From: Matthew Webster Date: Wed, 2 Sep 2020 22:43:36 -0700 Subject: [PATCH 10/16] YAML fix --- server/docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/docker-compose.yml b/server/docker-compose.yml index 6c1cfcc52..88fd84a2b 100644 --- a/server/docker-compose.yml +++ b/server/docker-compose.yml @@ -29,7 +29,7 @@ services: restart: ${API_RESTART_POLICY} env_file: .env environment: - - DATABASE_URL: postgresql://${DB_USER}:${DB_PASS}@${DB_HOST}:5432/${DB_NAME} + DATABASE_URL: postgresql://${DB_USER}:${DB_PASS}@${DB_HOST}:5432/${DB_NAME} ports: - target: ${APP_PORT} published: ${API_HOST_PORT} From f32f6284c1c7c1f6fc9d3de23670fc28837b017d Mon Sep 17 00:00:00 2001 From: Matthew Webster Date: Wed, 2 Sep 2020 23:07:55 -0700 Subject: [PATCH 11/16] compose networking --- server/docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/docker-compose.yml b/server/docker-compose.yml index 88fd84a2b..360abdfe3 100644 --- a/server/docker-compose.yml +++ b/server/docker-compose.yml @@ -29,7 +29,7 @@ services: restart: ${API_RESTART_POLICY} env_file: .env environment: - DATABASE_URL: postgresql://${DB_USER}:${DB_PASS}@${DB_HOST}:5432/${DB_NAME} + DATABASE_URL: postgresql://${DB_USER}:${DB_PASS}@db:5432/${DB_NAME} ports: - target: ${APP_PORT} 
published: ${API_HOST_PORT} From 11b9441cd1536b0c01f775036c5149e36abc35a2 Mon Sep 17 00:00:00 2001 From: Matthew Webster Date: Wed, 2 Sep 2020 23:49:27 -0700 Subject: [PATCH 12/16] set test default config --- server/api/tests/__init__.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/server/api/tests/__init__.py b/server/api/tests/__init__.py index 9cd84a4c4..fbdbc51ca 100644 --- a/server/api/tests/__init__.py +++ b/server/api/tests/__init__.py @@ -2,6 +2,14 @@ import sys from os.path import join, dirname +if os.getenv("DATABASE_URL") is None: + os.environ["DATABASE_URL"] = "postgresql://311_user:311_pass@localhost:5433/311_db" + +os.environ["PICKLEBASE_ENABLED"] = "False" +os.environ["PICKLECACHE_ENABLED"] = "False" +os.environ["TMP_DIR"] = "./__tmp__" + +print(os.environ) sys.path.append(join(dirname(__file__), '../src')) From aef7f67c70c7bfb0f1da736ebd0bcb02016b2719 Mon Sep 17 00:00:00 2001 From: Matthew Webster Date: Thu, 3 Sep 2020 08:24:51 -0700 Subject: [PATCH 13/16] updated missing .env.example settings --- server/.env.example | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/server/.env.example b/server/.env.example index a7d561e21..f39d40ff2 100644 --- a/server/.env.example +++ b/server/.env.example @@ -2,8 +2,9 @@ ######################### DOCKER-COMPOSE SETTINGS ######################## -# host/public configs COMPOSE_PROJECT_NAME=311_data + +# host/public configs API_HOST_PORT=5000 DB_HOST_PORT=5433 @@ -12,8 +13,9 @@ DEBUG=True API_RESTART_POLICY=no APP_PORT=5000 -# db config -DB_HOST=host.docker.internal +# db connection config +DB_HOST=db +DB_PORT=5432 DB_USER=311_user DB_PASS=311_pass DB_NAME=311_db From a4cb368081136e89dd3bff60e8d2b0c5e1ad9cb0 Mon Sep 17 00:00:00 2001 From: Jake Mensch Date: Thu, 3 Sep 2020 09:02:20 -0700 Subject: [PATCH 14/16] postman tests accept either 400 or 422 for bad input --- .../311-CI.postman_collection.json | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff 
--git a/server/postman/collections/311-CI.postman_collection.json b/server/postman/collections/311-CI.postman_collection.json index 5845e9679..445bf487e 100644 --- a/server/postman/collections/311-CI.postman_collection.json +++ b/server/postman/collections/311-CI.postman_collection.json @@ -560,8 +560,8 @@ "script": { "id": "eaf88891-878e-4ab6-b1bf-b03101326461", "exec": [ - "pm.test(\"Status code is 400\", function () {", - " pm.response.to.have.status(400);", + "pm.test(\"Status code is 400 or 422\", function () {", + " pm.expect(pm.response.code).to.be.oneOf([400, 422]);", "});" ], "type": "text/javascript" @@ -608,8 +608,8 @@ "script": { "id": "7b817388-1582-4ae6-a528-55cd117e6e85", "exec": [ - "pm.test(\"Status code is 400\", function () {", - " pm.response.to.have.status(400);", + "pm.test(\"Status code is 400 or 422\", function () {", + " pm.expect(pm.response.code).to.be.oneOf([400, 422]);", "});" ], "type": "text/javascript" @@ -655,8 +655,8 @@ "script": { "id": "b9124b20-d2fc-4207-bf57-43be22772f69", "exec": [ - "pm.test(\"Status code is 400\", function () {", - " pm.response.to.have.status(400);", + "pm.test(\"Status code is 400 or 422\", function () {", + " pm.expect(pm.response.code).to.be.oneOf([400, 422]);", "});", "" ], @@ -704,10 +704,9 @@ "script": { "id": "4bfd4f4b-3f04-4208-909b-273fec1fff04", "exec": [ - "pm.test(\"Status code is 400\", function () {", - " pm.response.to.have.status(400);", - "});", - "" + "pm.test(\"Status code is 400 or 422\", function () {", + " pm.expect(pm.response.code).to.be.oneOf([400, 422]);", + "});" ], "type": "text/javascript" } From 5b916f8c81adb936ac28d55b58c9e575e48a50d2 Mon Sep 17 00:00:00 2001 From: Jake Mensch Date: Thu, 3 Sep 2020 17:02:19 -0700 Subject: [PATCH 15/16] comparison set validation --- server/api/code/lacity_data_api/routers/legacy.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/server/api/code/lacity_data_api/routers/legacy.py 
b/server/api/code/lacity_data_api/routers/legacy.py index cea2418e0..c106f934d 100644 --- a/server/api/code/lacity_data_api/routers/legacy.py +++ b/server/api/code/lacity_data_api/routers/legacy.py @@ -5,6 +5,7 @@ from fastapi import responses from fastapi import APIRouter from pydantic import BaseModel, validator +from pydantic.dataclasses import dataclass from services import status, map, visualizations, requests, comparison, github, email @@ -65,10 +66,19 @@ class Cluster(BaseModel): Clusters = List[Cluster] -class Set(dict): +@dataclass +class Set: district: str list: List[int] + @validator('district') + def district_is_valid(cls, v): + assert v in ['cc', 'nc'], 'district must be either "nc" or "cc".' + return v + + def __getitem__(cls, item): + return getattr(cls, item) + class Comparison(BaseModel): startDate: str From a1f94c7ced70d48eaf92db13b76b9ea191635e1f Mon Sep 17 00:00:00 2001 From: Matthew Webster Date: Tue, 8 Sep 2020 09:00:27 -0700 Subject: [PATCH 16/16] model models and added simple entity cache --- server/api/code/lacity_data_api/config.py | 4 + .../code/lacity_data_api/models/clusters.py | 99 ++++++++-- .../code/lacity_data_api/models/council.py | 9 + .../api/code/lacity_data_api/models/region.py | 9 + .../lacity_data_api/models/service_request.py | 13 ++ .../lacity_data_api/routers/api_models.py | 86 +++++++++ .../api/code/lacity_data_api/routers/index.py | 6 +- .../code/lacity_data_api/routers/legacy.py | 93 +-------- .../api/code/lacity_data_api/routers/shim.py | 182 +++++++++++++++--- .../code/lacity_data_api/routers/utilities.py | 12 ++ 10 files changed, 379 insertions(+), 134 deletions(-) create mode 100644 server/api/code/lacity_data_api/routers/api_models.py create mode 100644 server/api/code/lacity_data_api/routers/utilities.py diff --git a/server/api/code/lacity_data_api/config.py b/server/api/code/lacity_data_api/config.py index 5d15fdd17..72de95bb1 100644 --- a/server/api/code/lacity_data_api/config.py +++ 
b/server/api/code/lacity_data_api/config.py @@ -47,6 +47,7 @@ # check whether running in legacy mode API_LEGACY_MODE = config('API_LEGACY_MODE', cast=bool, default=True) +# TODO: figure out how to remove dependency on DATABASE_URL from services # the legacy code needs these created as environment settings if API_LEGACY_MODE: environ['DATABASE_URL'] = str(DB_DSN) @@ -60,3 +61,6 @@ for k, v in sorted(os.environ.items()): print(f'\033[92m{k}\033[0m: {v}') print(f"\n\033[93mDatabase\033[0m: {DB_DSN}\n") + +# create empty cache object to populate at runtime +cache = {} diff --git a/server/api/code/lacity_data_api/models/clusters.py b/server/api/code/lacity_data_api/models/clusters.py index c2867f707..8630485fe 100644 --- a/server/api/code/lacity_data_api/models/clusters.py +++ b/server/api/code/lacity_data_api/models/clusters.py @@ -6,12 +6,13 @@ from sqlalchemy import and_ from .service_request import ServiceRequest -from .request_type import get_types_dict -from .council import Council +from .region import Region from . 
import db +from ..config import cache -DEFAULT_CITY_ZOOM = 12 # a click on a city point zooms from 10 to 12 +DEFAULT_CITY_ZOOM = 11 # a click on a city point zooms from 10 to 12 +DEFAULT_REGION_ZOOM = 12 # a click on a city point zooms from 10 to 12 DEFAULT_COUNCIL_ZOOM = 13 # a click on a council point zooms to 14 DEFAULT_LATITUDE = 34.0522 DEFAULT_LONGITUDE = -118.2437 @@ -71,15 +72,60 @@ async def get_clusters_for_city( return cluster_list -# TODO: same as above by group by region of each council -def get_clusters_for_regions(pins, zoom, bounds, options): +async def get_clusters_for_regions( + start_date: datetime.date, + end_date: datetime.date, + type_ids: List[int], + council_ids: List[int], + zoom_current: int +) -> List[Cluster]: """ - Cluster pins by region + Cluster service request pins by council regions + + Args: + start_date (date): beginning of date range service was requested + end_date (date): end of date range service was requested + type_ids (List[int]): the request type ids to match on + council_ids (List[int]): the council ids to match on + + Returns: + cluster: a list of cluster objects """ - print(zoom) + + # TODO: CACHE 'region-reqs:start-end-types-councils' + result = await ( + db.select( + [ + ServiceRequest.region_id, + db.func.count() + ] + ).where( + and_( + ServiceRequest.created_date >= start_date, + ServiceRequest.created_date <= end_date, + ServiceRequest.type_id.in_(type_ids), + ServiceRequest.council_id.in_(council_ids), + ) + ).group_by( + ServiceRequest.region_id + ).gino.all() + ) + + cluster_list = [] + + for row in result: + region = await Region.get(row[0]) + cluster_list.append(Cluster( + count=row[1], + expansion_zoom=DEFAULT_REGION_ZOOM, + id=region.region_id, + latitude=region.latitude, + longitude=region.longitude + )) + + return cluster_list -# TODO: same as above by group by council async def get_clusters_for_councils( start_date: datetime.date, end_date: datetime.date, @@ -88,17 +134,19 @@ async def 
get_clusters_for_councils( zoom_current: int ) -> List[Cluster]: """ - Cluster pins for the entire city + Cluster service request pins by council Args: start_date (date): beginning of date range service was requested end_date (date): end of date range service was requested type_ids (List[int]): the request type ids to match on + council_ids (List[int]): the council ids to match on Returns: - cluster: a cluster object + cluster: a list of cluster objects """ + # TODO: CACHE 'council-reqs:start-end-types-councils' result = await ( db.select( [ @@ -120,14 +168,23 @@ async def get_clusters_for_councils( # zoom_next = (zoom_current + 1) or DEFAULT_COUNCIL_ZOOM cluster_list = [] + # TODO: replace this with a caching solution + # returns dictionary with council id as key and name, lat, long + # council_result = await db.all(Council.query) + # councils = [ + # (i.council_id, [i.council_name, i.latitude, i.longitude]) + # for i in council_result + # ] + councils_dict = cache.get("councils_dict") + for row in result: - council = await Council.get(row[0]) + council = councils_dict.get(row[0]) cluster_list.append(Cluster( count=row[1], expansion_zoom=DEFAULT_COUNCIL_ZOOM, - id=council.council_id, - latitude=council.latitude, - longitude=council.longitude + id=row[0], + latitude=council[1], + longitude=council[2] )) return cluster_list @@ -149,7 +206,7 @@ async def get_points( council_ids: (List[int]): the council ids to match Returns: - a list of latitude and logitude pairs of service locations + a list of latitude and logitude pairs of service request locations """ result = await ( @@ -170,12 +227,11 @@ async def get_points( point_list = [] for row in result: - point_list.append([row[0], row[1]]) + point_list.append([row.latitude, row.longitude]) return point_list -# TODO: same as above by group by council async def get_clusters_for_bounds( start_date: datetime.date, end_date: datetime.date, @@ -219,7 +275,7 @@ async def get_clusters_for_bounds( ) # TODO: clean this up. 
goes in [longitude, latitude] format - points = [[i[2], i[1]] for i in result] + points = [[row.longitude, row.latitude] for row in result] index = pysupercluster.SuperCluster( numpy.array(points), @@ -235,14 +291,15 @@ async def get_clusters_for_bounds( zoom=zoom_current ) - types_dict = await get_types_dict() + # TODO: replace this with a proper caching solution + types_dict = cache.get("types_dict") for item in cluster_list: # change single item clusters into points if item['count'] == 1: pin = result[item['id']] # cluster id matches the result row - item['srnumber'] = "1-" + str(pin[0]) - item['requesttype'] = types_dict[pin[3]] + item['srnumber'] = "1-" + str(pin.request_id) + item['requesttype'] = types_dict[pin.type_id] del item['expansion_zoom'] return cluster_list diff --git a/server/api/code/lacity_data_api/models/council.py b/server/api/code/lacity_data_api/models/council.py index ceb4df737..b43cd8db6 100644 --- a/server/api/code/lacity_data_api/models/council.py +++ b/server/api/code/lacity_data_api/models/council.py @@ -9,3 +9,12 @@ class Council(db.Model): region_id = db.Column(db.SmallInteger) latitude = db.Column(db.Float) longitude = db.Column(db.Float) + + +async def get_councils_dict(): + result = await db.all(Council.query) + councils_dict = [ + (i.council_id, (i.council_name, i.latitude, i.longitude)) + for i in result + ] + return dict(councils_dict) diff --git a/server/api/code/lacity_data_api/models/region.py b/server/api/code/lacity_data_api/models/region.py index ecb14a261..b0af10bfd 100644 --- a/server/api/code/lacity_data_api/models/region.py +++ b/server/api/code/lacity_data_api/models/region.py @@ -8,3 +8,12 @@ class Region(db.Model): region_name = db.Column(db.String) latitude = db.Column(db.Float) longitude = db.Column(db.Float) + + +async def get_regions_dict(): + result = await db.all(Region.query) + regions_dict = [ + (i.region_id, (i.region_name, i.latitude, i.longitude)) + for i in result + ] + return dict(regions_dict) diff 
--git a/server/api/code/lacity_data_api/models/service_request.py b/server/api/code/lacity_data_api/models/service_request.py index 2aaf6c433..457000ac3 100644 --- a/server/api/code/lacity_data_api/models/service_request.py +++ b/server/api/code/lacity_data_api/models/service_request.py @@ -1,3 +1,5 @@ +from typing import List + from . import db @@ -9,6 +11,17 @@ class ServiceRequest(db.Model): closed_date = db.Column(db.Date) type_id = db.Column(db.SmallInteger) council_id = db.Column(db.SmallInteger) + region_id = db.Column(db.SmallInteger) address = db.Column(db.String) latitude = db.Column(db.Float) longitude = db.Column(db.Float) + + +async def get_open_requests() -> List[ServiceRequest]: + '''Get a list of RequestTypes from their type_names''' + result = await db.all( + ServiceRequest.query.where( + ServiceRequest.closed_date == None # noqa + ) + ) + return result diff --git a/server/api/code/lacity_data_api/routers/api_models.py b/server/api/code/lacity_data_api/routers/api_models.py new file mode 100644 index 000000000..d03918d4b --- /dev/null +++ b/server/api/code/lacity_data_api/routers/api_models.py @@ -0,0 +1,86 @@ +from typing import List, Optional +import datetime +from enum import Enum + +from pydantic import BaseModel, validator +from pydantic.dataclasses import dataclass + + +class Bounds(BaseModel): + north: float + south: float + east: float + west: float + + +class Filter(BaseModel): + startDate: str + endDate: str + ncList: List[int] + requestTypes: List[str] + zoom: Optional[int] = None + bounds: Optional[Bounds] = None + + @validator('startDate', 'endDate') + def parse_date(cls, v): + if isinstance(v, str): + try: + v = datetime.datetime.strptime(v, '%m/%d/%Y') + except ValueError: + try: + v = datetime.datetime.strptime(v, '%Y-%m-%d') + except ValueError: + pass + return v + + +class Pin(BaseModel): + srnumber: str + requesttype: str + latitude: float + longitude: float + + +class Cluster(BaseModel): + count: int + expansion_zoom: 
Optional[int] + id: int + latitude: float + longitude: float + + +@dataclass +class Set: + district: str + list: List[int] + + @validator('district') + def district_is_valid(cls, v): + assert v in ['cc', 'nc'], 'district must be either "nc" or "cc".' + return v + + def __getitem__(cls, item): + return getattr(cls, item) + + +class Comparison(BaseModel): + startDate: str + endDate: str + requestTypes: List[str] + set1: Set + set2: Set + + +class Feedback(BaseModel): + title: str + body: str + + +class StatusTypes(str, Enum): + api = "api" + database = "db" + system = "sys" + + +Pins = List[Pin] +Clusters = List[Cluster] diff --git a/server/api/code/lacity_data_api/routers/index.py b/server/api/code/lacity_data_api/routers/index.py index d26837768..9a5b4dd68 100644 --- a/server/api/code/lacity_data_api/routers/index.py +++ b/server/api/code/lacity_data_api/routers/index.py @@ -1,8 +1,12 @@ from fastapi import APIRouter +from .utilities import build_cache +from ..config import cache + router = APIRouter() @router.get("/") async def index(): - return {"message": "Hello, new index!"} + await build_cache() + return cache diff --git a/server/api/code/lacity_data_api/routers/legacy.py b/server/api/code/lacity_data_api/routers/legacy.py index c106f934d..ece34585e 100644 --- a/server/api/code/lacity_data_api/routers/legacy.py +++ b/server/api/code/lacity_data_api/routers/legacy.py @@ -1,13 +1,11 @@ -from typing import List, Optional -import datetime -from enum import Enum - from fastapi import responses from fastapi import APIRouter -from pydantic import BaseModel, validator -from pydantic.dataclasses import dataclass +from .api_models import ( + StatusTypes, Filter, Pins, Comparison, Feedback +) from services import status, map, visualizations, requests, comparison, github, email +from .utilities import build_cache router = APIRouter() @@ -17,92 +15,11 @@ """ -class Bounds(BaseModel): - north: float - south: float - east: float - west: float - - -class Filter(BaseModel): - 
startDate: str - endDate: str - ncList: List[int] - requestTypes: List[str] - zoom: Optional[int] = None - bounds: Optional[Bounds] = None - - @validator('startDate', 'endDate') - def parse_date(cls, v): - if isinstance(v, str): - try: - v = datetime.datetime.strptime(v, '%m/%d/%Y') - except ValueError: - try: - v = datetime.datetime.strptime(v, '%Y-%m-%d') - except ValueError: - pass - return v - - -class Pin(BaseModel): - srnumber: str - requesttype: str - latitude: float - longitude: float - - -Pins = List[Pin] - - -class Cluster(BaseModel): - count: int - expansion_zoom: Optional[int] - id: int - latitude: float - longitude: float - - -Clusters = List[Cluster] - - -@dataclass -class Set: - district: str - list: List[int] - - @validator('district') - def district_is_valid(cls, v): - assert v in ['cc', 'nc'], 'district must be either "nc" or "cc".' - return v - - def __getitem__(cls, item): - return getattr(cls, item) - - -class Comparison(BaseModel): - startDate: str - endDate: str - requestTypes: List[str] - set1: Set - set2: Set - - -class Feedback(BaseModel): - title: str - body: str - - -class StatusTypes(str, Enum): - api = "api" - database = "db" - system = "sys" - - @router.get("/status/{status_type}", description="Provides the status of backend systems") async def status_check(status_type: StatusTypes): if status_type == StatusTypes.api: + await build_cache() result = await status.api() if status_type == StatusTypes.database: result = await status.database() diff --git a/server/api/code/lacity_data_api/routers/shim.py b/server/api/code/lacity_data_api/routers/shim.py index 897149d0c..eafc11477 100644 --- a/server/api/code/lacity_data_api/routers/shim.py +++ b/server/api/code/lacity_data_api/routers/shim.py @@ -1,10 +1,15 @@ -from typing import List, Optional import datetime from fastapi import APIRouter from pydantic import BaseModel -from lacity_data_api.models import clusters, request_type +from .api_models import ( + Filter # StatusTypes, Pins, 
Comparison, Feedback +) +from ..models import ( + clusters, request_type, service_request +) +from .utilities import build_cache router = APIRouter() @@ -14,24 +19,84 @@ """ -class Bounds(BaseModel): - north: float - south: float - east: float - west: float +class SimpleServiceRequest(BaseModel): + request_id: int + type_id: int + latitude: float + longitude: float + class Config: + orm_mode = True -class Filter(BaseModel): - startDate: str - endDate: str - ncList: List[int] - requestTypes: List[str] - zoom: Optional[int] = None - bounds: Optional[Bounds] = None +@router.get("/status/api") +async def shim_get_api_status(): + currentTime = datetime.datetime.now() + last_pulled = datetime.datetime.now() + await build_cache() + + # SELECT last_pulled FROM metadata + return { + 'currentTime': currentTime, + 'gitSha': "DEVELOPMENT", + 'version': "0.1.1", + 'lastPulled': last_pulled + } + + +# TODO: return format is slightly different than current +@router.get("/servicerequest/{srnumber}", description=""" + The service request ID is the integer created from the srnumber + when the initial "1-" is removed. 
+ """) +async def shim_get_service_request(srnumber: str): + id = int(srnumber[2:]) + result = await service_request.ServiceRequest.get_or_404(id) + return result.to_dict() + + +# TODO: return format is slightly different than current +@router.post("/open-requests") +async def get_open_requests(): + result = await service_request.get_open_requests() + + requests_list = [] + + types_dict = await request_type.get_types_dict() + + for i in result: + requests_list.append({ + 'srnumber': f"1-{i.request_id}", + 'requesttype': types_dict.get(i.type_id), + 'latitude': i.latitude, + 'longitude': i.longitude + }) + + return requests_list + + +@router.post("/map/clusters") +async def get_clusters(filter: Filter): + # convert type names to type ids + request_types = await request_type.get_types_by_str_list(filter.requestTypes) + type_ids = [i.type_id for i in request_types] + + result = await clusters.get_clusters_for_bounds( + filter.startDate, + filter.endDate, + type_ids, + filter.ncList, + filter.zoom, + filter.bounds + ) + + return result + + +# TODO: tries clustering by district and NC first @router.post("/new/clusters") -async def get_new_clusters(filter: Filter): +async def shim_get_clusters(filter: Filter): # have to convert the funky date formats start_date = datetime.datetime.strptime(filter.startDate, '%m/%d/%Y') end_date = datetime.datetime.strptime(filter.endDate, '%m/%d/%Y') @@ -42,15 +107,17 @@ async def get_new_clusters(filter: Filter): zoom = filter.zoom or 10 - if zoom < 11: - # get city clusters - result = await clusters.get_clusters_for_city( + if zoom < 12: + # get region clusters + result = await clusters.get_clusters_for_regions( start_date, end_date, type_ids, + filter.ncList, filter.zoom ) elif zoom < 14: + # get council clusters result = await clusters.get_clusters_for_councils( start_date, end_date, @@ -59,6 +126,7 @@ async def get_new_clusters(filter: Filter): filter.zoom ) else: + # use pysupercluster to cluster viewing area result = await 
clusters.get_clusters_for_bounds( start_date, end_date, @@ -71,19 +139,85 @@ async def get_new_clusters(filter: Filter): return result -@router.post("/new/heat") -async def get_new_heatmap(filter: Filter): - start_time = datetime.datetime.strptime(filter.startDate, '%m/%d/%Y') - end_time = datetime.datetime.strptime(filter.endDate, '%m/%d/%Y') +@router.post("/map/heat") +async def shim_get_heatmap(filter: Filter): # convert type names to type ids request_types = await request_type.get_types_by_str_list(filter.requestTypes) type_ids = [i.type_id for i in request_types] result = await clusters.get_points( - start_time, - end_time, + filter.startDate, + filter.endDate, type_ids, filter.ncList ) return result + + +# TODO: currently a placeholder +@router.post("/visualizations") +async def shim_get_visualizations(filter: Filter): + result_object = { + "frequency": { + "bins": [ + "2020-01-01", + "2020-01-21", + "2020-02-10", + "2020-03-01", + "2020-03-21", + "2020-04-10", + "2020-04-30", + "2020-05-20", + "2020-06-09", + "2020-06-29", + "2020-07-19", + "2020-08-08", + "2020-08-28", + "2020-09-17" + ], + "counts": { + "Dead Animal Removal": [ + 20, + 31, + 16, + 21, + 16, + 22, + 23, + 15, + 17, + 22, + 19, + 25, + 7 + ] + } + }, + "timeToClose": { + "Dead Animal Removal": { + "min": 0.001632, + "q1": 0.043319, + "median": 0.123883, + "q3": 0.693608, + "max": 2.700694, + "whiskerMin": 0.001632, + "whiskerMax": 1.03765, + "count": 254, + "outlierCount": 2 + } + }, + "counts": { + "type": { + "Dead Animal Removal": 254 + }, + "source": { + "Call": 165, + "Driver Self Report": 1, + "Mobile App": 36, + "Self Service": 50, + "Voicemail": 2 + } + } + } + return result_object diff --git a/server/api/code/lacity_data_api/routers/utilities.py b/server/api/code/lacity_data_api/routers/utilities.py new file mode 100644 index 000000000..775fe4c5d --- /dev/null +++ b/server/api/code/lacity_data_api/routers/utilities.py @@ -0,0 +1,12 @@ + +from ..models import request_type, council, 
region +from ..config import cache + + +async def build_cache(): + if cache.get('types_dict') is None: + cache['types_dict'] = await request_type.get_types_dict() + if cache.get('councils_dict') is None: + cache['councils_dict'] = await council.get_councils_dict() + if cache.get('regions_dict') is None: + cache['regions_dict'] = await region.get_regions_dict()