diff --git a/.github/workflows/Terraform_Checks.yml b/.github/workflows/Terraform_Checks.yml
index 119b671f6..d0999e132 100644
--- a/.github/workflows/Terraform_Checks.yml
+++ b/.github/workflows/Terraform_Checks.yml
@@ -1,5 +1,8 @@
 name: 'Terraform Checks'
-on: [pull_request]
+on:
+  pull_request:
+    branches-ignore:
+      - '**'
 env:
   GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
   tf_version: 'latest'
diff --git a/package.json b/package.json
index 3c9e5d050..c96d996f6 100644
--- a/package.json
+++ b/package.json
@@ -3,6 +3,7 @@
   "version": "0.1.0",
   "homepage": "https://hackforla.github.io/311-data",
   "dependencies": {
+    "@react-pdf/renderer": "^1.6.8",
     "axios": "^0.19.0",
     "babel-jest": "^24.9.0",
     "bulma": "^0.8.0",
@@ -18,6 +19,7 @@
     "eslint-import-resolver-webpack": "^0.12.1",
     "gh-pages": "^2.1.1",
     "html-webpack-plugin": "^3.2.0",
+    "html2canvas": "^1.0.0-rc.5",
     "jest": "^24.9.0",
     "leaflet": "^1.5.1",
     "leaflet.markercluster": "^1.4.1",
@@ -28,6 +30,7 @@
     "react-dom": "^16.8.6",
     "react-leaflet": "^2.4.0",
     "react-leaflet-choropleth": "^2.0.0",
+    "react-leaflet-easyprint": "^2.0.0",
     "react-leaflet-heatmap-layer": "^2.0.0",
     "react-leaflet-markercluster": "^2.0.0-rc3",
     "react-redux": "^7.1.3",
diff --git a/public/index.html b/public/index.html
index ed4f74296..39a8ee63f 100644
--- a/public/index.html
+++ b/public/index.html
@@ -8,7 +8,6 @@
-
     311 Data
diff --git a/server/src/app.py b/server/src/app.py
index 729563bf8..8f4ca68b9 100644
--- a/server/src/app.py
+++ b/server/src/app.py
@@ -8,8 +8,8 @@
 from datetime import datetime
 from multiprocessing import cpu_count
 
-from services.time_to_close import time_to_close
-from services.frequency import frequency
+from services.timeToCloseService import TimeToCloseService
+from services.frequencyService import FrequencyService
 from services.pinService import PinService
 from services.requestCountsService import RequestCountsService
 from services.requestDetailService import RequestDetailService
@@ -53,7 +53,7 @@ async def index(request):
 @app.route('/timetoclose', methods=["POST"])
 @compress.compress()
 async def timetoclose(request):
-    ttc_worker = time_to_close(app.config['Settings'])
+    ttc_worker = TimeToCloseService(app.config['Settings'])
 
     postArgs = request.json
     start = postArgs.get('startDate', None)
@@ -61,22 +61,28 @@ async def timetoclose(request):
     ncs = postArgs.get('ncList', [])
     requests = postArgs.get('requestTypes', [])
 
-    data = ttc_worker.ttc(startDate=start,
-                          endDate=end,
-                          ncList=ncs,
-                          requestTypes=requests)
+    data = await ttc_worker.get_ttc(startDate=start,
+                                    endDate=end,
+                                    ncList=ncs,
+                                    requestTypes=requests)
 
     return json(data)
 
 
-@app.route('/requestfrequency')
+@app.route('/requestfrequency', methods=["POST"])
 @compress.compress()
 async def requestfrequency(request):
-    freq_worker = frequency(app.config['Settings'])
+    freq_worker = FrequencyService(app.config['Settings'])
 
-    data = freq_worker.freq_view_data(service=True,
-                                      councils=[],
-                                      aggregate=True)
+    postArgs = request.json
+    start = postArgs.get('startDate', None)
+    end = postArgs.get('endDate', None)
+    ncs = postArgs.get('ncList', [])
+    requests = postArgs.get('requestTypes', [])
+    data = await freq_worker.get_frequency(startDate=start,
+                                           endDate=end,
+                                           ncList=ncs,
+                                           requestTypes=requests)
 
     return json(data)
 
diff --git a/server/src/services/dataService.py b/server/src/services/dataService.py
index 8749fa8fe..f5a8355cc 100644
--- a/server/src/services/dataService.py
+++ b/server/src/services/dataService.py
@@ -27,7 +27,7 @@ def __init__(self, config=None, tableName="ingest_staging_table"):
         self.table = tableName
         self.data = None
         self.engine = db.create_engine(self.dbString)
-        self.session = sessionmaker(bind=self.engine)()
+        self.Session = sessionmaker(bind=self.engine)
 
     def standardFilters(self,
                         startDate=None,
@@ -54,10 +54,16 @@ def itemQuery(self, requestNumber):
         if not requestNumber or not isinstance(requestNumber, str):
             return {'Error': 'Missing request number'}
 
-        return self.session \
+        session = self.Session()
+        record = session \
             .query(Request) \
-            .get(requestNumber) \
-            ._asdict()
+            .get(requestNumber)
+        session.close()
+
+        if record:
+            return record._asdict()
+        else:
+            return {'Error': 'Request number not found'}
 
     @includeMeta
     def query(self, queryItems=[], queryFilters=[], limit=None):
@@ -70,11 +76,14 @@ def query(self, queryItems=[], queryFilters=[], limit=None):
             return {'Error': 'Missing query items'}
 
         selectFields = [getattr(Request, item) for item in queryItems]
-        records = self.session \
+
+        session = self.Session()
+        records = session \
             .query(*selectFields) \
             .filter(*queryFilters) \
             .limit(limit) \
             .all()
+        session.close()
 
         return [rec._asdict() for rec in records]
 
@@ -94,7 +103,7 @@ def aggregateQuery(self, countFields=[], queryFilters=[]):
         return [{
             'field': field,
             'counts': df.groupby(by=field).size().to_dict()
-        } for field in countFields]
+        } for field in countFields if field in df.columns]
 
     def storedProc(self):
         pass
diff --git a/server/src/services/frequency.py b/server/src/services/frequency.py
deleted file mode 100644
index a11828656..000000000
--- a/server/src/services/frequency.py
+++ /dev/null
@@ -1,133 +0,0 @@
-from configparser import ConfigParser
-import sqlalchemy as db
-import pandas as pd
-import json
-
-
-class frequency(object):
-    def __init__(self, config=None, tableName="ingest_staging_table"):
-        self.config = config
-        self.dbString = None if not self.config \
-            else self.config['Database']['DB_CONNECTION_STRING']
-
-        self.table = tableName
-        self.data = None
-        pass
-
-    def freq_view_all(self, serviced=False, aggregate=True):
-        """
-        Returns the request type and associated dates for all data
-        Sorted by request type, followed by created date,
-        service date (if applicable), and then closed date
-        """
-        # Todo: implement condition for serviced date
-        engine = db.create_engine(self.dbString)
-
-        if serviced:
-            query = "SELECT \
-                requesttype,\
-                createddate,\
-                closeddate,\
-                servicedate\
-                FROM %s" % self.table
-        else:
-            query = "SELECT \
-                requesttype,\
-                createddate,\
-                closeddate\
-                FROM %s" % self.table
-
-        df = pd.read_sql_query(query, con=engine)
-
-        if serviced:
-            df['servicedate'] = pd.to_datetime(df['servicedate'])
-
-        df['closeddate'] = pd.to_datetime(df['closeddate'])
-        df = df.sort_values(by=['requesttype', 'createddate', 'closeddate'])
-
-        return df.to_json(orient="records")
-
-    def freq_aggregate(self, df):
-        request_counts = df['requesttype'].value_counts()
-
-        return request_counts.to_json()
-
-    def freq_view_data(self,
-                       service=False,
-                       aggregate=True,
-                       councils=[],
-                       startdate="",
-                       enddate=""):
-        """
-        Returns the request type, neighborhood council, created and
-        closed dates for all data sorted by request type, followed by
-        neighborhood council #, then created date, and then closed date
-        Returns serviced date as well if service is set to True
-        Returns data for all councils if councils=[], otherwise returns data
-        for only the array of neighborhood council #s
-        Returns summary data as well if aggregate is set to True
-        Returns only entries created between startdate and enddate if values
-        are set for those parameters
-        Format of startdate and enddate should be a string in
-        the form 2019-12-01 23:02:05
-        """
-        engine = db.create_engine(self.dbString)
-
-        if service:
-            df = pd.read_sql_query("SELECT\
-                requesttype,\
-                createddate,\
-                closeddate,\
-                servicedate,\
-                nc,\
-                ncname\
-                FROM %s" % self.table, con=engine)
-            df['servicedate'] = pd.to_datetime(df['servicedate'])
-
-        else:
-            df = pd.read_sql_query("SELECT\
-                requesttype,\
-                createddate,\
-                closeddate,\
-                nc,\
-                ncname\
-                FROM %s" % self.table, con=engine)
-
-        df['closeddate'] = pd.to_datetime(df['closeddate'])
-
-        if councils != []:
-            df = df[df.nc.isin(councils)]
-
-        if startdate != "":
-            start = pd.to_datetime(startdate)
-            df = df[(df['createddate'] >= start)]
-
-        if enddate != "":
-            end = pd.to_datetime(enddate)
-            df = df[df['createddate'] <= end]
-
-        df = df.sort_values(by=['requesttype',
-                                'nc',
-                                'createddate',
-                                'closeddate'])
-        df_json = json.loads(df.to_json(orient="records"))
-
-        if aggregate:
-            summary = self.freq_aggregate(df)
-            json_data = []
-            json_data.append(json.loads(summary))
-            json_data.append(df_json)
-            return json_data
-
-        return df_json
-
-# Todo: filter by NC at the sql request stage instead of afterwards
-
-
-if __name__ == "__main__":
-    freq = frequency()
-    config = ConfigParser()
-    config.read("../setting.cfg")
-    freq.config = config
-    freq.dbString = config['Database']['DB_CONNECTION_STRING']
-    freq.freq_view_data(service=True, aggregate=True)
diff --git a/server/src/services/frequencyService.py b/server/src/services/frequencyService.py
new file mode 100644
index 000000000..8ce45da4c
--- /dev/null
+++ b/server/src/services/frequencyService.py
@@ -0,0 +1,120 @@
+import pandas as pd
+import numpy as np
+import math
+from .dataService import DataService
+
+
+class FrequencyService(object):
+    def __init__(self, config=None, tableName="ingest_staging_table"):
+        self.dataAccess = DataService(config, tableName)
+
+    async def get_frequency(self,
+                            startDate=None,
+                            endDate=None,
+                            ncList=[],
+                            requestTypes=[]):
+        """
+        Given a date range, covers the range with equal-length date bins, and
+        counts the number of requests that were created in each date bin.
+
+        Example response if startDate = 01/01/18 and endDate = 03/02/2020
+        {
+            'bins': [
+                "2018-01-01",
+                "2018-03-08",
+                "2018-05-13",
+                "2018-07-18",
+                "2018-09-22",
+                "2018-11-27",
+                "2019-02-01",
+                "2019-04-08",
+                "2019-06-13",
+                "2019-08-18",
+                "2019-10-23",
+                "2019-12-28",
+                "2020-03-03"
+            ],
+            'counts': {
+                'Graffiti Removal': [
+                    125, 15, 53, 24, 98, 42,
+                    33, 128, 30, 16, 138, 57
+                ],
+                'Bulky Items': [
+                    1, 1, 2, 3, 5, 8,
+                    13, 21, 34, 55, 89, 144
+                ]
+            }
+        }
+
+        Note that the number of bins is one greater than the number of counts,
+        because the list of bins includes the end date of the final bin.
+        """
+
+        def get_bins(startDate, endDate):
+            """
+            Takes a date range and returns a list of equal-size date bins that
+            cover the range.
+
+            For ranges of 24 days or less, each bin covers one calendar day.
+
+            For larger ranges, each bin is the largest size such that:
+            (1) the size is a whole number of days (i.e. the bin edges
+                are all at midnight)
+            (2) the number of bins is at least 12.
+
+            Not all date ranges are evenly divisible by a whole number of
+            days, so in cases where they aren't, we move the end date forward
+            so that the last bin is the same size as the rest.
+            """
+            start = pd.to_datetime(startDate)
+            end = pd.to_datetime(endDate) + pd.Timedelta(days=1)
+            diff = (end - start).days
+
+            # calculate size and number of bins
+            bin_size = 1 if diff <= 24 else diff // 12
+            num_bins = math.ceil(diff / bin_size)
+
+            # move the end date forward in cases where the range can't
+            # be evenly divided
+            if diff != num_bins * bin_size:
+                end = start + num_bins * pd.Timedelta(days=bin_size)
+
+            bins = pd.date_range(start, end, freq='{}D'.format(bin_size))
+            return bins, start, end
+
+        def get_counts(dates, bins):
+            """ count the number of dates in each date bin """
+            dates = dates.astype('datetime64[s]').astype('float')
+            counts, _ = np.histogram(dates, bins=bins)
+            return list(map(int, counts))
+
+        # generate the bins
+        bins, start, end = get_bins(startDate, endDate)
+
+        # grab the necessary data from the db
+        fields = ['requesttype', 'createddate']
+        filters = self.dataAccess.standardFilters(
+            start, end, ncList, requestTypes)
+        data = self.dataAccess.query(fields, filters)
+
+        # read into a dataframe, drop the nulls, and halt if no rows exist
+        df = pd.DataFrame(data['data']).dropna()
+        if len(df) == 0:
+            data['data'] = {}
+            return data
+
+        # convert bins to float so numpy can use them
+        bins_fl = np.array(bins).astype('datetime64[s]').astype('float')
+
+        # count the requests created in each bin
+        counts = df \
+            .groupby(by='requesttype') \
+            .apply(lambda x: get_counts(x['createddate'].values, bins_fl)) \
+            .to_dict()
+
+        data['data'] = {
+            'bins': list(bins.astype(str)),
+            'counts': counts
+        }
+
+        return data
diff --git a/server/src/services/precache.py b/server/src/services/precache.py
new file mode 100644
index 000000000..9c63e419c
--- /dev/null
+++ b/server/src/services/precache.py
@@ -0,0 +1,68 @@
+from configparser import ConfigParser
+import sqlalchemy as db
+import pandas as pd
+import json
+from datetime import datetime
+
+
+class precache(object):
+    def __init__(self, config=None, tableName="ingest_staging_table"):
+        self.config = config
+        self.dbString = None if not self.config \
+            else self.config['Database']['DB_CONNECTION_STRING']
+
+        self.table = tableName
+        self.data = None
+        pass
+
+    def recent_data(self, window, requestType, council):
+        engine = db.create_engine(self.dbString)
+
+        now = datetime.now()
+        startdate = pd.Timestamp(now) - pd.Timedelta(days=window)
+
+        query = "SELECT createddate, requesttype, ncname, latitude, longitude FROM %s \
+            WHERE createddate > '%s'" % (self.table, startdate)
+
+        if requestType != "all":
+            query += " AND requesttype = '%s'" % (requestType)
+        if council != "all":
+            query += " AND ncname = '%s'" % (council)
+
+        df = pd.read_sql_query(query, con=engine)
+        self.data = df
+
+        # return json.loads(df.to_json())
+
+    def compile_datasets(self, window=14, requestType='all', council='all'):
+        self.recent_data(window, requestType, council)
+
+        df = self.data
+
+        request_arr = df.requesttype.unique()
+        nc_arr = df.ncname.unique()
+
+        df_arr = []
+        for request in request_arr:
+            dates = df['createddate'][df['requesttype'] == request]
+            df_dates = pd.DataFrame({request: dates})
+            df_arr.append(json.loads(df_dates.to_json()))
+        for nc in nc_arr:
+            dates = df['createddate'][df['ncname'] == nc]
+            df_dates = pd.DataFrame({nc: dates})
+            df_arr.append(json.loads(df_dates.to_json()))
+
+        return df_arr
+
+    # def compile_graphs(self):
+    #     plt.hist(x=df['createddate'])
+    #     plt.show()
+
+
+if __name__ == "__main__":
+    precache = precache()
+    config = ConfigParser()
+    config.read("../setting.cfg")
+    precache.config = config
+    precache.dbString = config['Database']['DB_CONNECTION_STRING']
+    precache.compile_datasets(window=14)
diff --git a/server/src/services/sqlIngest.py b/server/src/services/sqlIngest.py
index 2cd31610a..cf152444c 100644
--- a/server/src/services/sqlIngest.py
+++ b/server/src/services/sqlIngest.py
@@ -81,8 +81,7 @@ def cleanData(self):
     def ingestData(self, ingestMethod='replace',
                    tableName='ingest_staging_table'):
         '''Set up connection to database'''
-        asdf = 'Inserting data into ' + self.dialect + ' instance...'
-        print(asdf)
+        print('Inserting data into ' + self.dialect + ' instance...')
         ingestTimer = time.time()
         data = self.data.copy()  # shard deepcopy for other endpoint operations
         engine = db.create_engine(self.dbString)
@@ -252,4 +251,4 @@ def fix_nan_vals(resultDict):
     loader = DataHandler(config)
     loader.fetchSocrataFull()
     loader.cleanData()
-    loader.ingestData('ingest_staging_table')
+    loader.ingestData(tableName='ingest_staging_table')
diff --git a/server/src/services/timeToCloseService.py b/server/src/services/timeToCloseService.py
new file mode 100644
index 000000000..71e54fbde
--- /dev/null
+++ b/server/src/services/timeToCloseService.py
@@ -0,0 +1,105 @@
+import pandas as pd
+import numpy as np
+from .dataService import DataService
+
+
+class TimeToCloseService(object):
+    def __init__(self, config=None, tableName="ingest_staging_table"):
+        self.dataAccess = DataService(config, tableName)
+
+    async def get_ttc(self,
+                      startDate=None,
+                      endDate=None,
+                      ncList=[],
+                      requestTypes=[]):
+        """
+        For each requestType, returns the statistics necessary to generate
+        a boxplot of the number of days it took to close the requests.
+
+        Example response:
+        {
+            lastPulled: Timestamp,
+            data: {
+                'Bulky Items': {
+                    'min': float,
+                    'q1': float,
+                    'median': float,
+                    'q3': float,
+                    'max': float,
+                    'whiskerMin': float,
+                    'whiskerMax': float,
+                    'count': int,
+                    'outlierCount': int
+                }
+                ...
+            }
+        }
+        """
+
+        def get_boxplot_stats(arr, C=1.5):
+            """
+            Takes a one-dimensional numpy array of floats and generates boxplot
+            statistics for the data. The basic algorithm is standard.
+            See https://en.wikipedia.org/wiki/Box_plot
+
+            The max length of the whiskers is the constant C, multiplied by the
+            interquartile range. This is a common method, although there
+            are others. The default value of C=1.5 is typical when this
+            method is used.
+            See matplotlib.org/3.1.3/api/_as_gen/matplotlib.pyplot.boxplot.html
+            """
+
+            # calculate first and third quantiles
+            q1 = np.quantile(arr, 0.25)
+            q3 = np.quantile(arr, 0.75)
+
+            # calculate whiskers
+            iqr = q3 - q1
+            whiskerMin = arr[arr >= q1 - C * iqr].min()
+            whiskerMax = arr[arr <= q3 + C * iqr].max()
+
+            # calculate outliers
+            minOutliers = arr[arr < whiskerMin]
+            maxOutliers = arr[arr > whiskerMax]
+            outliers = list(np.concatenate((minOutliers, maxOutliers)))
+
+            return {
+                'min': np.min(arr),
+                'q1': q1,
+                'median': np.median(arr),
+                'q3': q3,
+                'max': np.max(arr),
+                'whiskerMin': whiskerMin,
+                'whiskerMax': whiskerMax,
+                'count': len(arr),
+                'outlierCount': len(outliers)
+            }
+
+        # grab the necessary data from the db
+        fields = ['requesttype', 'createddate', 'closeddate']
+        filters = self.dataAccess.standardFilters(
+            startDate, endDate, ncList, requestTypes)
+        data = self.dataAccess.query(fields, filters)
+
+        # read into a dataframe, drop the nulls, and halt if no rows exist
+        df = pd.DataFrame(data['data']).dropna()
+        if len(df) == 0:
+            data['data'] = {}
+            return data
+
+        # generate a new dataframe that contains the number of days it
+        # takes to close each request, plus the type of request
+        df['closeddate'] = pd.to_datetime(df['closeddate'])
+        df['createddate'] = pd.to_datetime(df['createddate'])
+        df['time-to-close'] = df['closeddate'] - df['createddate']
+        df['hours-to-close'] = df['time-to-close'].astype('timedelta64[h]')
+        df['days-to-close'] = (df['hours-to-close'] / 24).round(2)
+        dtc_df = df[['requesttype', 'days-to-close']]
+
+        # group the requests by type and get box plot stats for each type
+        data['data'] = dtc_df \
+            .groupby(by='requesttype') \
+            .apply(lambda df: get_boxplot_stats(df['days-to-close'].values)) \
+            .to_dict()
+
+        return data
diff --git a/server/src/services/time_to_close.py b/server/src/services/time_to_close.py
deleted file mode 100644
index 567039fdf..000000000
--- a/server/src/services/time_to_close.py
+++ /dev/null
@@ -1,275 +0,0 @@
-from configparser import ConfigParser
-import sqlalchemy as db
-import pandas as pd
-import json
-from .dataService import DataService
-import numpy as np
-
-
-class time_to_close(object):
-    def __init__(self,
-                 config=None,
-                 requestTypes=None,
-                 tableName="ingest_staging_table"):
-        """
-        Choose table from database by setting the value of tableName.
-        Default table is the staging table.
-        """
-        self.config = config
-        self.dbString = None if not self.config\
-            else self.config['Database']['DB_CONNECTION_STRING']
-        self.table = tableName
-        self.data = None
-        self.dataAccess = DataService(config, tableName)
-        pass
-
-    def ttc(self, startDate=None, endDate=None, ncList=[], requestTypes=[]):
-        """
-        For each requestType, returns the statistics necessary to generate
-        a boxplot of the number of days it took to close the requests.
-
-        Example response:
-        {
-            lastPulled: Timestamp,
-            data: {
-                'Bulky Items': {
-                    'min': float,
-                    'q1': float,
-                    'median': float,
-                    'q3': float,
-                    'max': float,
-                    'whiskerMin': float,
-                    'whiskerMax': float,
-                    'outliers': [float],
-                    'count': int
-                }
-                ...
-            }
-        }
-        """
-
-        def get_boxplot_stats(arr, C=1.5):
-            """
-            Takes a one-dimensional numpy array of floats and generates boxplot
-            statistics for the data. The basic algorithm is standard.
-            See https://en.wikipedia.org/wiki/Box_plot
-
-            The max length of the whiskers is the constant C, multiplied by the
-            interquartile range. This is a common method, although there
-            are others. The default value of C=1.5 is typical when this
-            method is used.
-            See matplotlib.org/3.1.3/api/_as_gen/matplotlib.pyplot.boxplot.html
-            """
-
-            # calculate first and third quantiles
-            q1 = np.quantile(arr, 0.25)
-            q3 = np.quantile(arr, 0.75)
-
-            # calculate whiskers
-            iqr = q3 - q1
-            whiskerMin = arr[arr >= q1 - C * iqr].min()
-            whiskerMax = arr[arr <= q3 + C * iqr].max()
-
-            # calculate outliers
-            minOutliers = arr[arr < whiskerMin]
-            maxOutliers = arr[arr > whiskerMax]
-            outliers = list(np.concatenate((minOutliers, maxOutliers)))
-
-            return {
-                'min': np.min(arr),
-                'q1': q1,
-                'median': np.median(arr),
-                'q3': q3,
-                'max': np.max(arr),
-                'whiskerMin': whiskerMin,
-                'whiskerMax': whiskerMax,
-                'outliers': outliers,
-                'count': len(arr)
-            }
-
-        # grab the necessary data from the db
-        fields = ['requesttype', 'createddate', 'closeddate']
-        filters = self.dataAccess.standardFilters(
-            startDate, endDate, ncList, requestTypes)
-        data = self.dataAccess.query(fields, filters)
-
-        # read into a dataframe, drop the nulls, and halt if no rows exist
-        df = pd.DataFrame(data['data']).dropna()
-        if len(df) == 0:
-            data['data'] = {}
-            return data
-
-        # generate a new dataframe that contains the number of days it
-        # takes to close each request, plus the type of request
-        df['closeddate'] = pd.to_datetime(df['closeddate'])
-        df['createddate'] = pd.to_datetime(df['createddate'])
-        df['time-to-close'] = df['closeddate'] - df['createddate']
-        df['hours-to-close'] = df['time-to-close'].astype('timedelta64[h]')
-        df['days-to-close'] = (df['hours-to-close'] / 24).round(2)
-        dtc_df = df[['requesttype', 'days-to-close']]
-
-        # group the requests by type and get box plot stats for each type
-        data['data'] = dtc_df \
-            .groupby(by='requesttype') \
-            .apply(lambda df: get_boxplot_stats(df['days-to-close'].values)) \
-            .to_dict()
-
-        return data
-
-    def ttc_view_dates(self, service=False):
-        """
-        Returns all rows under the CreatedDate and
-        ClosedDate columns in human-readable format
-        Returns all rows with a service date under
-        CreatedDate, ClosedDate, and ServicedDate columns
-        if serviced is True
-        """
-        engine = db.create_engine(self.dbString)
-
-        if service:
-            df = pd.read_sql_query(
-                "SELECT \
-                createddate,\
-                closeddate,\
-                servicedate\
-                FROM %s" % self.table, con=engine)
-            df = df[df['servicedate'].notnull()]
-        else:
-            df = pd.read_sql_query(
-                "SELECT \
-                createddate,\
-                closeddate\
-                FROM %s" % self.table, con=engine)
-
-        df['createddate'] = df['createddate'].apply(
-            lambda x: x.strftime('%m/%d/%Y %I:%M:%S %p'))
-
-        return df.to_json(orient='index')
-
-    def ttc_to_days(self, dt):
-        """
-        Converts Unix time to days
-        """
-        num_days = pd.Timedelta.total_seconds(dt)/(24.*3600)
-        if num_days <= .000001:
-            return 0
-
-        in_days = pd.Timedelta.total_seconds(dt)/(24.*3600)
-        return in_days
-
-    def ttc_time_diff(self, alldata, service, allRequests, requestType):
-        """
-        Sets self.data to a dataframe catalogging the time
-        it takes a request to close
-        Parameters are inherited from ttc_summary()
-        """
-
-        engine = db.create_engine(self.dbString)
-
-        if service:
-            if not allRequests:
-                query = "SELECT \
-                    createddate,\
-                    closeddate,\
-                    servicedate\
-                    FROM %s WHERE requesttype=%s" %\
-                    (self.table, requestType)
-                print(query)
-                df = pd.read_sql_query(
-                    query, con=engine)
-            else:
-                df = pd.read_sql_query(
-                    "SELECT \
-                    createddate,\
-                    closeddate,\
-                    servicedate\
-                    FROM %s" %
-                    self.table, con=engine)
-            df = df[df['servicedate'].notnull()]
-            df['servicedate'] = pd.to_datetime(df['servicedate'])
-            diff_df = pd.DataFrame(
-                df['servicedate'] - df['createddate'],
-                columns=['time_to_service'])
-
-        else:
-            if not allRequests:
-                df = pd.read_sql_query(
-                    "SELECT \
-                    createddate,\
-                    closeddate\
-                    FROM %s WHERE requesttype=%s" %
-                    (self.table, requestType), con=engine)
-            else:
-                df = pd.read_sql_query(
-                    "SELECT \
-                    createddate,\
-                    closeddate\
-                    FROM %s" %
-                    self.table, con=engine)
-            diff_df = pd.DataFrame({'time_to_close': []})
-
-        df['createddate'] = pd.to_datetime(df['createddate'])
-        df['closeddate'] = pd.to_datetime(df['closeddate'])
-        diff_df['time_to_close'] = df['closeddate'] - df['createddate']
-        diff_df = diff_df[diff_df['time_to_close'].notnull()]
-
-        for column in diff_df:
-            diff_df[column] = diff_df[column].apply(self.ttc_to_days)
-
-        self.data = diff_df
-
-    def ttc_summary(self,
-                    allData=False,
-                    service=False,
-                    allRequests=True,
-                    requestType="",
-                    viewDates=False):
-        """
-        Returns summary data of the amount of time it takes for a
-        request to close as a dataframe.
-        If service is set to True, returns summary data of time_to_service
-        as well
-        If allData is set to True, returns the data of every entry as well
-        If allRequests are set to False, queries data of
-        the value of requestType only
-        """
-        self.ttc_time_diff(allData, service, allRequests, requestType)
-        data = self.data
-        print(data)
-
-        summary_arr = []
-
-        for column in data:
-            summary = data[column].describe()
-            df_desc = pd.DataFrame({column: summary})
-            df_json = json.loads(df_desc.to_json())
-            summary_arr.append(df_json)
-
-        if not allData and not viewDates:
-            return summary_arr
-
-        data_arr = []
-        data_arr.append(summary_arr)
-
-        if allData:
-            days_df = data.copy()
-            days_df_json = json.loads(days_df.to_json())
-            data_arr.append(days_df_json)
-
-        if viewDates:
-            dates = self.ttc_view_dates(service)
-            data_arr.append(json.loads(dates))
-
-        return data_arr
-
-    # Todo: Implement functionality for only open status data?
-    # Todo: Implement option to filter by NC?
-
-
-if __name__ == "__main__":
-    ttc = time_to_close()
-    config = ConfigParser()
-    config.read("../setting.cfg")
-    ttc.config = config
-    ttc.dbString = config['Database']['DB_CONNECTION_STRING']
-    ttc.ttc_summary()
diff --git a/server/test/test_time_to_close.py b/server/test/test_time_to_close.py
index 85707b928..df704dfe4 100644
--- a/server/test/test_time_to_close.py
+++ b/server/test/test_time_to_close.py
@@ -1,4 +1,4 @@
-from src.services.time_to_close import time_to_close
+from src.services.timeToCloseService import TimeToCloseService
 
 TESTCONFIG = {
     "Database": {
@@ -13,7 +13,7 @@ def test_serviceExists():
     print(testString)
 
     # Act
-    ttc_worker = time_to_close(TESTCONFIG)
+    ttc_worker = TimeToCloseService(TESTCONFIG)
     print(ttc_worker)
 
     # Assert
diff --git a/src/App.jsx b/src/App.jsx
index a56d1914d..17c2e1b38 100644
--- a/src/App.jsx
+++ b/src/App.jsx
@@ -5,6 +5,10 @@ import Header from './components/main/header/Header';
 import Body from './components/main/body/Body';
 import Footer from './components/main/footer/Footer';
 import Tooltip from './components/main/tooltip/Tooltip';
+import SnapshotService, { SnapshotRenderer } from './components/export/SnapshotService';
+import Visualizations from './components/Visualizations';
+
+SnapshotService.register({ Visualizations });
 
 const App = () => {
   useEffect(() => {
@@ -17,6 +21,7 @@ const App = () => {
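
For reference, both reworked endpoints in server/src/app.py now read startDate, endDate, ncList, and requestTypes from a POSTed JSON body. A minimal client-side sketch follows, assuming a locally running API server; the host, port, example dates, and request types are illustrative and not part of the diff.

# Sketch of calling the reworked endpoints (assumed host/port: http://localhost:5000).
# Dates are passed as strings that the services hand to pd.to_datetime.
import requests

payload = {
    'startDate': '2018-01-01',
    'endDate': '2020-03-02',
    'ncList': [],                    # the services default this to an empty list
    'requestTypes': ['Bulky Items', 'Graffiti Removal'],
}

# /timetoclose: per-request-type box-plot stats (min, q1, median, q3, max, whiskers, counts)
ttc = requests.post('http://localhost:5000/timetoclose', json=payload).json()

# /requestfrequency: date bins plus per-type counts of requests created in each bin
freq = requests.post('http://localhost:5000/requestfrequency', json=payload).json()

print(list(ttc['data'].keys()))
print(freq['data']['bins'][:3], freq['data']['counts'].get('Bulky Items'))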