From 7ab1bf5401eb33ec74e4598077527524f9f56430 Mon Sep 17 00:00:00 2001 From: Nils Krehl Date: Fri, 4 Nov 2022 15:04:27 +0100 Subject: [PATCH 1/6] bugfix for ckg internal server error due to not fixed Jinja dependency: upgrade flask (1.x -> 2.x) and dash (1.x -> 2.x) and adapt ckg code accordingly (use new dash pages feature and remove boilerplate navigation code) --- ckg/analytics_core/utils.py | 2 +- ckg/analytics_core/viz/viz.py | 6 +- ckg/ckg_utils.py | 4 +- ckg/config/analytics_factory_log.config | 45 + ckg/config/ckg_config.yml | 23 + ckg/config/graphdb_builder_log.config | 45 + ckg/config/graphdb_connector_log.config | 45 + ckg/config/report_manager_log.config | 45 + ckg/report_manager/app.py | 202 ++++- ckg/report_manager/apps/adminApp.py | 57 -- ckg/report_manager/apps/apps_config.py | 112 +-- ckg/report_manager/apps/basicApp.py | 113 --- ckg/report_manager/apps/dataUpload.py | 88 +- ckg/report_manager/apps/dataUploadApp.py | 85 -- ckg/report_manager/apps/homepageApp.py | 39 - ckg/report_manager/apps/homepageStats.py | 131 +-- ckg/report_manager/apps/imports.py | 182 ++-- ckg/report_manager/apps/importsApp.py | 37 - ckg/report_manager/apps/initialApp.py | 3 - ckg/report_manager/apps/loginApp.py | 25 - ckg/report_manager/apps/projectApp.py | 185 ---- ckg/report_manager/apps/projectCreation.py | 31 +- ckg/report_manager/apps/projectCreationApp.py | 118 --- ckg/report_manager/index.py | 828 ------------------ ckg/report_manager/pages/__init__.py | 0 ckg/report_manager/pages/adminPage.py | 72 ++ ckg/report_manager/pages/dataUploadPage.py | 397 +++++++++ ckg/report_manager/pages/homePage.py | 155 ++++ ckg/report_manager/pages/importsPage.py | 65 ++ ckg/report_manager/pages/initialPage.py | 15 + ckg/report_manager/pages/loginPage.py | 22 + ckg/report_manager/pages/logoutPage.py | 14 + .../pages/projectCreationPage.py | 298 +++++++ ckg/report_manager/pages/projectPage.py | 178 ++++ ckg/report_manager/report.py | 39 +- ckg/report_manager/utils.py | 58 +- requirements.txt | 252 ++++-- setup.py | 18 +- 38 files changed, 2202 insertions(+), 1832 deletions(-) create mode 100644 ckg/config/analytics_factory_log.config create mode 100644 ckg/config/ckg_config.yml create mode 100644 ckg/config/graphdb_builder_log.config create mode 100644 ckg/config/graphdb_connector_log.config create mode 100644 ckg/config/report_manager_log.config delete mode 100644 ckg/report_manager/apps/adminApp.py delete mode 100644 ckg/report_manager/apps/basicApp.py delete mode 100644 ckg/report_manager/apps/dataUploadApp.py delete mode 100644 ckg/report_manager/apps/homepageApp.py delete mode 100644 ckg/report_manager/apps/importsApp.py delete mode 100644 ckg/report_manager/apps/initialApp.py delete mode 100644 ckg/report_manager/apps/loginApp.py delete mode 100644 ckg/report_manager/apps/projectApp.py delete mode 100644 ckg/report_manager/apps/projectCreationApp.py delete mode 100644 ckg/report_manager/index.py create mode 100644 ckg/report_manager/pages/__init__.py create mode 100644 ckg/report_manager/pages/adminPage.py create mode 100644 ckg/report_manager/pages/dataUploadPage.py create mode 100644 ckg/report_manager/pages/homePage.py create mode 100644 ckg/report_manager/pages/importsPage.py create mode 100644 ckg/report_manager/pages/initialPage.py create mode 100644 ckg/report_manager/pages/loginPage.py create mode 100644 ckg/report_manager/pages/logoutPage.py create mode 100644 ckg/report_manager/pages/projectCreationPage.py create mode 100644 ckg/report_manager/pages/projectPage.py diff --git 
a/ckg/analytics_core/utils.py b/ckg/analytics_core/utils.py index 8b89e8bd..fa005cbc 100644 --- a/ckg/analytics_core/utils.py +++ b/ckg/analytics_core/utils.py @@ -5,7 +5,7 @@ import io import base64 import bs4 as bs -import dash_html_components as html +from dash import html import requests import networkx as nx from networkx.readwrite import json_graph diff --git a/ckg/analytics_core/viz/viz.py b/ckg/analytics_core/viz/viz.py index 58511f7e..b2106135 100644 --- a/ckg/analytics_core/viz/viz.py +++ b/ckg/analytics_core/viz/viz.py @@ -3,8 +3,8 @@ import pandas as pd import ast from collections import defaultdict -import dash_core_components as dcc -import dash_html_components as html +from dash import dcc +from dash import html import matplotlib import matplotlib.pyplot as plt import plotly @@ -13,7 +13,7 @@ import plotly.figure_factory as FF import plotly.express as px import math -import dash_table +from dash import dash_table import plotly.subplots as tools import plotly.io as pio from scipy.spatial.distance import pdist, squareform diff --git a/ckg/ckg_utils.py b/ckg/ckg_utils.py index a36575e2..ee4e37b4 100644 --- a/ckg/ckg_utils.py +++ b/ckg/ckg_utils.py @@ -3,6 +3,7 @@ import yaml import json import logging +from pathlib import Path def read_ckg_config(key=None): @@ -153,4 +154,5 @@ class DictDFEncoder(json.JSONEncoder): def default(self, obj): if hasattr(obj, 'to_json'): return obj.to_json(orient='records') - return json.JSONEncoder.default(self, obj) \ No newline at end of file + return json.JSONEncoder.default(self, obj) + diff --git a/ckg/config/analytics_factory_log.config b/ckg/config/analytics_factory_log.config new file mode 100644 index 00000000..caca363d --- /dev/null +++ b/ckg/config/analytics_factory_log.config @@ -0,0 +1,45 @@ +{ "version": 1, + "disable_existing_loggers": false, + "formatters": { + "simple": { + "format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s" + } + }, + "handlers": { + "console": { + "class": "logging.StreamHandler", + "level": "NOTSET", + "formatter": "simple", + "stream": "ext://sys.stdout" + }, + "info_file_handler": { + "class": "logging.handlers.RotatingFileHandler", + "level": "INFO", + "formatter": "simple", + "filename": "/home/st/Schreibtisch/CKG/log/analytics_factory.log", + "maxBytes": 10485760, + "backupCount": 20, + "encoding": "utf8" + }, + "error_file_handler": { + "class": "logging.handlers.SMTPHandler", + "level": "CRITICAL", + "formatter": "simple", + "mailhost": "localhost", + "fromaddr": "error@ckg.com", + "toaddrs": "alberto.santos@cpr.ku.dk", + "subject": "CKG ERROR - Analytics Factory" + } + }, + "loggers": { + "my_module": { + "level": "ERROR", + "handlers": ["console"], + "propagate": "no" + } + }, + "root":{ + "level": "INFO", + "handlers": ["info_file_handler", "error_file_handler"] + } +} diff --git a/ckg/config/ckg_config.yml b/ckg/config/ckg_config.yml new file mode 100644 index 00000000..5889eccc --- /dev/null +++ b/ckg/config/ckg_config.yml @@ -0,0 +1,23 @@ +version: 1.0 +ckg_directory: "/home/st/Schreibtisch/CKG/ckg" +data_directory: "/home/st/Schreibtisch/CKG/data" +archive_directory: "/home/st/Schreibtisch/CKG/data/archive" +databases_directory: "/home/st/Schreibtisch/CKG/data/databases" +experiments_directory: "/home/st/Schreibtisch/CKG/data/experiments" +ontologies_directory: "/home/st/Schreibtisch/CKG/data/ontologies" +users_directory: "/home/st/Schreibtisch/CKG/data/users" +stats_directory: "/home/st/Schreibtisch/CKG/data/stats" +downloads_directory: "/home/st/Schreibtisch/CKG/data/downloads" 
+reports_directory: "/home/st/Schreibtisch/CKG/data/reports" +tmp_directory: "/home/st/Schreibtisch/CKG/data/tmp" +imports_directory: "/home/st/Schreibtisch/CKG/data/imports" +imports_databases_directory: "/home/st/Schreibtisch/CKG/data/imports/databases" +imports_experiments_directory: "/home/st/Schreibtisch/CKG/data/imports/experiments" +imports_ontologies_directory: "/home/st/Schreibtisch/CKG/data/imports/ontologies" +imports_users_directory: "/home/st/Schreibtisch/CKG/data/imports/users" +imports_curated_directory: "/home/st/Schreibtisch/CKG/data/imports/curated" +log_directory: "/home/st/Schreibtisch/CKG/log" +report_manager_log: "/home/st/Schreibtisch/CKG/ckg/config/report_manager_log.config" +graphdb_connector_log: "/home/st/Schreibtisch/CKG/ckg/config/graphdb_connector_log.config" +graphdb_builder_log: "/home/st/Schreibtisch/CKG/ckg/config/graphdb_builder_log.config" +analytics_factory_log: "/home/st/Schreibtisch/CKG/ckg/config/analytics_factory_log.config" diff --git a/ckg/config/graphdb_builder_log.config b/ckg/config/graphdb_builder_log.config new file mode 100644 index 00000000..6d4f9f3c --- /dev/null +++ b/ckg/config/graphdb_builder_log.config @@ -0,0 +1,45 @@ +{ "version": 1, + "disable_existing_loggers": false, + "formatters": { + "simple": { + "format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s" + } + }, + "handlers": { + "console": { + "class": "logging.StreamHandler", + "level": "NOTSET", + "formatter": "simple", + "stream": "ext://sys.stdout" + }, + "info_file_handler": { + "class": "logging.handlers.RotatingFileHandler", + "level": "INFO", + "formatter": "simple", + "filename": "/home/st/Schreibtisch/CKG/log/graphdb_builder.log", + "maxBytes": 10485760, + "backupCount": 20, + "encoding": "utf8" + }, + "error_file_handler": { + "class": "logging.handlers.SMTPHandler", + "level": "CRITICAL", + "formatter": "simple", + "mailhost": "localhost", + "fromaddr": "error@ckg.com", + "toaddrs": "alberto.santos@cpr.ku.dk", + "subject": "CKG ERROR - GraphDB builder" + } + }, + "loggers": { + "my_module": { + "level": "ERROR", + "handlers": ["console"], + "propagate": "no" + } + }, + "root":{ + "level": "WARNING", + "handlers": ["info_file_handler", "error_file_handler"] + } +} diff --git a/ckg/config/graphdb_connector_log.config b/ckg/config/graphdb_connector_log.config new file mode 100644 index 00000000..ed13b1e2 --- /dev/null +++ b/ckg/config/graphdb_connector_log.config @@ -0,0 +1,45 @@ +{ "version": 1, + "disable_existing_loggers": false, + "formatters": { + "simple": { + "format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s" + } + }, + "handlers": { + "console": { + "class": "logging.StreamHandler", + "level": "NOTSET", + "formatter": "simple", + "stream": "ext://sys.stdout" + }, + "info_file_handler": { + "class": "logging.handlers.RotatingFileHandler", + "level": "INFO", + "formatter": "simple", + "filename": "/home/st/Schreibtisch/CKG/log/graphdb_connector.log", + "maxBytes": 10485760, + "backupCount": 20, + "encoding": "utf8" + }, + "error_file_handler": { + "class": "logging.handlers.SMTPHandler", + "level": "CRITICAL", + "formatter": "simple", + "mailhost": "localhost", + "fromaddr": "error@ckg.com", + "toaddrs": "alberto.santos@cpr.ku.dk", + "subject": "CKG ERROR - GraphDB connector" + } + }, + "loggers": { + "my_module": { + "level": "ERROR", + "handlers": ["console"], + "propagate": "no" + } + }, + "root":{ + "level": "WARNING", + "handlers": ["info_file_handler", "error_file_handler"] + } +} diff --git a/ckg/config/report_manager_log.config 
b/ckg/config/report_manager_log.config new file mode 100644 index 00000000..e94b663c --- /dev/null +++ b/ckg/config/report_manager_log.config @@ -0,0 +1,45 @@ +{ "version": 1, + "disable_existing_loggers": false, + "formatters": { + "simple": { + "format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s" + } + }, + "handlers": { + "console": { + "class": "logging.StreamHandler", + "level": "NOTSET", + "formatter": "simple", + "stream": "ext://sys.stdout" + }, + "info_file_handler": { + "class": "logging.handlers.RotatingFileHandler", + "level": "INFO", + "formatter": "simple", + "filename": "/home/st/Schreibtisch/CKG/log/report_manager.log", + "maxBytes": 10485760, + "backupCount": 20, + "encoding": "utf8" + }, + "error_file_handler": { + "class": "logging.handlers.SMTPHandler", + "level": "CRITICAL", + "formatter": "simple", + "mailhost": "localhost", + "fromaddr": "error@ckg.com", + "toaddrs": "alberto.santos@cpr.ku.dk", + "subject": "CKG ERROR - Report manager" + } + }, + "loggers": { + "my_module": { + "level": "ERROR", + "handlers": ["console"], + "propagate": "no" + } + }, + "root":{ + "level": "INFO", + "handlers": ["info_file_handler", "error_file_handler"] + } +} diff --git a/ckg/report_manager/app.py b/ckg/report_manager/app.py index ccb5852f..c235ed1b 100644 --- a/ckg/report_manager/app.py +++ b/ckg/report_manager/app.py @@ -1,21 +1,48 @@ import os +import subprocess +from datetime import datetime +from uuid import uuid4 + +import dash import flask import redis -import dash +from dash import html, dcc, Output, Input + +import ckg.report_manager.user as user from ckg import ckg_utils +from ckg.report_manager import utils +from ckg.report_manager.worker import run_minimal_update_task, \ + run_full_update_task + server = flask.Flask('app') cwd = os.path.dirname(os.path.abspath(__file__)) assets_path = os.path.join(cwd, 'assets') -app = dash.Dash('app', server=server, assets_folder=assets_path, meta_tags=[{"name": "viewport", "content": "width=device-width, initial-scale=1"}]) +os.chdir(cwd) +pages_path = "./pages" # os.path.join(cwd, 'pages') +app = dash.Dash("app", server=server, assets_folder=assets_path, external_stylesheets=[assets_path + "custom.css"], + meta_tags=[{"name": "viewport", "content": "width=device-width, initial-scale=1"}], use_pages=True, + pages_folder=pages_path) +ckg_config = ckg_utils.read_ckg_config() +def main(): + print("IN MAIN") + celery_working_dir = os.path.dirname(os.path.abspath(__file__)) + os.chdir(celery_working_dir) + queues = [('creation', 1, 'INFO'), ('compute', 3, 'INFO'), ('update', 1, 'INFO')] + for queue, processes, log_level in queues: + celery_cmdline = 'celery -A ckg.report_manager.worker worker --loglevel={} --concurrency={} -E -Q {}'.format( + log_level, processes, queue).split(" ") + print("Ready to call {} ".format(celery_cmdline)) + subprocess.Popen(celery_cmdline) + print("Done calling {} ".format(celery_cmdline)) + app.run_server(debug=True, port=8051) #application.run(debug=False, host='0.0.0.0') r = redis.StrictRedis.from_url('redis://localhost:6379') with open(os.path.join(assets_path, "app_template.html"), 'r', encoding='utf8') as f: template = f.read() - app.index_string = template app.scripts.config.serve_locally = False app.config.suppress_callback_exceptions = True @@ -25,3 +52,172 @@ for js in external_js: app.scripts.append_script({"external_url": js}) +app.layout = html.Div(children=[ + + html.Div(id="user-status-header"), + html.Hr(), + + html.H1('Multi-page app with Dash Pages'), + html.Div( + [ + html.Div( + 
dcc.Link( + f"{page['name']} - {page['path']}", href=page["relative_path"], refresh=True + ) + ) + for page in dash.page_registry.values() + ] + ), + html.Hr(), + + dcc.Loading(children=[html.Div([dcc.Location(id='url', refresh=False), + html.Div(id='page-content', + style={'padding-top': 10}, + className='container-fluid'), + dash.page_container])], + style={'text-align': 'center', + 'top': '50%', + 'left': '50%', + 'height': '250px'}, + type='cube', color='#2b8cbe'), +]) + + +@app.callback( + Output("user-status-header", "children"), + Input("url", "pathname"), +) +def update_authentication_status(_): + session_cookie = flask.request.cookies.get('custom-auth-session') + logged_in = session_cookie is not None + if logged_in: + return dcc.Link([html.Form([html.Button('Logout', type='submit')], action='/apps/logout', method='post', + style={'position': 'absolute', 'right': '0px'}, id='logout')], + href="/apps/logoutPage") + return dcc.Link(html.Form([html.Button('Login', type='submit')], + style={'position': 'absolute', 'right': '0px'}, id='login'), href="/apps/loginPage") + + +@server.route('/apps/login', methods=['POST', 'GET']) +def route_login(): + data = flask.request.form + username = data.get('username') + password = data.get('password') + if not username or not password: + flask.abort(401) + elif not user.User(username).verify_password(password): + return flask.redirect('/login_error') + else: + rep = flask.redirect('/') + rep.set_cookie('custom-auth-session', + username + '_' + datetime.now().strftime('%Y%m-%d%H-%M%S-') + str(uuid4())) + return rep + + +@app.server.route('/apps/logout', methods=['POST']) +def route_logout(): + # Redirect back to the index and remove the session cookie. + rep = flask.redirect('/') + rep.set_cookie('custom-auth-session', '', expires=0) + return rep + + +@server.route('/create_user', methods=['POST', 'GET']) +def route_create_user(): + data = flask.request.form + name = data.get('name') + surname = data.get('surname') + affiliation = data.get('affiliation') + acronym = data.get('acronym') + email = data.get('email') + alt_email = data.get('alt_email') + phone = data.get('phone') + uname = name[0] + surname + username = uname + + registered = 'error_exists' + iter = 1 + while registered == 'error_exists': + u = user.User(username=username.lower(), name=name, surname=surname, affiliation=affiliation, acronym=acronym, + phone=phone, email=email, alternative_email=alt_email) + registered = u.register() + if registered is None: + rep = flask.redirect('/apps/admin?error_new_user={}'.format('Failed Database')) + elif registered == 'error_exists': + username = uname + str(iter) + iter += 1 + elif registered == 'error_email': + rep = flask.redirect('/apps/admin?error_new_user={}'.format('Email already registered')) + elif registered == 'error_database': + rep = flask.redirect('/apps/admin?error_new_user={}'.format('User could not be saved in the database')) + else: + rep = flask.redirect('/apps/admin?new_user={}'.format(username)) + + return rep + + +@server.route('/update_minimal', methods=['POST', 'GET']) +def route_minimal_update(): + session_cookie = flask.request.cookies.get('custom-auth-session') + username = session_cookie.split('_')[0] + internal_id = datetime.now().strftime('%Y%m-%d%H-%M%S-') + result = run_minimal_update_task.apply_async(args=[username], task_id='run_minimal_' + session_cookie + internal_id, + queue='update') + + rep = flask.redirect('/apps/admin?running=minimal') + + return rep + + +@server.route('/update_full', methods=['POST', 
'GET']) +def route_full_update(): + session_cookie = flask.request.cookies.get('custom-auth-session') + data = flask.request.form + download = data.get('dwn-radio') == 'true' + username = session_cookie.split('_')[0] + internal_id = datetime.now().strftime('%Y%m-%d%H-%M%S-') + result = run_full_update_task.apply_async(args=[username, download], + task_id='run_full_' + session_cookie + internal_id, queue='update') + + rep = flask.redirect('/apps/admin?running=full') + + return rep + + +@server.route('/downloads/<value>') +def route_report_url(value): + uri = os.path.join(ckg_config['downloads_directory'], value + '.zip') + return flask.send_file(uri, download_name=value + '.zip', as_attachment=True, max_age=-1) + + +@server.route('/example_files') +def route_example_files_url(): + uri = os.path.join(ckg_config['data_directory'], 'example_files.zip') + return flask.send_file(uri, download_name='example_files.zip', as_attachment=True, max_age=-1) + + +@server.route('/apps/templates<value>') +def serve_static(value): + cwd = os.path.dirname(os.path.abspath(__file__)) + directory = os.path.join(cwd, 'apps/templates/') + filename = os.path.join(directory, value) + url = filename + '.zip' + if not os.path.isfile(url): + utils.compress_directory(filename, os.path.join(directory, 'files'), compression_format='zip') + + return flask.send_file(url, download_name=f"{value}.zip", as_attachment=True, max_age=-1) + + +@server.route('/tmp/<value>') +def route_upload_url(value): + page_id, project_id = value.split('_') + directory = ckg_config['tmp_directory'] + filename = os.path.join(directory, 'Uploaded_files_' + project_id) + url = filename + '.zip' + + return flask.send_file(url, download_name=filename.split('/')[-1] + '.zip', as_attachment=True, + max_age=-1) + + +if __name__ == '__main__': + main() diff --git a/ckg/report_manager/apps/adminApp.py b/ckg/report_manager/apps/adminApp.py deleted file mode 100644 index 936618ce..00000000 --- a/ckg/report_manager/apps/adminApp.py +++ /dev/null @@ -1,57 +0,0 @@ -from ckg.report_manager.apps import basicApp -import dash_core_components as dcc -import dash_html_components as html - - -class AdminApp(basicApp.BasicApp): - """ - Defines the Administrator dashboard App - Interface to create users or update the database - """ - def __init__(self, title, subtitle, description, layout=[], logo=None, footer=None): - self.pageType = "adminPage" - basicApp.BasicApp.__init__(self, title, subtitle, description, self.pageType, layout, logo, footer) - self.buildPage() - - def buildPage(self): - """ - Builds page with the basic layout from *basicApp.py* and adds the admin dashboard. 
- """ - self.add_basic_layout() - create_user_form = [html.H3("Create CKG User"), html.Form([ - html.Div(children=[html.Label('Name'), - dcc.Input(placeholder='name', name='name', type='text', required=True), - html.Label('Surname'), - dcc.Input(placeholder='surname', name='surname', type='text', required=True), - html.Label('Acronym'), - dcc.Input(placeholder='acronym', name='acronym', type='text'), - html.Label('Affiliation'), - dcc.Input(placeholder='affiliation', name='affiliation', type='text', required=True)]), - html.Div(children=[html.Label('E-mail'), - dcc.Input(placeholder='email', name='email', type='email', required=True), - html.Label('alternative E-mail'), - dcc.Input(placeholder='alt email', name='alt_e-mail', type='email'), - html.Label('Phone number'), - dcc.Input(placeholder='phone', name='phone', type='tel', required=True)]), - html.Div(children=[html.Button('CreateUser', type='submit', className='button_link')], style={'width': '100%', 'padding-left': '87%', 'padding-right': '0%'})], action='/create_user', method='post')] - update_database_bts = [html.H3("Build CKG Database"), - html.Div(children=[ - html.Form([html.Button('Minimal Update', type='submit', className='button_link')], action='/update_minimal', method='post'), - html.P("This option will load into CKG's graph database the licensed Ontologies and Databases and all their missing relationships.", className='description_p')]), - html.Br(), - html.Div(children=[ - html.Form([html.Button('Full Update', type='submit', className='button_link'), - html.Div(children=[html.H4("Download:"), - html.Label("Yes", className='radioitem'), - dcc.Input(id='Yes', - name='dwn-radio', - value=True, - type='radio'), - html.Label("No", className='radioitem'), - dcc.Input(id='No', - name='dwn-radio', - value=False, - type='radio')])], action='/update_full', method='post'), - html.P("This option will regenerate the entire database, downloading data from the different Ontologies and Databases (Download=Yes) and loading them and all existing projects into CKG's graph database.", className='description_p')])] - admin_options = html.Div(children=[html.Div(children=create_user_form, className='div_framed'), html.Div(children=update_database_bts, className='div_framed')]) - self.add_to_layout(admin_options) diff --git a/ckg/report_manager/apps/apps_config.py b/ckg/report_manager/apps/apps_config.py index 2b692654..1c9f6254 100644 --- a/ckg/report_manager/apps/apps_config.py +++ b/ckg/report_manager/apps/apps_config.py @@ -4,60 +4,64 @@ footer = '' -projectPage = {"overview":[ - ("overview", [], ["basicTable"], {}), - ("number_subjects", [], ["basicTable"], {}), - ("number_analytical_samples", [], ["basicTable"], {}) - ] - } -proteomicsPage= {"overview":[ - ("number_peptides_analytical_sample", [], ["basicBarPlot"], {"x_title":"Analytical sample", "y_title":"number of peptides"}), - ("number_proteins_analytical_sample", [], ["basicBarPlot"], {"x_title":"Analytical sample", "y_title":"number of proteins"}), - ("number_modified_proteins_analytical_sample", [], ["basicBarPlot"], {"x_title":"Analytical sample", "y_title":"number of modified proteins"}) - ], - "stratification":[ - ("identified_proteins_sample_group", - ["pca", "tsne", "umap"], - ["scatterPlot"], - {"imputation":True, "imputation_method":"Mixed", "x_title":"PC1", "y_title":"PC2", "components":2, "perplexity":40, "n_iter":1000, "init":'pca'}) - ], - "regulation":[ - ("identified_proteins_sample_group_with_gene", - ["ttest"], - ["volcanoPlot", "basicTable"], - {"imputation":True, 
"imputation_method":"Mixed", "alpha":0.05, "drop_cols":["sample","gene_name"], "name":"name"}) - ], - #"correlation":[ - # ("identified_proteins_sample_group", - # ["correlation"], - # ["3Dnetwork", "basicTable"], - # {"source":"node1", "target":"node2"}) - # ], - "action":[ - ("drug_acts_on_proteins", - [], - ["basicTable", "3Dnetwork"], - {"replace":[("ACTION","inhibition"),("PROTEINS",'"O60341"')]}) - ] - } -wesPage= {"overview":[ - ("number_somatic_mutations_by_type_analytical_sample",["basicBarPlot", "basicTable"]), - ], - "targets":[ - ("target_analysis_variants",["basicTable", "3dNetwork"]) - ] - } - +projectPage = {"overview": [ + ("overview", [], ["basicTable"], {}), + ("number_subjects", [], ["basicTable"], {}), + ("number_analytical_samples", [], ["basicTable"], {}) +] +} +proteomicsPage = {"overview": [ + ("number_peptides_analytical_sample", [], ["basicBarPlot"], + {"x_title": "Analytical sample", "y_title": "number of peptides"}), + ("number_proteins_analytical_sample", [], ["basicBarPlot"], + {"x_title": "Analytical sample", "y_title": "number of proteins"}), + ("number_modified_proteins_analytical_sample", [], ["basicBarPlot"], + {"x_title": "Analytical sample", "y_title": "number of modified proteins"}) +], + "stratification": [ + ("identified_proteins_sample_group", + ["pca", "tsne", "umap"], + ["scatterPlot"], + {"imputation": True, "imputation_method": "Mixed", "x_title": "PC1", "y_title": "PC2", "components": 2, + "perplexity": 40, "n_iter": 1000, "init": 'pca'}) + ], + "regulation": [ + ("identified_proteins_sample_group_with_gene", + ["ttest"], + ["volcanoPlot", "basicTable"], + {"imputation": True, "imputation_method": "Mixed", "alpha": 0.05, "drop_cols": ["sample", "gene_name"], + "name": "name"}) + ], + # "correlation":[ + # ("identified_proteins_sample_group", + # ["correlation"], + # ["3Dnetwork", "basicTable"], + # {"source":"node1", "target":"node2"}) + # ], + "action": [ + ("drug_acts_on_proteins", + [], + ["basicTable", "3Dnetwork"], + {"replace": [("ACTION", "inhibition"), ("PROTEINS", '"O60341"')]}) + ] +} +wesPage = {"overview": [ + ("number_somatic_mutations_by_type_analytical_sample", ["basicBarPlot", "basicTable"]), +], + "targets": [ + ("target_analysis_variants", ["basicTable", "3dNetwork"]) + ] +} ### Project Page configuration -pages = {"projectPage":{ - "project": projectPage, - "proteomics": proteomicsPage, - #"wes": wesPage - }, - "importsPage":{"stats_file":stats_file}, - "projectCreationPage":{} - } +pages = {"projectPage": { + "project": projectPage, + "proteomics": proteomicsPage, + # "wes": wesPage +}, + "importsPage": {"stats_file": stats_file}, + "projectCreationPage": {} +} ## Overview ## Project Name ## Project description @@ -65,7 +69,3 @@ ## Person responsible ## Participants ## Number of enrolled subjects - - - - diff --git a/ckg/report_manager/apps/basicApp.py b/ckg/report_manager/apps/basicApp.py deleted file mode 100644 index 413c7ea4..00000000 --- a/ckg/report_manager/apps/basicApp.py +++ /dev/null @@ -1,113 +0,0 @@ -import dash_html_components as html - - -class BasicApp: - """ - Defines what an App is in the report_manager. - Other Apps will inherit basic functionality from this class. - Attributes: Title, subtitle, description, logo, footer. 
- """ - def __init__(self, title, subtitle, description, page_type, layout=[], logo=None, footer=None): - self._title = title - self._subtitle = subtitle - self._description = description - self._page_type = page_type - self._logo = logo - self._footer = footer - self._layout = layout - - @property - def title(self): - return self._title - - @title.setter - def title(self, title): - self._title = title - - @property - def subtitle(self): - return self._subtitle - - @subtitle.setter - def subtitle(self, subtitle): - self._subtitle = subtitle - - @property - def description(self): - return self._description - - @description.setter - def description(self, description): - self._description = description - - @property - def page_type(self): - return self._page_type - - @page_type.setter - def page_type(self, page_type): - self._page_type = page_type - - @property - def logo(self): - return self._logo - - @logo.setter - def logo(self, logo): - self._logo = logo - - @property - def footer(self): - return self._footer - - @footer.setter - def footer(self, footer): - self._footer = footer - - @property - def layout(self): - return self._layout - - @layout.setter - def layout(self, layout): - self._layout = layout - - def add_to_layout(self, section): - self.layout.append(section) - - def extend_layout(self, sections): - self.layout.extend(sections) - - def get_HTML_title(self): - return html.H1(children=self.title) - - def get_HTML_subtitle(self): - return html.H2(children=self.subtitle) - - def get_HTML_description(self): - return html.Div(children=self.description) - - def add_basic_layout(self): - """ - Calls class functions to setup the layout: title, subtitle, description, \ - logo and footer. - """ - self.layout.append(html.Div([html.Form([html.Button('Logout', type='submit')], action='/apps/logout', method='post', - style={'display': 'none', 'position': 'absolute', 'right': '0px'}, id='logout_form')])) - self.layout.append(html.Div(html.H2('Invalid user name or password', className='error_msg'), id='error_msg', style={'display': 'none'})) - if self.title is not None: - self.layout.append(self.get_HTML_title()) - if self.subtitle is not None: - self.layout.append(self.get_HTML_subtitle()) - if self.description is not None: - self.layout.append(self.get_HTML_description()) - if self.logo is not None: - self.layout.append(self.logo) - if self.footer is not None: - self.layout.append(self.footer) - - def build_page(self): - """ - Builds page basic layout. - """ - self.add_basic_layout() \ No newline at end of file diff --git a/ckg/report_manager/apps/dataUpload.py b/ckg/report_manager/apps/dataUpload.py index 08b942d1..66ea4986 100644 --- a/ckg/report_manager/apps/dataUpload.py +++ b/ckg/report_manager/apps/dataUpload.py @@ -1,13 +1,15 @@ import os -import sys import re -import pandas as pd +import sys + import numpy as np +import pandas as pd + from ckg import ckg_utils -from ckg.graphdb_connector import connector +from ckg.analytics_core.viz import viz from ckg.graphdb_builder import builder_utils +from ckg.graphdb_connector import connector from ckg.graphdb_connector import query_utils -from ckg.analytics_core.viz import viz ckg_config = ckg_utils.read_ckg_config() log_config = ckg_config['graphdb_builder_log'] @@ -28,7 +30,9 @@ def get_data_upload_queries(): except Exception as err: exc_type, exc_obj, exc_tb = sys.exc_info() fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] - logger.error("Error: {}. 
Reading queries from file {}: {}, file: {},line: {}".format(err, queries_path, sys.exc_info(), fname, exc_tb.tb_lineno)) + logger.error( + "Error: {}. Reading queries from file {}: {}, file: {},line: {}".format(err, queries_path, sys.exc_info(), + fname, exc_tb.tb_lineno)) return data_upload_cypher @@ -53,7 +57,9 @@ def get_new_subject_identifier(driver): subject_identifier = None exc_type, exc_obj, exc_tb = sys.exc_info() fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] - logger.error("Error: {}. Getting new subject identifiers: Query name ({}) - Query ({}), error info: {}, file: {},line: {}".format(err, query_name, query, sys.exc_info(), fname, exc_tb.tb_lineno)) + logger.error( + "Error: {}. Getting new subject identifiers: Query name ({}) - Query ({}), error info: {}, file: {},line: {}".format( + err, query_name, query, sys.exc_info(), fname, exc_tb.tb_lineno)) return subject_identifier @@ -75,7 +81,9 @@ def get_new_biosample_identifier(driver): identifier = None exc_type, exc_obj, exc_tb = sys.exc_info() fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] - logger.error("Error: {}. Getting new biological sample identifiers: Query name ({}) - Query ({}), error info: {}, file: {},line: {}".format(err, query_name, query, sys.exc_info(), fname, exc_tb.tb_lineno)) + logger.error( + "Error: {}. Getting new biological sample identifiers: Query name ({}) - Query ({}), error info: {}, file: {},line: {}".format( + err, query_name, query, sys.exc_info(), fname, exc_tb.tb_lineno)) return identifier @@ -97,7 +105,9 @@ def get_new_analytical_sample_identifier(driver): identifier = None exc_type, exc_obj, exc_tb = sys.exc_info() fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] - logger.error("Error: {}. Getting new analytical sample identifiers: Query name ({}) - Query ({}), error info: {}, file: {},line: {}".format(err, query_name, query, sys.exc_info(), fname, exc_tb.tb_lineno)) + logger.error( + "Error: {}. Getting new analytical sample identifiers: Query name ({}) - Query ({}), error info: {}, file: {},line: {}".format( + err, query_name, query, sys.exc_info(), fname, exc_tb.tb_lineno)) return identifier @@ -119,13 +129,15 @@ def get_subjects_enrolled_in_project(driver, projectId): query = data_upload_cypher[query_name]['query'] for q in query.split(';')[0:-1]: if '$' in q: - result = connector.getCursorData(driver, q+';', parameters={'external_id': str(projectId)}) + result = connector.getCursorData(driver, q + ';', parameters={'external_id': str(projectId)}) else: - result = connector.getCursorData(driver, q+';') + result = connector.getCursorData(driver, q + ';') except Exception as err: exc_type, exc_obj, exc_tb = sys.exc_info() fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] - logger.error("Error: {}. Getting new subjects enrolled in project: Query name ({}) - Query ({}), error info: {}, file: {},line: {}".format(err, query_name, query, sys.exc_info(), fname, exc_tb.tb_lineno)) + logger.error( + "Error: {}. Getting new subjects enrolled in project: Query name ({}) - Query ({}), error info: {}, file: {},line: {}".format( + err, query_name, query, sys.exc_info(), fname, exc_tb.tb_lineno)) return result.values @@ -142,7 +154,9 @@ def check_samples_in_project(driver, projectId): except Exception as err: exc_type, exc_obj, exc_tb = sys.exc_info() fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] - logger.error("Error: {}. 
Checking whether samples exist in project: Query name ({}) - Query ({}), error info: {}, file: {},line: {}".format(err, query_name, query, sys.exc_info(), fname, exc_tb.tb_lineno)) + logger.error( + "Error: {}. Checking whether samples exist in project: Query name ({}) - Query ({}), error info: {}, file: {},line: {}".format( + err, query_name, query, sys.exc_info(), fname, exc_tb.tb_lineno)) return result @@ -160,7 +174,9 @@ def check_external_ids_in_db(driver, projectId): except Exception as err: exc_type, exc_obj, exc_tb = sys.exc_info() fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] - logger.error("Error: {}. Checking if external identifiers exist in the database: Query name ({}) - Query ({}), error info: {}, file: {},line: {}".format(err, query_name, query, sys.exc_info(), fname, exc_tb.tb_lineno)) + logger.error( + "Error: {}. Checking if external identifiers exist in the database: Query name ({}) - Query ({}), error info: {}, file: {},line: {}".format( + err, query_name, query, sys.exc_info(), fname, exc_tb.tb_lineno)) return result @@ -177,11 +193,13 @@ def remove_samples_nodes_db(driver, projectId): project_cypher = ckg_utils.get_queries(os.path.join(directory, queries_path)) query = project_cypher[query_name]['query'].replace('PROJECTID', projectId).split(';')[:-2] for q in query: - result = connector.commitQuery(driver, q+';') + result = connector.commitQuery(driver, q + ';') except Exception as err: exc_type, exc_obj, exc_tb = sys.exc_info() fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] - logger.error("Error: {}. Removing nodes associated to project: Query name ({}) - Query ({}), error info: {}, file: {},line: {}".format(err, query_name, query, sys.exc_info(), fname, exc_tb.tb_lineno)) + logger.error( + "Error: {}. Removing nodes associated to project: Query name ({}) - Query ({}), error info: {}, file: {},line: {}".format( + err, query_name, query, sys.exc_info(), fname, exc_tb.tb_lineno)) return result @@ -197,7 +215,7 @@ def create_new_subjects(driver, data, projectId): subject_id = get_new_subject_identifier(driver) if subject_id is None: subject_id = '1' - subject_ids = ['S'+str(i) for i in np.arange(int(subject_id), int(subject_id) + len(external_ids))] + subject_ids = ['S' + str(i) for i in np.arange(int(subject_id), int(subject_id) + len(external_ids))] subject_dict = dict(zip(external_ids, subject_ids)) query_name = 'create_project_subject' for external_id, subject_id in subject_dict.items(): @@ -207,11 +225,13 @@ def create_new_subjects(driver, data, projectId): data_upload_cypher = get_data_upload_queries() queries = data_upload_cypher[query_name]['query'].split(';')[:-1] for query in queries: - res = connector.commitQuery(driver, query+';', parameters=parameters) + res = connector.commitQuery(driver, query + ';', parameters=parameters) except Exception as err: exc_type, exc_obj, exc_tb = sys.exc_info() fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] - logger.error("Error: {}. Creating new subjects: Query name ({}) - Query ({}), error info: {}, file: {},line: {}".format(err, query_name, query, sys.exc_info(), fname, exc_tb.tb_lineno)) + logger.error( + "Error: {}. 
Creating new subjects: Query name ({}) - Query ({}), error info: {}, file: {},line: {}".format( + err, query_name, query, sys.exc_info(), fname, exc_tb.tb_lineno)) data['subject id'] = data['subject external_id'].map(subject_dict) @@ -231,23 +251,25 @@ def create_new_biosamples(driver, data): if biosample_id is None: biosample_id = '1' - biosample_ids = ['BS'+str(i) for i in np.arange(int(biosample_id), int(biosample_id) + len(external_ids))] + biosample_ids = ['BS' + str(i) for i in np.arange(int(biosample_id), int(biosample_id) + len(external_ids))] biosample_dict = dict(zip(external_ids, biosample_ids)) biosample_subject_dict = dict(zip(external_ids, subject_ids)) query_name = 'create_subject_biosamples' for external_id, biosample_id in biosample_dict.items(): subject_id = biosample_subject_dict[external_id] - parameters = {'external_id': str(external_id), 'biosample_id':biosample_id, 'subject_id': subject_id} + parameters = {'external_id': str(external_id), 'biosample_id': biosample_id, 'subject_id': subject_id} try: query = '' data_upload_cypher = get_data_upload_queries() queries = data_upload_cypher[query_name]['query'].split(';')[:-1] for query in queries: - res = connector.commitQuery(driver, query+';', parameters=parameters) + res = connector.commitQuery(driver, query + ';', parameters=parameters) except Exception as err: exc_type, exc_obj, exc_tb = sys.exc_info() fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] - logger.error("Error: {}. Creating biological samples: Query name ({}) - Query ({}), error info: {}, file: {},line: {}".format(err, query_name, query, sys.exc_info(), fname, exc_tb.tb_lineno)) + logger.error( + "Error: {}. Creating biological samples: Query name ({}) - Query ({}), error info: {}, file: {},line: {}".format( + err, query_name, query, sys.exc_info(), fname, exc_tb.tb_lineno)) data['biological_sample id'] = data['biological_sample external_id'].map(biosample_dict) @@ -282,13 +304,16 @@ def create_new_ansamples(driver, data): data_upload_cypher = get_data_upload_queries() queries = data_upload_cypher[query_name]['query'].split(';')[:-1] for query in queries: - res = connector.commitQuery(driver, query+';', parameters=parameters) + res = connector.commitQuery(driver, query + ';', parameters=parameters) except Exception as err: exc_type, exc_obj, exc_tb = sys.exc_info() fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] - logger.error("Error: {}. Creating analytical samples: Query name ({}) - Query ({}), error info: {}, file: {},line: {}".format(err, query_name, query, sys.exc_info(), fname, exc_tb.tb_lineno)) + logger.error( + "Error: {}. 
Creating analytical samples: Query name ({}) - Query ({}), error info: {}, file: {},line: {}".format( + err, query_name, query, sys.exc_info(), fname, exc_tb.tb_lineno)) - data = data.rename(columns={'asample_id': 'analytical_sample id', 'external_id': 'analytical_sample external_id', 'biosample_id': 'biological_sample id'}) + data = data.rename(columns={'asample_id': 'analytical_sample id', 'external_id': 'analytical_sample external_id', + 'biosample_id': 'biological_sample id'}) return data @@ -362,21 +387,26 @@ def get_project_information(driver, project_id): code = section['query'] queries.extend(code.replace("PROJECTID", project_id).split(';')[0:-1]) for query in queries: - result = connector.sendQuery(driver, query+";")[0] + result = connector.sendQuery(driver, query + ";")[0] data.append(result) except Exception as err: exc_type, exc_obj, exc_tb = sys.exc_info() fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] - logger.error("Error: {}. Creating analytical samples: Query name ({}) - Query ({}), error info: {}, file: {},line: {}".format(err, query_name, query, sys.exc_info(), fname, exc_tb.tb_lineno)) + logger.error( + "Error: {}. Creating analytical samples: Query name ({}) - Query ({}), error info: {}, file: {},line: {}".format( + err, query_name, query, sys.exc_info(), fname, exc_tb.tb_lineno)) if data: for i, j in enumerate(data): df = pd.DataFrame([data[i]], columns=data[i].keys()) header = '_'.join(df.columns[0].split('_', 1)[1:]).capitalize() df.rename(columns={df.columns[0]: 'project'}, inplace=True) - res.append(viz.get_table(df, identifier='new_project_{}'.format(header), args={'title':'{} data uploaded for project {}'.format(header, project_id)})) + res.append(viz.get_table(df, identifier='new_project_{}'.format(header), + args={'title': '{} data uploaded for project {}'.format(header, project_id)})) else: res = None - logger.error("Error: No data was uploaded for project: {}. Review your experimental design and data files and the logs for errors.".format(project_id)) + logger.error( + "Error: No data was uploaded for project: {}. Review your experimental design and data files and the logs for errors.".format( + project_id)) return res diff --git a/ckg/report_manager/apps/dataUploadApp.py b/ckg/report_manager/apps/dataUploadApp.py deleted file mode 100644 index d92c8893..00000000 --- a/ckg/report_manager/apps/dataUploadApp.py +++ /dev/null @@ -1,85 +0,0 @@ -import dash_core_components as dcc -import dash_html_components as html -from ckg.report_manager.apps import basicApp -from ckg import ckg_utils - - -DataTypes = ['experimental_design', 'clinical', 'proteomics', 'interactomics', 'phosphoproteomics'] - - -class DataUploadApp(basicApp.BasicApp): - """ - Defines what the dataUpload App is in the report_manager. - Used to upload experimental and clinical data to correct project folder. - - .. warning:: There is a size limit of 55MB. Files bigger than this will have to be moved manually. - """ - def __init__(self, title, subtitle, description, layout=[], logo=None, footer=None): - self.pageType = "UploadDataPage" - basicApp.BasicApp.__init__(self, title, subtitle, description, self.pageType, layout, logo, footer) - self.buildPage() - - def buildPage(self): - """ - Builds page with the basic layout from *basicApp.py* and adds relevant Dash components for project data upload. - """ - self.add_basic_layout() - layout = [html.Div([ - html.Div([html.H4('Project identifier:', style={'marginTop': 30, 'marginBottom': 20}), - dcc.Input(id='project_id', placeholder='e.g. 
P0000001', type='text', value='', debounce=True, maxLength=8, minLength=8, style={'width':'100%', 'height':'55px'}), - dcc.Markdown(id='existing-project')], - style={'width': '20%'}), - html.Br(), - html.Div(id='upload-form', children=[ - html.Div(children=[html.A("Download example files", - id='example_files', - href= '/example_files', - n_clicks=0, - className="button_link")], - style={'width':'100%', 'padding-left': '87%', 'padding-right': '0%'}), - html.Div(children=[html.Label('Select upload data type:', style={'marginTop': 10})], - style={'width': '49%', 'marginLeft': '0%', 'verticalAlign': 'top', 'fontSize': '18px'}), - html.Div(children=[dcc.RadioItems(id='upload-data-type-picker', options=[{'label': i, 'value': i} for i in DataTypes], value=None, - inputStyle={"margin-right": "5px"}, style={'display': 'block', 'fontSize': '16px'})]), - html.Div(children=[html.H5('Proteomics tool:'), dcc.RadioItems(id='prot-tool', options=[{'label': i, 'value': i} for i in ['MaxQuant', 'DIA-NN','Spectronaut', 'FragPipe', 'mzTab']], value='', - inputStyle={"margin-right": "5px"}, style={'display': 'block', 'fontSize': '16px'})], id='proteomics-tool', style={'padding-top': 20}), - html.Div(children=[html.H5('Select the type of file uploaded:'), dcc.Dropdown(id='prot-file', options=[{'label': i, 'value': i} for i in ['Protein groups', 'Peptides', 'Phospho STY sites']], value='', - style={'display': 'block', 'fontSize': '14px', 'width': '250px'})], id='proteomics-file', style={'padding-top': 20}), - html.Div([html.H4('Upload file (max. 100Mb)', style={'marginTop': 30, 'marginBottom': 20}), - dcc.Upload(id='upload-data', children=html.Div(['Drag and Drop or ', html.A('Select Files')]), - style={'width': '100%', - 'height': '60px', - 'lineHeight': '60px', - 'borderWidth': '1px', - 'borderStyle': 'dashed', - 'borderRadius': '5px', - 'textAlign': 'center', - 'margin': '0px'}, - multiple=False, max_size=1024 * 1024 * 1000)]), - html.Br(), - html.Div(children=[dcc.Markdown('**Uploaded Files:**', id='markdown-title'), dcc.Markdown(id='uploaded-files')]), - html.Div([html.Button("Upload Data to CKG", - id='submit_button', - n_clicks=0, - className="button_link")], - style={'width':'100%', 'padding-left': '87%', 'padding-right': '0%'})]), - html.Div(children=[ - html.A('Download Files(.zip)', - id='data_download_link', - href='', - n_clicks=0, - style={'display': 'none'}, - className="button_link")]), - html.Div(children=[ - html.A(children='', - id='link-project-report', - href='', - target='', - n_clicks=0, - style={'display': 'none'}, - className="button_link")]), - html.Div(id='data-upload-result', children=[dcc.Markdown(id='upload-result')], style={'width': '100%'}), - html.Hr()]), - html.Div(id='project_table', children=[])] - - self.extend_layout(layout) diff --git a/ckg/report_manager/apps/homepageApp.py b/ckg/report_manager/apps/homepageApp.py deleted file mode 100644 index 07b09462..00000000 --- a/ckg/report_manager/apps/homepageApp.py +++ /dev/null @@ -1,39 +0,0 @@ -from ckg.report_manager.apps import basicApp -from ckg.report_manager.apps import homepageStats as hpstats - - -class HomePageApp(basicApp.BasicApp): - """ - Defines what the HomePage App is in the report_manager. - Enables the tracking of the number of entitites, relationships, projects, and others, currently in the grapha database. 
- """ - def __init__(self, title, subtitle, description, layout=[], logo=None, footer=None): - self.pageType = "homePage" - basicApp.BasicApp.__init__(self, title, subtitle, description, self.pageType, layout, logo, footer) - self.buildPage() - - def buildPage(self): - """ - Builds page with the basic layout from *basicApp.py* and adds all the relevant plots from *homepageStats.py*. - """ - args = {} - args['valueCol'] = 'value' - args['textCol'] = 'size' - args['y'] = 'index' - args['x'] = 'number' - args['orientation'] = 'h' - args['title'] = '' - args['x_title'] = '' - args['y_title'] = '' - args['height'] = 900 - args['width'] = 900 - - self.add_basic_layout() - layout = hpstats.quick_numbers_panel() - dfs = hpstats.get_db_stats_data() - plots = [] - plots.append(hpstats.plot_store_size_components(dfs, title='DB Store Size', args=args)) - plots.append(hpstats.plot_node_rel_per_label(dfs, focus='nodes', title='Nodes per Label', args=args)) - plots.append(hpstats.plot_node_rel_per_label(dfs, focus='relationships', title='Relationships per Type', args=args)) - self.extend_layout(layout) - self.extend_layout(plots) diff --git a/ckg/report_manager/apps/homepageStats.py b/ckg/report_manager/apps/homepageStats.py index f605c7dd..a459705d 100644 --- a/ckg/report_manager/apps/homepageStats.py +++ b/ckg/report_manager/apps/homepageStats.py @@ -1,14 +1,18 @@ import os import sys -import pandas as pd + +import dash import numpy as np +import pandas as pd import plotly.graph_objs as go -import dash_core_components as dcc -import dash_html_components as html +from dash import dcc, Output, Input +from dash import html + from ckg import ckg_utils -from ckg.graphdb_connector import connector -from ckg.analytics_core.viz import viz from ckg.analytics_core import utils +from ckg.analytics_core.viz import viz +from ckg.graphdb_connector import connector + def size_converter(value): """ @@ -18,14 +22,14 @@ def size_converter(value): :return: String with converted value and units. """ unit = 'KB' - val = np.round(value*0.001, 2) + val = np.round(value * 0.001, 2) if len(str(val).split('.')[0]) > 3: unit = 'MB' - val = np.round(val*0.001, 2) + val = np.round(val * 0.001, 2) if len(str(val).split('.')[0]) > 3: unit = 'GB' - val = np.round(val*0.001, 2) - return str(val)+' '+unit + val = np.round(val * 0.001, 2) + return str(val) + ' ' + unit def get_query(): @@ -42,7 +46,9 @@ def get_query(): except Exception as err: exc_type, exc_obj, exc_tb = sys.exc_info() fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] - raise Exception("Erro: {}. Reading queries from file {}: {}, file: {},line: {}".format(err, queries_path, sys.exc_info(), fname, exc_tb.tb_lineno)) + raise Exception( + "Erro: {}. 
Reading queries from file {}: {}, file: {},line: {}".format(err, queries_path, sys.exc_info(), + fname, exc_tb.tb_lineno)) return data_upload_cypher @@ -53,22 +59,22 @@ def get_db_schema(): :return: network with all the database nodes and how they are related """ style = [{'selector': 'node', - 'style': {'label': 'data(name)', - 'background-color': 'data(color)', - 'text-valign': 'center', - 'text-halign': 'center', - 'border-color': 'gray', - 'border-width': '1px', - 'width': 55, - 'height': 55, - 'opacity': 0.8, - 'font-size': '14'}}, - {'selector': 'edge', - 'style': {'label': 'data(label)', - 'curve-style': 'bezier', - 'opacity': 0.7, - 'width': 0.4, - 'font-size': '5'}}] + 'style': {'label': 'data(name)', + 'background-color': 'data(color)', + 'text-valign': 'center', + 'text-halign': 'center', + 'border-color': 'gray', + 'border-width': '1px', + 'width': 55, + 'height': 55, + 'opacity': 0.8, + 'font-size': '14'}}, + {'selector': 'edge', + 'style': {'label': 'data(label)', + 'curve-style': 'bezier', + 'opacity': 0.7, + 'width': 0.4, + 'font-size': '5'}}] layout = {'name': 'cose', 'idealEdgeLength': 100, 'nodeOverlap': 20, @@ -87,7 +93,7 @@ def get_db_schema(): query_name = 'db_schema' cypher = get_query() driver = connector.getGraphDatabaseConnectionConfiguration() - + if driver is not None: if query_name in cypher: if 'query' in cypher[query_name]: @@ -104,7 +110,9 @@ def get_db_schema(): except Exception as err: plot = html.Div(children=html.H1("Error accessing the database statistics", className='error_msg')) else: - plot = html.Div(children=html.H1("Error: Cypher query {} for accessing the database statistics does not exist".format(query_name), className='error_msg')) + plot = html.Div(children=html.H1( + "Error: Cypher query {} for accessing the database statistics does not exist".format(query_name), + className='error_msg')) else: plot = html.Div(children=html.H1("Database is offline", className='error_msg')) @@ -125,7 +133,7 @@ def get_db_stats_data(): dfs = {} cypher = get_query() driver = connector.getGraphDatabaseConnectionConfiguration() - + if driver is not None: for i, j in zip(df_names, query_names): query = cypher[j]['query'] @@ -159,7 +167,7 @@ def plot_store_size_components(dfs, title, args): if 'store_size' in dfs: data = pd.read_json(dfs['store_size'], orient='records') data.index = ['Array store', 'Logical Log', 'Node store', 'Property store', - 'Relationship store', 'String store', 'Total store size'] + 'Relationship store', 'String store', 'Total store size'] data.columns = ['value', 'size'] data = data.iloc[:-1] fig = viz.get_pieplot(data, identifier='store_size_pie', args=args) @@ -184,22 +192,22 @@ def plot_node_rel_per_label(dfs, title, args, focus='nodes'): data = pd.read_json(dfs['meta_stats'], orient='records') if focus == 'nodes': data = pd.DataFrame.from_dict(data['labels'][0], orient='index', columns=[ - 'number']).reset_index() + 'number']).reset_index() elif focus == 'relationships': data = pd.DataFrame.from_dict( data['relTypesCount'][0], orient='index', columns=['number']).reset_index() data = data.sort_values('number') - + if not data.empty: fig = viz.get_barplot(data, identifier='node_rel_per_label_{}'.format(focus), args=args) fig.figure['layout'] = go.Layout(barmode='relative', - height=args['height'], - xaxis={'type': 'log', 'range': [0, np.log10(data['number'].iloc[-1])]}, - yaxis={'showline': True, 'linewidth': 1, 'linecolor': 'black'}, - font={'family': 'MyriadPro-Regular', 'size': 12}, - template='plotly_white', - bargap=0.2) + 
height=args['height'], + xaxis={'type': 'log', 'range': [0, np.log10(data['number'].iloc[-1])]}, + yaxis={'showline': True, 'linewidth': 1, 'linecolor': 'black'}, + font={'family': 'MyriadPro-Regular', 'size': 12}, + template='plotly_white', + bargap=0.2) return html.Div([html.H3(title), fig], style={'margin': '0%', 'padding': '0%'}) @@ -214,18 +222,18 @@ def indicator(color, text, id_value): :return: Dash div containing title and an html.P element. """ return html.Div([html.H4(id=id_value), - html.P(text)], style={'border-radius': '5px', - 'background-color': '#f9f9f9', - 'margin': '0.3%', - 'padding': '1%', - 'position': 'relative', - 'box-shadow': '2px 2px 2px lightgrey', - 'width': '19%', - # 'height': '15%', - # 'width':'230px', - 'height': '140px', - 'display': 'inline-block', - 'vertical-align': 'middle'}) + html.P(text)], style={'borderRadius': '5px', + 'backgroundColor': '#f9f9f9', + 'margin': '0.3%', + 'padding': '1%', + 'position': 'relative', + 'boxShadow': '2px 2px 2px lightgrey', + 'width': '19%', + # 'height': '15%', + # 'width':'230px', + 'height': '140px', + 'display': 'inline-block', + 'verticalAlign': 'middle'}) def quick_numbers_panel(): @@ -238,18 +246,20 @@ def quick_numbers_panel(): project_links = [html.H4('No available Projects')] try: driver = connector.getGraphDatabaseConnectionConfiguration() - + if driver is not None: projects = connector.find_nodes(driver, node_type='Project', parameters={}) for project in projects: project_ids.append((project['n']['name'], project['n']['id'])) project_links = [html.H4('Available Projects:')] + + print("Connected successfully") except Exception: - pass + print("Error connecting to Database") # pass for project_name, project_id in project_ids: project_links.append(html.A(project_name.title(), - id='link-internal', + id=f'link-internal-{project_id}', href='/apps/project?project_id={}&force=0'.format(project_id), target='', n_clicks=0, @@ -257,7 +267,7 @@ def quick_numbers_panel(): project_dropdown = [html.H6('Project finder:'), dcc.Dropdown(id='project_option', - options=[{'label': name, 'value': (name, value)} for name, value in project_ids], + options=[{'label': name, 'value': value} for name, value in project_ids], value='', multi=False, clearable=True, @@ -306,3 +316,16 @@ def quick_numbers_panel(): ] return layout + + +@dash.callback(Output("project_url", "children"), + [Input("project_option", "value")]) +def update_project_url(value): + if value is not None and len(value) > 1: + return html.A(value[0].title(), + href='/apps/project?project_id={}&force=0'.format(value[1]), + target='', + n_clicks=0, + className="button_link") + else: + return '' diff --git a/ckg/report_manager/apps/imports.py b/ckg/report_manager/apps/imports.py index 4aabe31e..0d3ea6d3 100644 --- a/ckg/report_manager/apps/imports.py +++ b/ckg/report_manager/apps/imports.py @@ -1,13 +1,15 @@ -from ckg.analytics_core.viz import viz -import dash_core_components as dcc +from collections import defaultdict +from itertools import chain + +import numpy as np +import pandas as pd import plotly.graph_objs as go import plotly.subplots as tools -import pandas as pd -import numpy as np -from itertools import chain -from collections import defaultdict +from dash import dcc from natsort import natsorted +from ckg.analytics_core.viz import viz + def get_stats_data(filename, n=3): """ @@ -27,11 +29,11 @@ def get_stats_data(filename, n=3): df['Import_flag'] = k else: aux = store[key] - aux['Import_flag']= k + aux['Import_flag'] = k df = df.append(aux) - + if not df.empty: 
- df['datetime'] = pd.to_datetime(df['date']+' '+df['time']) + df['datetime'] = pd.to_datetime(df['date'] + ' ' + df['time']) imp = select_last_n_imports(df, n=n) df = df[df['import_id'].isin(imp)].reset_index(drop=True) @@ -48,15 +50,16 @@ def select_last_n_imports(stats_file, n=3): """ partial = [] full = [] - if 'datetime' in stats_file and 'import_id' in stats_file and 'Import_flag' in stats_file: - df = stats_file[['datetime', 'import_id', 'Import_flag']].sort_values('datetime', ascending=False).drop_duplicates(['import_id'], keep = 'first', inplace = False) + if 'datetime' in stats_file and 'import_id' in stats_file and 'Import_flag' in stats_file: + df = stats_file[['datetime', 'import_id', 'Import_flag']].sort_values('datetime', + ascending=False).drop_duplicates( + ['import_id'], keep='first', inplace=False) full = df[df['Import_flag'] == 'full'] full = full.iloc[:n, 1].tolist() partial = df[df['Import_flag'] == 'partial'] partial = partial.iloc[:n, 1].tolist() - - return partial + full + return partial + full def remove_legend_duplicates(figure): @@ -67,7 +70,7 @@ def remove_legend_duplicates(figure): """ seen = [] if 'data' in figure: - for n,i in enumerate(figure['data']): + for n, i in enumerate(figure['data']): name = figure['data'][n]['name'] if name in seen: figure.data[n].update(showlegend=False) @@ -104,10 +107,10 @@ def get_databases_entities_relationships(stats_file, key='full', options='databa stats = stats_file if not stats.empty: if 'Import_type' in stats: - mask = (stats['Import_type']=='entity') - mask2 = (stats['Import_type']=='relationships') - ent = list(set(list(zip(stats.loc[mask,'filename'], stats.loc[mask,'dataset'])))) - rel = list(set(list(zip(stats.loc[mask2,'filename'], stats.loc[mask2,'dataset'])))) + mask = (stats['Import_type'] == 'entity') + mask2 = (stats['Import_type'] == 'relationships') + ent = list(set(list(zip(stats.loc[mask, 'filename'], stats.loc[mask, 'dataset'])))) + rel = list(set(list(zip(stats.loc[mask2, 'filename'], stats.loc[mask2, 'dataset'])))) if 'import_id' in stats and 'datetime in stats': dat = [] @@ -200,29 +203,29 @@ def get_dropdown_menu(fig, options_dict, add_button=True, equal_traces=True, num visible[start:end] = [True] * number_traces start += number_traces else: - number_traces = len([element for tupl in options_dict[i] for element in tupl])*2 + number_traces = len([element for tupl in options_dict[i] for element in tupl]) * 2 visible = [False] * len(fig['data']) end = start + number_traces visible[start:end] = [True] * number_traces start += number_traces temp_dict = dict(label=str(i), - method='update', - args=[{'visible': visible}, - {'title': 'Date: '+i}]) + method='update', + args=[{'visible': visible}, + {'title': 'Date: ' + i}]) list_updatemenus.append(temp_dict) if add_button: button = [dict(label='All', - method='update', - args=[{'visible': [True] * len(fig['data'])}, {'title': 'All'}])] + method='update', + args=[{'visible': [True] * len(fig['data'])}, {'title': 'All'}])] list_updatemenus = list_updatemenus + button else: pass - updatemenus = list([dict(active=len(list_updatemenus)-1, - buttons=list_updatemenus, - direction='down', - showactive=True, x=-0.17, xanchor='left', y=1.1, yanchor='top'), ]) + updatemenus = list([dict(active=len(list_updatemenus) - 1, + buttons=list_updatemenus, + direction='down', + showactive=True, x=-0.17, xanchor='left', y=1.1, yanchor='top'), ]) return updatemenus @@ -317,11 +320,12 @@ def plot_total_number_imported(stats_file, plot_title): if len(traces) > 0: if type(traces[0]) 
== list: traces = list(chain.from_iterable(traces)) - + layout = go.Layout(title='', xaxis=dict(title=''), yaxis={'title': 'Number of imports'}, legend={'font': {'size': 11}}, margin=go.layout.Margin(l=80, r=40, t=100, b=50), annotations=[dict(text='{}'.format(plot_title), font=dict(family='Arial', size=18), - showarrow=False, xref='paper', x=-0.06, xanchor='left', yref='paper', y=1.15, yanchor='top')]) + showarrow=False, xref='paper', x=-0.06, xanchor='left', yref='paper', y=1.15, + yanchor='top')]) fig = go.Figure(data=traces, layout=layout) fig['layout']['template'] = 'plotly_white' @@ -340,8 +344,8 @@ def plot_total_numbers_per_date(stats_file, plot_title): df_full = get_totals_per_date(stats_file, key='full', import_types=True) df_partial = get_totals_per_date(stats_file, key='partial', import_types=True) - traces_f = viz.getPlotTraces(df_full, key='full', type='scaled markers', div_factor=float(10^1000)) - traces_p = viz.getPlotTraces(df_partial, key='partial', type='scaled markers', div_factor=float(10^1000)) + traces_f = viz.getPlotTraces(df_full, key='full', type='scaled markers', div_factor=float(10 ^ 1000)) + traces_p = viz.getPlotTraces(df_partial, key='partial', type='scaled markers', div_factor=float(10 ^ 1000)) traces = traces_f + traces_p if type(traces[0]) == list: @@ -350,13 +354,14 @@ def plot_total_numbers_per_date(stats_file, plot_title): pass layout = go.Layout(title='', - xaxis={'showgrid': True}, - yaxis={'title': 'Imported entities/relationships'}, - legend={'font': {'size':11}}, - height=550, - margin=go.layout.Margin(l=80, r=40, t=100, b=100), - annotations=[dict(text='{}'.format(plot_title), font=dict(family='Arial', size=18), - showarrow=False, xref='paper', x=-0.06, xanchor='left', yref='paper', y=1.15, yanchor='top')]) + xaxis={'showgrid': True}, + yaxis={'title': 'Imported entities/relationships'}, + legend={'font': {'size': 11}}, + height=550, + margin=go.layout.Margin(l=80, r=40, t=100, b=100), + annotations=[dict(text='{}'.format(plot_title), font=dict(family='Arial', size=18), + showarrow=False, xref='paper', x=-0.06, xanchor='left', yref='paper', y=1.15, + yanchor='top')]) fig = go.Figure(data=traces, layout=layout) fig['layout']['template'] = 'plotly_white' @@ -381,7 +386,7 @@ def plot_databases_numbers_per_date(stats_file, plot_title, key='full', dropdown elif key == 'partial': stats = stats_file[stats_file['Import_flag'] == 'partial'] else: - + ('Syntax error') dropdown_options = get_databases_entities_relationships(stats_file, key=key, options=dropdown_options) @@ -396,19 +401,20 @@ def plot_databases_numbers_per_date(stats_file, plot_title, key='full', dropdown if type(traces[0]) == list: traces = list(chain.from_iterable(traces)) - layout = go.Layout(title='', xaxis = {'showgrid':True, 'type':'log','title':'Imported entities/relationships'}, - legend={'font':{'size':11}}, height=600, margin=go.layout.Margin(l=40,r=40,t=80,b=100), - annotations=[dict(text='{}'.format(plot_title), font = dict(family='Arial', size = 18), - showarrow=False, xref = 'paper', x=-0.17, xanchor='left', yref = 'paper', y=1.2, yanchor='top')]) + layout = go.Layout(title='', xaxis={'showgrid': True, 'type': 'log', 'title': 'Imported entities/relationships'}, + legend={'font': {'size': 11}}, height=600, margin=go.layout.Margin(l=40, r=40, t=80, b=100), + annotations=[dict(text='{}'.format(plot_title), font=dict(family='Arial', size=18), + showarrow=False, xref='paper', x=-0.17, xanchor='left', yref='paper', y=1.2, + yanchor='top')]) fig = go.Figure(data=traces, layout=layout) 
fig['layout']['template'] = 'plotly_white' if dropdown: updatemenus = get_dropdown_menu(fig, dropdown_options, add_button=True, equal_traces=True, number_traces=2) - fig.layout.update(go.Layout(updatemenus = updatemenus)) + fig.layout.update(go.Layout(updatemenus=updatemenus)) - names = set([fig['data'][n]['name'] for n,i in enumerate(fig['data'])]) + names = set([fig['data'][n]['name'] for n, i in enumerate(fig['data'])]) colors = dict(zip(names, ['red', 'blue', 'green', 'yellow', 'orange'])) for name in names: @@ -416,10 +422,12 @@ def plot_databases_numbers_per_date(stats_file, plot_title, key='full', dropdown # remove_legend_duplicates(fig) #Removes legend from individual plots. - return dcc.Graph(id = 'databases imports {}'.format(key), figure = fig) + return dcc.Graph(id='databases imports {}'.format(key), figure=fig) -def plot_import_numbers_per_database(stats_file, plot_title, key='full', subplot_titles = ('',''), colors=True, plots_1='entities', plots_2='relationships', dropdown=True, dropdown_options='databases'): +def plot_import_numbers_per_database(stats_file, plot_title, key='full', subplot_titles=('', ''), colors=True, + plots_1='entities', plots_2='relationships', dropdown=True, + dropdown_options='databases'): """ Creates plotly multiplot figure with breakdown of imported numbers and size of the respective files, per database and \ import type (entities or relationships). @@ -451,61 +459,69 @@ def plot_import_numbers_per_database(stats_file, plot_title, key='full', subplot ent_colors = set_colors(ent) rel_colors = set_colors(rel) - fig = tools.make_subplots(2, 2, subplot_titles = subplot_titles, vertical_spacing = 0.18, horizontal_spacing = 0.2) + fig = tools.make_subplots(2, 2, subplot_titles=subplot_titles, vertical_spacing=0.18, horizontal_spacing=0.2) for i, j in stats.groupby(['dataset', 'filename']): date = pd.Series(str(j['datetime'].sort_values().reset_index(drop=True)[0])) - j = j.sort_values(['import_id', 'datetime']).drop_duplicates(['dataset', 'import_id', 'filename'], keep='first', inplace=False) + j = j.sort_values(['import_id', 'datetime']).drop_duplicates(['dataset', 'import_id', 'filename'], keep='first', + inplace=False) entities_df = j[j['Import_type'] == 'entity'] relationships_df = j[j['Import_type'] == 'relationships'] if not entities_df['Imported_number'].empty: fig.append_trace(go.Scattergl(visible=True, - x=entities_df['datetime'], - y=entities_df['Imported_number'], - mode='markers+lines', - marker = dict(color = ent_colors[i[1]]), - name=i[1].split('.')[0]),1,1) + x=entities_df['datetime'], + y=entities_df['Imported_number'], + mode='markers+lines', + marker=dict(color=ent_colors[i[1]]), + name=i[1].split('.')[0]), 1, 1) fig.append_trace(go.Scattergl(visible=True, - x=entities_df['datetime'], - y=entities_df['file_size'], - mode='markers+lines', - marker = dict(color = ent_colors[i[1]]), - name=i[1].split('.')[0], - showlegend=False),2,1) + x=entities_df['datetime'], + y=entities_df['file_size'], + mode='markers+lines', + marker=dict(color=ent_colors[i[1]]), + name=i[1].split('.')[0], + showlegend=False), 2, 1) if not relationships_df['Imported_number'].empty: fig.append_trace(go.Scattergl(visible=True, - x=relationships_df['datetime'], - y=relationships_df['Imported_number'], - mode='markers+lines', - marker = dict(color = rel_colors[i[1]]), - name=i[1].split('.')[0]),1,2) + x=relationships_df['datetime'], + y=relationships_df['Imported_number'], + mode='markers+lines', + marker=dict(color=rel_colors[i[1]]), + name=i[1].split('.')[0]), 1, 2) 
fig.append_trace(go.Scattergl(visible=True, - x=relationships_df['datetime'], - y=relationships_df['file_size'], - mode='markers+lines', - marker = dict(color = rel_colors[i[1]]), - name=i[1].split('.')[0], - showlegend=False),2,2) + x=relationships_df['datetime'], + y=relationships_df['file_size'], + mode='markers+lines', + marker=dict(color=rel_colors[i[1]]), + name=i[1].split('.')[0], + showlegend=False), 2, 2) - fig.layout.update(go.Layout(legend={'orientation':'v', 'font':{'size':11}}, - height=700, margin=go.layout.Margin(l=20,r=20,t=150,b=60))) + fig.layout.update(go.Layout(legend={'orientation': 'v', 'font': {'size': 11}}, + height=700, margin=go.layout.Margin(l=20, r=20, t=150, b=60))) annotations = [] - annotations.append(dict(text='{}'.format(plot_title), font = dict(family='Arial', size = 18), - showarrow=False, xref = 'paper', x=-0.07, xanchor='left', yref = 'paper', y=1.3, yanchor='top')) - annotations.append({'font':{'size': 14},'showarrow':False,'text':subplot_titles[0],'x':0.23,'xanchor':'center','xref':'paper','y':1.0,'yanchor':'bottom','yref':'paper'}) - annotations.append({'font':{'size': 14},'showarrow':False,'text':subplot_titles[1],'x':0.78,'xanchor':'center','xref':'paper','y':1.0,'yanchor':'bottom','yref':'paper'}) - annotations.append({'font':{'size': 14},'showarrow':False,'text':subplot_titles[2],'x':0.23,'xanchor':'center','xref':'paper','y':0.44,'yanchor':'bottom','yref':'paper'}) - annotations.append({'font':{'size': 14},'showarrow':False,'text':subplot_titles[3],'x':0.78,'xanchor':'center','xref':'paper','y':0.44,'yanchor':'bottom','yref':'paper'}) + annotations.append(dict(text='{}'.format(plot_title), font=dict(family='Arial', size=18), + showarrow=False, xref='paper', x=-0.07, xanchor='left', yref='paper', y=1.3, yanchor='top')) + annotations.append( + {'font': {'size': 14}, 'showarrow': False, 'text': subplot_titles[0], 'x': 0.23, 'xanchor': 'center', + 'xref': 'paper', 'y': 1.0, 'yanchor': 'bottom', 'yref': 'paper'}) + annotations.append( + {'font': {'size': 14}, 'showarrow': False, 'text': subplot_titles[1], 'x': 0.78, 'xanchor': 'center', + 'xref': 'paper', 'y': 1.0, 'yanchor': 'bottom', 'yref': 'paper'}) + annotations.append( + {'font': {'size': 14}, 'showarrow': False, 'text': subplot_titles[2], 'x': 0.23, 'xanchor': 'center', + 'xref': 'paper', 'y': 0.44, 'yanchor': 'bottom', 'yref': 'paper'}) + annotations.append( + {'font': {'size': 14}, 'showarrow': False, 'text': subplot_titles[3], 'x': 0.78, 'xanchor': 'center', + 'xref': 'paper', 'y': 0.44, 'yanchor': 'bottom', 'yref': 'paper'}) fig.layout['annotations'] = annotations fig['layout']['template'] = 'plotly_white' if dropdown: updatemenus = get_dropdown_menu(fig, dropdown_options, add_button=True, equal_traces=False) - fig.layout.update(go.Layout(updatemenus = updatemenus)) - + fig.layout.update(go.Layout(updatemenus=updatemenus)) - return dcc.Graph(id = 'imports-breakdown per database {}'.format(key), figure = fig) + return dcc.Graph(id='imports-breakdown per database {}'.format(key), figure=fig) diff --git a/ckg/report_manager/apps/importsApp.py b/ckg/report_manager/apps/importsApp.py deleted file mode 100644 index ce6b0b9c..00000000 --- a/ckg/report_manager/apps/importsApp.py +++ /dev/null @@ -1,37 +0,0 @@ -import os -from ckg import ckg_utils -from ckg.report_manager.apps import basicApp -from ckg.report_manager.apps import imports -from ckg.analytics_core.viz import viz - - -class ImportsApp(basicApp.BasicApp): - """ - Defines what the imports App is in the report_manager. 
- Enables the tracking of the number of imported entitites and relationships, updates and file sizes. - """ - def __init__(self, title, subtitle, description, layout = [], logo = None, footer = None): - self.pageType = "importsPage" - basicApp.BasicApp.__init__(self, title, subtitle, description, self.pageType, layout, logo, footer) - self.buildPage() - - def buildPage(self): - """ - Builds page with the basic layout from *basicApp.py* and adds all the relevant plots from *imports.py*. - """ - plots = [] - self.add_basic_layout() - stats_dir = ckg_utils.read_ckg_config(key='stats_directory') - stats_file = os.path.join(stats_dir, "stats.hdf") - if os.path.exists(stats_file): - stats_df = imports.get_stats_data(stats_file, n=3) - plots.append(imports.plot_total_number_imported(stats_df, 'Number of imported entities and relationships')) - plots.append(imports.plot_total_numbers_per_date(stats_df, 'Imported entities vs relationships')) - plots.append(imports.plot_databases_numbers_per_date(stats_df, 'Full imports: entities/relationships per database', key='full', dropdown=True, dropdown_options='dates')) - plots.append(imports.plot_databases_numbers_per_date(stats_df, 'Partial imports: entities/relationships per database', key='partial', dropdown=True, dropdown_options='dates')) - plots.append(imports.plot_import_numbers_per_database(stats_df, 'Full imports: Breakdown entities/relationships', key='full', subplot_titles = ('Entities imported', 'Relationships imported', 'File size', 'File size'), colors=True, plots_1='entities', plots_2='relationships', dropdown=True, dropdown_options='databases')) - plots.append(imports.plot_import_numbers_per_database(stats_df, 'Partial imports: Breakdown entities/relationships', key='partial', subplot_titles = ('Entities imported', 'Relationships imported', 'File size', 'File size'), colors=True, plots_1='entities', plots_2='relationships', dropdown=True, dropdown_options='databases')) - else: - plots.append(viz.get_markdown(text="# There are no statistics about recent imports.")) - - self.extend_layout(plots) diff --git a/ckg/report_manager/apps/initialApp.py b/ckg/report_manager/apps/initialApp.py deleted file mode 100644 index 6666f8bd..00000000 --- a/ckg/report_manager/apps/initialApp.py +++ /dev/null @@ -1,3 +0,0 @@ -import dash_html_components as html - -layout = [html.H1('Welcome to the app'), html.H3('You are successfully authorized')] diff --git a/ckg/report_manager/apps/loginApp.py b/ckg/report_manager/apps/loginApp.py deleted file mode 100644 index d0434c04..00000000 --- a/ckg/report_manager/apps/loginApp.py +++ /dev/null @@ -1,25 +0,0 @@ -from ckg.report_manager.apps import basicApp -import dash_core_components as dcc -import dash_html_components as html - - -class LoginApp(basicApp.BasicApp): - """ - Defines the login App - Enables user to access the reports - """ - def __init__(self, title, subtitle, description, layout=[], logo=None, footer=None): - self.pageType = "loginPage" - basicApp.BasicApp.__init__(self, title, subtitle, description, self.pageType, layout, logo, footer) - self.buildPage() - - def buildPage(self): - """ - Builds page with the basic layout from *basicApp.py* and adds the login form. 
- """ - self.add_basic_layout() - login_form = html.Div([html.Form([ - dcc.Input(placeholder='username', name='username', type='text'), - dcc.Input(placeholder='password', name='password', type='password'), - html.Button('Login', type='submit')], action='/apps/login', method='post')]) - self.add_to_layout(login_form) diff --git a/ckg/report_manager/apps/projectApp.py b/ckg/report_manager/apps/projectApp.py deleted file mode 100644 index 8575ac47..00000000 --- a/ckg/report_manager/apps/projectApp.py +++ /dev/null @@ -1,185 +0,0 @@ -import os -from datetime import datetime -from uuid import uuid4 -from ckg import ckg_utils -from ckg.report_manager.apps import basicApp -from ckg.report_manager import project -import dash_html_components as html -import dash_core_components as dcc -from ckg.report_manager.worker import generate_project_report - - -class ProjectApp(basicApp.BasicApp): - """ - Defines what a project App is in the report_manager. - Includes multiple tabs for different data types. - """ - def __init__(self, id, projectId, title, subtitle, description, layout=[], logo=None, footer=None, force=False): - self._id = id - print("Project id", self._id) - self._project_id = projectId - self._page_type = "projectPage" - self._session_id = projectId + datetime.now().strftime('%Y%m-%d%H-%M%S-') + str(uuid4()) - self._force = force - self._configuration_files = {} - basicApp.BasicApp.__init__(self, title, subtitle, description, self.page_type, layout, logo, footer) - self.build_page() - - @property - def id(self): - """ - Retrieves page identifier. - """ - return self._id - - @id.setter - def id(self, id): - """ - Sets 'id' input value as id property of the class. - - :param str id: page identifier. - """ - self._id = id - - @property - def session_id(self): - """ - Retrieves session identifier. - """ - return self._session_id - - @session_id.setter - def session_id(self, session_id): - """ - Sets 'session_id' input value as id property of the class. - - :param str session_id: session identifier. - """ - self._session_id = session_id - - @property - def project_id(self): - """ - Retrieves project identifier. - """ - return self._project_id - - @project_id.setter - def project_id(self, project_id): - """ - Sets 'project_id' input value as project_id property of the class. - - :param str project_id: project identifier. - """ - self._project_id = project_id - - @property - def configuration_files(self): - """ - Retrieves project configuration files. - """ - return self._configuration_files - - @configuration_files.setter - def configuration_files(self, configuration_files): - """ - Sets 'configuration_files' input value as configuration_files property of the class. - - :param dict configuration_files: configuration files. - """ - self._configuration_files = configuration_files - - @property - def force(self): - """ - Retrieves attribute force (whether or not the project report needs to be regenerated). - """ - return self._force - - @force.setter - def force(self, force): - """ - Sets 'force' value as force property of the class. - - :param boolean force: force. 
- """ - self._force = force - - def build_header(self): - buttons = html.Div([html.Div([html.A('Download Project Report', - id='download-zip', - href="", - target="_blank", - n_clicks=0, - className="button_link" - )]), - html.Div([html.A("Regenerate Project Report", - id='regenerate', - title=self.id, - href='', - target='', - n_clicks=0, - className="button_link")]), - html.Div([html.H3("Change Analysis' Configuration: "), - dcc.Dropdown( - id='my-dropdown', - options=[ - {'label': '', 'value': self.id+'/defaults'}, - {'label': 'Proteomics configuration', 'value': self.id+'/proteomics'}, - {'label': 'Interactomics configuration', 'value': self.id+'/interactomics'}, - {'label': 'Phosphoproteomics configuration', 'value': self.id+'/phosphoproteomics'}, - {'label': 'Clinical data configuration', 'value': self.id+'/clinical'}, - {'label': 'Multiomics configuration', 'value': self.id+'/multiomics'}, - {'label': 'Reset to defaults', 'value': self.id + '/reset'}], - value=self.id+'/defaults', - clearable=False, - style={'width': '50%', 'margin-bottom': '10px'}), - dcc.Upload(id='upload-config', - children=html.Div(['Drag and Drop or ', - html.A('Select Files')]), - max_size=-1, - multiple=False), - html.Div(id='output-data-upload')]) - ]) - - return buttons - - def build_page(self): - """ - Builds project and generates the report. - For each data type in the report (e.g. 'proteomics', 'clinical'), \ - creates a designated tab. - A button to download the entire project and report is added. - """ - config_files = {} - tmp_dir = ckg_utils.read_ckg_config(key='tmp_directory') - if os.path.exists(tmp_dir): - directory = os.path.join(tmp_dir, self.id) - if os.path.exists(directory): - config_files = {f.split('.')[0]: os.path.join(directory, f) for f in os.listdir(directory) if os.path.isfile(os.path.join(directory, f))} - - result = generate_project_report.apply_async(args=[self.project_id, config_files, self.force], task_id='generate_report'+self.session_id, queue='compute') - result_output = result.get() - print(result_output) - - p = project.Project(self.project_id, datasets={}, knowledge=None, report={}, configuration_files=config_files) - p.build_project(False) - - if p.name is not None: - self.title = "Project: {}".format(p.name) - else: - self.title = '' - self.add_basic_layout() - plots = p.show_report("app") - p = None - tabs = [] - buttons = self.build_header() - - self.add_to_layout(buttons) - for data_type in plots: - if len(plots[data_type]) >= 1: - tab_content = [html.Div(plots[data_type])] - tab = dcc.Tab(tab_content, label=data_type) - tabs.append(tab) - lc = dcc.Tabs(tabs) - self.add_to_layout(lc) diff --git a/ckg/report_manager/apps/projectCreation.py b/ckg/report_manager/apps/projectCreation.py index 46a7574a..3f863361 100644 --- a/ckg/report_manager/apps/projectCreation.py +++ b/ckg/report_manager/apps/projectCreation.py @@ -1,10 +1,11 @@ import os import sys + from ckg import ckg_utils -from ckg.graphdb_connector import connector from ckg.graphdb_builder import builder_utils from ckg.graphdb_builder.builder import loader from ckg.graphdb_builder.experiments import experiments_controller as eh +from ckg.graphdb_connector import connector try: ckg_config = ckg_utils.read_ckg_config() @@ -28,7 +29,9 @@ def get_project_creation_queries(): except Exception as err: exc_type, exc_obj, exc_tb = sys.exc_info() fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] - logger.error("Reading queries from file {}: {}, file: {},line: {}, err: {}".format(queries_path, sys.exc_info(), 
fname, exc_tb.tb_lineno, err)) + logger.error( + "Reading queries from file {}: {}, file: {},line: {}, err: {}".format(queries_path, sys.exc_info(), fname, + exc_tb.tb_lineno, err)) return project_creation_cypher @@ -50,13 +53,14 @@ def check_if_node_exists(driver, node, node_property, value): query = cypher[query_name]['query'].replace('NODE', node).replace('PROPERTY', node_property) for q in query.split(';')[0:-1]: if '$' in q: - result = connector.getCursorData(driver, q+';', parameters={'value': value}) + result = connector.getCursorData(driver, q + ';', parameters={'value': value}) else: - result = connector.getCursorData(driver, q+';') + result = connector.getCursorData(driver, q + ';') except Exception as err: exc_type, exc_obj, exc_tb = sys.exc_info() fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] - logger.error("Reading query {}: {}, file: {},line: {}, error: {}".format(query_name, sys.exc_info(), fname, exc_tb.tb_lineno, err)) + logger.error("Reading query {}: {}, file: {},line: {}, error: {}".format(query_name, sys.exc_info(), fname, + exc_tb.tb_lineno, err)) return result @@ -81,12 +85,13 @@ def get_new_project_identifier(driver, projectId): else: length = len(last_project.split('P')[-1]) new_length = len(str(new_id)) - external_identifier = 'P'+'0'*(length-new_length)+str(new_id) + external_identifier = 'P' + '0' * (length - new_length) + str(new_id) except Exception as err: external_identifier = None exc_type, exc_obj, exc_tb = sys.exc_info() fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] - logger.error("Reading query {}: {}, file: {},line: {}, err: {}".format(query_name, sys.exc_info(), fname, exc_tb.tb_lineno, err)) + logger.error("Reading query {}: {}, file: {},line: {}, err: {}".format(query_name, sys.exc_info(), fname, + exc_tb.tb_lineno, err)) return external_identifier @@ -108,7 +113,8 @@ def get_subject_number_in_project(driver, projectId): except Exception as err: exc_type, exc_obj, exc_tb = sys.exc_info() fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] - logger.error("Error: {}. Reading query {}: {}, file: {},line: {}".format(err, query_name, sys.exc_info(), fname, exc_tb.tb_lineno)) + logger.error("Error: {}. 
Reading query {}: {}, file: {},line: {}".format(err, query_name, sys.exc_info(), fname, + exc_tb.tb_lineno)) return result @@ -141,9 +147,11 @@ def create_new_project(driver, projectId, data, separator='|'): projectDir = os.path.join(ckg_config['experiments_directory'], os.path.join(external_identifier, 'project')) ckg_utils.checkDirectory(projectDir) - data.to_excel(os.path.join(projectDir, 'ProjectData_{}.xlsx'.format(external_identifier)), index=False, encoding='utf-8') + data.to_excel(os.path.join(projectDir, 'ProjectData_{}.xlsx'.format(external_identifier)), index=False, + encoding='utf-8') - datasetPath = os.path.join(os.path.join(ckg_config['imports_experiments_directory'], external_identifier), 'project') + datasetPath = os.path.join(os.path.join(ckg_config['imports_experiments_directory'], external_identifier), + 'project') ckg_utils.checkDirectory(datasetPath) eh.generate_dataset_imports(external_identifier, 'project', datasetPath) loader.partialUpdate(imports=['project'], specific=[external_identifier]) @@ -154,5 +162,6 @@ def create_new_project(driver, projectId, data, separator='|'): except Exception as err: exc_type, exc_obj, exc_tb = sys.exc_info() fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] - logger.error("Reading query {}: {}, file: {},line: {}, err: {}".format(query_name, sys.exc_info(), fname, exc_tb.tb_lineno, err)) + logger.error("Reading query {}: {}, file: {},line: {}, err: {}".format(query_name, sys.exc_info(), fname, + exc_tb.tb_lineno, err)) return done, external_identifier diff --git a/ckg/report_manager/apps/projectCreationApp.py b/ckg/report_manager/apps/projectCreationApp.py deleted file mode 100644 index 46f48f28..00000000 --- a/ckg/report_manager/apps/projectCreationApp.py +++ /dev/null @@ -1,118 +0,0 @@ -from ckg.report_manager.apps import basicApp -import dash_core_components as dcc -import dash_html_components as html -from ckg.graphdb_connector import connector - - - -DataTypes = ['clinical', 'proteomics', - 'interactomics', 'phosphoproteomics', - 'longitudinal_proteomics', 'longitudinal_clinical'] - - -class ProjectCreationApp(basicApp.BasicApp): - """ - Defines what the project creation App is in the report_manager. - Includes multiple fill in components to gather project information and metadata. - """ - def __init__(self, title, subtitle, description, layout=[], logo=None, footer=None): - self.pageType = "projectCreationPage" - basicApp.BasicApp.__init__(self, title, subtitle, description, self.pageType, layout, logo, footer) - self.buildPage() - - def buildPage(self): - """ - Builds page with the basic layout from *basicApp.py* and adds relevant Dash components for project creation. 
- """ - self.add_basic_layout() - driver = connector.getGraphDatabaseConnectionConfiguration() - - if driver is not None: - try: - users = [] - tissues = [] - diseases = [] - user_nodes = connector.find_nodes(driver, node_type='User') - tissue_nodes = connector.find_nodes(driver, node_type='Tissue') - disease_nodes = connector.find_nodes(driver, node_type='Disease') - for user in user_nodes: - users.append((user['n']['name'])) - for tissue in tissue_nodes: - tissues.append((tissue['n']['name'])) - for disease in disease_nodes: - diseases.append((disease['n']['name'])) - - layout = [html.Div([ - html.Div([html.H4('Project information', style={'width': '15.5%', 'verticalAlign': 'top', 'display': 'inline-block'}), - html.H4('', id='update_project_id', style={'width': '15%', 'verticalAlign': 'top', 'display': 'none'}), - html.Br(), - html.Div(children=[html.Label('Project name:*', style={'marginTop': 15}), - dcc.Input(id='project name', placeholder='Insert name...', type='text', style={'width': '100%', 'height': '35px'})], - style={'width': '100%'}), - html.Br(), - html.Div(children=[html.Label('Project Acronym:', style={'marginTop': 15}), - dcc.Input(id='project acronym', placeholder='Insert name...', type='text', style={'width': '100%', 'height': '35px'})], - style={'width': '100%'}), - html.Br(), - html.Div(children=[html.Label('Project Responsible:*', style={'marginTop': 15})], - style={'width': '49%', 'verticalAlign': 'top', 'display': 'inline-block'}), - html.Div(children=[html.Label('Project Participants:*', style={'marginTop': 15})], - style={'width': '49%', 'marginLeft': '2%', 'verticalAlign': 'top', 'display': 'inline-block'}), - html.Div(children=[dcc.Dropdown(id='responsible-picker', options=[{'label': i, 'value': i} for i in users], value=[], multi=True, searchable=True, style={'width': '100%'})], - style={'width': '49%', 'verticalAlign': 'top', 'display': 'inline-block'}), - html.Div(children=[dcc.Dropdown(id='participant-picker', options=[{'label': i, 'value': i} for i in users], value=[], multi=True, searchable=True, style={'width': '100%'})], - style={'width': '49%', 'marginLeft': '2%', 'verticalAlign': 'top', 'display': 'inline-block'}), - html.Br(), - html.Br(), - html.Div(children=[html.Label('Project Data Types:*', style={'marginTop': 10})], - style={'width': '49%', 'marginLeft': '0%', 'verticalAlign': 'top', 'display': 'inline-block'}), - html.Div(children=[html.Label('Project Disease:*', style={'marginTop': 10})], - style={'width': '49%', 'marginLeft': '2%', 'verticalAlign': 'top', 'display': 'inline-block'}), - html.Div(children=[dcc.Dropdown(id='data-types-picker', options=[{'label': i, 'value': i} for i in DataTypes], value=[], multi=True, searchable=True, style={'width': '100%'})], - style={'width': '49%', 'marginLeft': '0%', 'verticalAlign': 'top', 'display': 'inline-block'}), - html.Div(children=[dcc.Dropdown(id='disease-picker', options=[{'label': i, 'value': i} for i in diseases], value=[], multi=True, searchable=True, style={'width': '100%'})], - style={'width': '49%', 'marginLeft': '2%', 'verticalAlign': 'top', 'display': 'inline-block'}), - html.Br(), - html.Br(), - html.Div(children=[html.Label('Project Tissue:*', style={'marginTop': 10})], - style={'width': '49%', 'marginLeft': '0%', 'verticalAlign': 'top', 'display': 'inline-block'}), - html.Div(children=[html.Label('Project Intervention:', style={'marginTop': 10})], - style={'width': '49%', 'marginLeft': '2%', 'verticalAlign': 'top', 'display': 'inline-block'}), - 
html.Div(children=[dcc.Dropdown(id='tissue-picker', options=[{'label': i, 'value': i} for i in tissues], value=[], multi=True, searchable=True, style={'width': '100%'})], - style={'width': '49%', 'marginLeft': '0%', 'verticalAlign': 'top', 'display': 'inline-block'}), - html.Div(children=[dcc.Input(id='intervention-picker', placeholder='E.g. SNOMED identifier|SNOMED identifier|...', type='text', style={'width': '100%', 'height': '54px'})], - style={'width': '49%', 'marginLeft': '2%', 'verticalAlign': 'top', 'display': 'inline-block'}), - html.Br(), - html.Br(), - html.Div(children=[html.Label('Timepoints:', style={'marginTop': 15}), - dcc.Input(id='number_timepoints', placeholder='E.g. 2 months|15 days|24 hours...', type='text', style={'width': '100%', 'height': '35px'})], - style={'width': '49%', 'marginLeft': '0%', 'verticalAlign': 'top', 'display': 'inline-block'}), - html.Br(), - html.Br(), - html.Div(children=[html.Label('Follows up project:', style={'marginTop': 15}), - dcc.Input(id='related_to', placeholder='Use the Project Identifier (P000000X)', type='text', style={'width': '100%', 'height': '35px'})], - style={'width': '49%', 'marginLeft': '0%', 'verticalAlign': 'top', 'display': 'inline-block'}), - html.Br(), - html.Br(), - html.Div(children=[html.Label('Project Description:', style={'marginTop': 15}), - dcc.Textarea(id='project description', placeholder='Enter description...', style={'width': '100%', 'height': '100px'})]), - html.Br(), - html.Div(children=[html.Label('Starting Date:', style={'marginTop': 10}), - dcc.DatePickerSingle(id='date-picker-start', placeholder='Select date...', clearable=True)], - style={'width': '30%', 'verticalAlign': 'top', 'marginTop': 10, 'display': 'inline-block'}), - html.Div(children=[html.Label('Ending Date:', style={'marginTop': 10}), - dcc.DatePickerSingle(id='date-picker-end', placeholder='Select date...', clearable=True)], - style={'width': '30%', 'verticalAlign': 'top', 'marginTop': 10, 'display': 'inline-block'}), - html.Div(children=html.Button('Create Project', id='project_button', n_clicks=0, className="button_link"), style={'width': '100%', 'padding-left': '87%', 'padding-right': '0%'}), - html.Br(), - html.Div(children=[html.A(children=html.Button('Download Clinical Data template', id='download_button', n_clicks=0, - style={'fontSize': '16px', 'display': 'block'}), - id='download_link', href='', n_clicks=0)], style={'width': '100%', 'padding-left': '87%', 'padding-right': '0%'}), - html.Br(), - html.Div(children=[html.H1(id='project-creation')]), - html.Br()]), - html.Hr()])] - except Exception as e: - layout = [html.Div(children=html.H1("Database is offline", className='error_msg'))] - - self.extend_layout(layout) diff --git a/ckg/report_manager/index.py b/ckg/report_manager/index.py deleted file mode 100644 index f54f0899..00000000 --- a/ckg/report_manager/index.py +++ /dev/null @@ -1,828 +0,0 @@ -import warnings -import os -import shutil -import subprocess -import re -import pandas as pd -import numpy as np -import time -from datetime import datetime -from uuid import uuid4 -import base64 -import flask -import dash_core_components as dcc -import dash_html_components as html -from dash.dependencies import Input, Output, State -from dash.exceptions import PreventUpdate -from ckg import ckg_utils -import ckg.report_manager.user as user -from ckg.report_manager.app import app, server as application -from ckg.report_manager.apps import initialApp, adminApp, projectCreationApp, dataUploadApp, dataUpload, projectApp, importsApp, 
homepageApp, loginApp, projectCreation -from ckg.graphdb_builder import builder_utils -from ckg.graphdb_builder.builder import loader, builder -from ckg.graphdb_builder.experiments import experiments_controller as eh -from ckg.report_manager import utils -from ckg.report_manager.worker import create_new_project, create_new_identifiers, run_minimal_update_task, run_full_update_task -from ckg.graphdb_connector import connector - -warnings.filterwarnings("ignore", category=DeprecationWarning) -warnings.filterwarnings("ignore", category=RuntimeWarning) - -try: - ckg_config = ckg_utils.read_ckg_config() - log_config = ckg_config['report_manager_log'] - logger = builder_utils.setup_logging(log_config, key="index page") - config = builder_utils.setup_config('builder') - separator = config["separator"] -except Exception as err: - logger.error("Reading configuration > {}.".format(err)) - -app.layout = dcc.Loading(children=[html.Div([dcc.Location(id='url', refresh=False), - html.Div(id='page-content', - style={'padding-top': 10}, - className='container-fluid')])], - style={'text-align': 'center', - 'top': '50%', - 'left': '50%', - 'height': '250px'}, - type='cube', color='#2b8cbe') - - -@app.callback([Output('page-content', 'children'), - Output('logout_form', 'style'), - Output('error_msg', 'style')], - [Input('url', 'href')]) -def display_page(pathname): - session_cookie = flask.request.cookies.get('custom-auth-session') - logged_in = session_cookie is not None - if not logged_in: - if pathname is not None and 'error' in pathname: - error = {'display': 'block'} - else: - error = {'display': 'none'} - login_form = loginApp.LoginApp("Login", "", "", layout=[], logo=None, footer=None) - return (login_form.layout, {'display': 'none'}, error) - elif pathname is not None: - if '/apps/initial' in pathname: - return (initialApp.layout, {'display': 'block', - 'position': 'absolute', - 'right': '50px'}, {'display': 'none'}) - elif '/apps/login' in pathname: - if logged_in: - stats_db = homepageApp.HomePageApp("CKG homepage", "Database Stats", "", layout=[], logo=None, footer=None) - return (stats_db.layout, {'display': 'block', - 'position': 'absolute', - 'right': '50px'}, {'display': 'none'}) - elif '/apps/admin' in pathname: - layout = [] - if 'admin?' in pathname: - if 'new_user' in pathname: - username = pathname.split('=')[1] - if 'error' in pathname: - layout.append(html.Div(children=[html.H3("– Error creating new user: {} – ".format(username.replace('%20', ' ')))], className='error_panel')) - else: - layout.append(html.Div(children=[html.H3("– New user successfully created: {} –".format(username))], className='info_panel')) - elif 'running' in pathname: - running_type = pathname.split('=')[1] - layout.append(html.Div(children=[html.H3("– The {} update is running. 
This will take a while, check the logs: graphdb_builder.log for more information –".format(running_type))], className='info_panel')) - admin_page = adminApp.AdminApp("CKG Admin Dashboard", "Admin Dashboard", "", layout=layout, logo=None, footer=None) - return (admin_page.layout, {'display': 'block', - 'position': 'absolute', - 'right': '50px'}, {'display': 'none'}) - elif '/apps/projectCreationApp' in pathname: - projectCreation_form = projectCreationApp.ProjectCreationApp("Project Creation", "", "", layout=[], logo=None, footer=None) - return (projectCreation_form.layout, {'display': 'block', 'position': 'absolute', 'right': '50px'}, {'display': 'none'}) - elif '/apps/dataUploadApp' in pathname: - dataUpload_form = dataUploadApp.DataUploadApp("Data Upload", "", "", layout=[], logo=None, footer=None) - return (dataUpload_form.layout, {'display': 'block', 'position': 'absolute', 'right': '50px'}, {'display': 'none'}) - elif '/apps/project?' in pathname: - project_id, force, session_id = get_project_params_from_url(pathname) - if session_id is None: - session_id = datetime.now().strftime('%Y%m-%d%H-%M%S-') + str(uuid4()) - if project_id is None: - return (initialApp.layout, {'display': 'block', - 'position': 'absolute', - 'right': '50px'}, {'display': 'none'}) - else: - project = projectApp.ProjectApp(session_id, project_id, project_id, "", "", layout=[], logo=None, footer=None, force=force) - return (project.layout, {'display': 'block', - 'position': 'absolute', - 'right': '50px'}, {'display': 'none'}) - elif '/apps/imports' in pathname: - imports = importsApp.ImportsApp("CKG imports monitoring", "Statistics", "", layout=[], logo=None, footer=None) - return (imports.layout, {'display': 'block', - 'position': 'absolute', - 'right': '50px'}, {'display': 'none'}) - elif '/apps/homepage' in pathname or pathname.count('/') <= 3: - stats_db = homepageApp.HomePageApp("CKG homepage", "Database Stats", "", layout=[], logo=None, footer=None) - return (stats_db.layout, {'display': 'block', - 'position': 'absolute', - 'right': '50px'}, {'display': 'none'}) - else: - return ('404', {'display': 'block', 'position': 'absolute', 'right': '50px'}, {'display': 'none'}) - - return (None, None, {'display': 'none'}) - - -def get_project_params_from_url(pathname): - force = False - project_id = None - session_id = None - regex_id = r"project_id=(\w+)" - regex_force = r"force=(\d)" - regex_session = r"session=(.+)" - match_id = re.search(regex_id, pathname) - if match_id: - project_id = match_id.group(1) - match_force = re.search(regex_force, pathname) - if match_force: - force = bool(int(match_force.group(1))) - match_session = re.search(regex_session, pathname) - if match_session: - session_id = match_session.group(1) - - return project_id, force, session_id - - - -@app.callback([Output('upload-config', 'style'), - Output('output-data-upload', 'children'), - Output('upload-config', 'filename')], - [Input('upload-config', 'contents'), - Input('my-dropdown', 'value')], - [State('upload-config', 'filename')]) -def update_output(contents, value, fname): - display = {'display': 'none'} - uploaded = None - if value is not None: - page_id, dataset = value.split('/') - if not os.path.exists(ckg_config['tmp_directory']): - os.makedirs(ckg_config['tmp_directory']) - directory = os.path.join(ckg_config['tmp_directory'], page_id) - if dataset != "defaults": - display = {'width': '50%', - 'height': '60px', - 'lineHeight': '60px', - 'borderWidth': '2px', - 'borderStyle': 'dashed', - 'borderRadius': '15px', - 'textAlign': 
'center', - 'margin-bottom': '20px', - 'display': 'block'} - if not os.path.exists(directory): - os.makedirs(directory) - - if fname is None: - contents = None - if contents is not None: - with open(os.path.join(directory, dataset+'.yml'), 'wb') as out: - content_type, content_string = contents.split(',') - decoded = base64.b64decode(content_string) - out.write(decoded) - uploaded = dcc.Markdown("**{} configuration uploaded: {}** ✅".format(dataset.title(),fname)) - fname = None - contents = None - else: - uploaded = None - elif dataset == 'reset': - display = {'display': 'none'} - if os.path.exists(directory): - shutil.rmtree(directory) - return display, uploaded, fname - - -@app.callback(Output('db-creation-date', 'children'), - [Input('db_stats_df', 'data')]) -def update_db_date(df): - db_date = "Unknown" - if 'kernel_monitor' in df: - kernel = pd.read_json(df['kernel_monitor'], orient='records') - db_date = kernel['storeCreationDate'][0] - - return html.H3('Store Creation date: {}'.format(db_date)) - - -@app.callback([Output("db_indicator_14", "children"), - Output("db_indicator_1", "children"), - Output("db_indicator_3", "children"), - Output("db_indicator_2", "children"), - Output("db_indicator_4", "children"), - Output("db_indicator_5", "children"), - Output("db_indicator_6", "children"), - Output("db_indicator_7", "children"), - Output("db_indicator_8", "children"), - Output("db_indicator_9", "children"), - Output("db_indicator_10", "children"), - Output("db_indicator_11", "children"), - Output("db_indicator_12", "children"), - Output("db_indicator_13", "children"), - ], - [Input("db_stats_df", "data")]) -def number_panel_update(df): - updates = [] - if 'projects' in df: - projects = pd.read_json(df['projects'], orient='records') - if not projects.empty and 'Projects' in projects: - projects = projects['Projects'][0] - updates.append(projects) - if 'meta_stats' in df: - meta_stats = pd.read_json(df['meta_stats'], orient='records') - if not meta_stats.empty: - if 'nodeCount' in meta_stats: - ent = meta_stats['nodeCount'][0] - else: - ent = '0' - updates.append(ent) - if 'relCount' in meta_stats: - rel = meta_stats['relCount'][0] - else: - rel = '0' - updates.append(rel) - if 'labelCount' in meta_stats: - labels = meta_stats['labelCount'][0] - else: - labels = '0' - updates.append(labels) - if 'relTypeCount' in meta_stats: - types = meta_stats['relTypeCount'][0] - else: - types = '0' - updates.append(types) - if 'propertyKeyCount' in meta_stats: - prop = meta_stats['propertyKeyCount'][0] - else: - prop = '0' - updates.append(prop) - - if 'store_size' in df: - store_size = pd.read_json(df['store_size'], orient='records') - if not store_size.empty and 'size' in store_size: - ent_store = store_size['size'][2] - rel_store = store_size['size'][4] - prop_store = store_size['size'][3] - string_store = store_size['size'][5] - array_store = store_size['size'][0] - log_store = store_size['size'][1] - else: - ent_store = '0 MB' - rel_store = '0 MB' - prop_store = '0 MB' - string_store = '0 MB' - array_store = '0 MB' - log_store = '0 MB' - - updates.extend([ent_store, rel_store, prop_store, string_store, array_store, log_store]) - - if 'transactions' in df: - transactions = pd.read_json(df['transactions'], orient='records') - if not transactions.empty and 'name' in transactions: - t_open = transactions.loc[transactions['name'] == 'NumberOfOpenedTransactions', 'value'].iloc[0] - t_comm = transactions.loc[transactions['name'] == 'NumberOfCommittedTransactions', 'value'].iloc[0] - else: - t_open = 
'0' - t_comm = '0' - - updates.extend([t_open, t_comm]) - - return [dcc.Markdown("**{}**".format(i)) for i in updates] - - -@app.callback(Output("project_url", "children"), - [Input("project_option", "value")]) -def update_project_url(value): - if value is not None and len(value) > 1: - return html.A(value[0].title(), - href='/apps/project?project_id={}&force=0'.format(value[1]), - target='', - n_clicks=0, - className="button_link") - else: - return '' - - -@app.server.route('/apps/login', methods=['POST', 'GET']) -def route_login(): - data = flask.request.form - username = data.get('username') - password = data.get('password') - - if not username or not password: - flask.abort(401) - elif not user.User(username).verify_password(password): - return flask.redirect('/login_error') - else: - rep = flask.redirect('/') - rep.set_cookie('custom-auth-session', username+'_'+datetime.now().strftime('%Y%m-%d%H-%M%S-') + str(uuid4())) - return rep - - -@app.server.route('/apps/logout', methods=['POST']) -def route_logout(): - # Redirect back to the index and remove the session cookie. - rep = flask.redirect('/') - rep.set_cookie('custom-auth-session', '', expires=0) - - return rep - - -@app.server.route('/create_user', methods=['POST', 'GET']) -def route_create_user(): - data = flask.request.form - name = data.get('name') - surname = data.get('surname') - affiliation = data.get('affiliation') - acronym = data.get('acronym') - email = data.get('email') - alt_email = data.get('alt_email') - phone = data.get('phone') - uname = name[0] + surname - username = uname - - registered = 'error_exists' - iter = 1 - while registered == 'error_exists': - u = user.User(username=username.lower(), name=name, surname=surname, affiliation=affiliation, acronym=acronym, phone=phone, email=email, alternative_email=alt_email) - registered = u.register() - if registered is None: - rep = flask.redirect('/apps/admin?error_new_user={}'.format('Failed Database')) - elif registered == 'error_exists': - username = uname + str(iter) - iter += 1 - elif registered == 'error_email': - rep = flask.redirect('/apps/admin?error_new_user={}'.format('Email already registered')) - elif registered == 'error_database': - rep = flask.redirect('/apps/admin?error_new_user={}'.format('User could not be saved in the database')) - else: - rep = flask.redirect('/apps/admin?new_user={}'.format(username)) - - return rep - - -@app.server.route('/update_minimal', methods=['POST', 'GET']) -def route_minimal_update(): - session_cookie = flask.request.cookies.get('custom-auth-session') - username = session_cookie.split('_')[0] - internal_id = datetime.now().strftime('%Y%m-%d%H-%M%S-') - result = run_minimal_update_task.apply_async(args=[username], task_id='run_minimal_'+session_cookie+internal_id, queue='update') - - rep = flask.redirect('/apps/admin?running=minimal') - - return rep - - -@app.server.route('/update_full', methods=['POST', 'GET']) -def route_full_update(): - session_cookie = flask.request.cookies.get('custom-auth-session') - data = flask.request.form - download = data.get('dwn-radio') == 'true' - username = session_cookie.split('_')[0] - internal_id = datetime.now().strftime('%Y%m-%d%H-%M%S-') - result = run_full_update_task.apply_async(args=[username, download], task_id='run_full_'+session_cookie+internal_id, queue='update') - - rep = flask.redirect('/apps/admin/running=full') - - return rep - - -@app.callback(Output('download-zip', 'href'), - [Input('download-zip', 'n_clicks')], - [State('url', 'href')]) -def generate_report_url(n_clicks, 
pathname): - project_id, force, session_id = get_project_params_from_url(pathname) - return '/downloads/{}'.format(project_id) - - -@application.route('/downloads/') -def route_report_url(value): - uri = os.path.join(ckg_config['downloads_directory'], value + '.zip') - return flask.send_file(uri, attachment_filename=value + '.zip', as_attachment=True, cache_timeout=-1) - -@application.route('/example_files') -def route_example_files_url(): - uri = os.path.join(ckg_config['data_directory'], 'example_files.zip') - return flask.send_file(uri, attachment_filename='example_files.zip', as_attachment=True, cache_timeout=-1) - -###Callback regenerate project -@app.callback(Output('regenerate', 'href'), - [Input('regenerate', 'n_clicks'), - Input('regenerate', 'title')], - [State('url', 'href')]) -def regenerate_report(n_clicks, title, pathname): - basic_path = '/'.join(pathname.split('/')[0:3]) - project_id, force, session_id = get_project_params_from_url(pathname) - return basic_path+'/apps/project?project_id={}&force=1&session={}'.format(project_id, title) - - -###Callbacks for project creation app -def image_formatter(im): - data_im = base64.b64encode(im).decode('ascii') - return f'' - - -@app.callback([Output('project-creation', 'children'), - Output('update_project_id', 'children'), - Output('update_project_id', 'style'), - Output('download_button', 'style')], - [Input('project_button', 'n_clicks')], - [State('project name', 'value'), - State('project acronym', 'value'), - State('responsible-picker', 'value'), - State('participant-picker', 'value'), - State('data-types-picker', 'value'), - State('number_timepoints', 'value'), - State('related_to', 'value'), - State('disease-picker', 'value'), - State('tissue-picker', 'value'), - State('intervention-picker', 'value'), - #State('number_subjects', 'value'), - State('project description', 'value'), - State('date-picker-start', 'date'), - State('date-picker-end', 'date')]) -def create_project(n_clicks, name, acronym, responsible, participant, datatype, timepoints, related_to, disease, tissue, intervention, description, start_date, end_date): - if n_clicks > 0: - session_cookie = flask.request.cookies.get('custom-auth-session') - responsible = separator.join(responsible) - participant = separator.join(participant) - datatype = separator.join(datatype) - disease = separator.join(disease) - tissue = separator.join(tissue) - arguments = [name, datatype, disease, tissue, responsible] - driver = connector.getGraphDatabaseConnectionConfiguration() - - if driver is not None: - # Check if clinical variables exist in the database - if intervention is not None: - intervention = intervention.strip() - if intervention != '': - interventions = list() - exist = dict() - for i in intervention.split(separator): - res = projectCreation.check_if_node_exists(driver, 'Clinical_variable', 'id', i) - if res.empty: - exist[i] = True - else: - exist[i] = False - interventions.append('{} ({})'.format(res['n.name'][0], i)) - intervention = separator.join(interventions) - - if any(exist.values()): - response = 'The intervention(s) "{}" specified does(do) not exist.'.format(', '.join([k for k,n in exist.items() if n==True])) - return response, None, {'display': 'none'}, {'display': 'none'} - - if any(not arguments[n] for n, i in enumerate(arguments)): - response = "Insufficient information to create project. Fill in all fields with '*'." 
- return response, None, {'display': 'none'}, {'display': 'none'} - - # Get project data from filled-in fields - projectData = pd.DataFrame([name, acronym, description, related_to, datatype, timepoints, disease, tissue, intervention, responsible, participant, start_date, end_date]).T - projectData.columns = ['name', 'acronym', 'description', 'related_to', 'datatypes', 'timepoints', 'disease', 'tissue', 'intervention', 'responsible', 'participant', 'start_date', 'end_date'] - projectData['status'] = '' - - projectData.fillna(value=pd.np.nan, inplace=True) - projectData.replace('', np.nan, inplace=True) - - # Generate project internal identifier bsed on timestamp - # Excel file is saved in folder with internal id name - epoch = time.time() - internal_id = "%s%d" % ("CP", epoch) - projectData.insert(loc=0, column='internal_id', value=internal_id) - result = create_new_project.apply_async(args=[internal_id, projectData.to_json(), separator], task_id='project_creation_'+session_cookie+internal_id, queue='creation') - result_output = result.get() - if len(result_output) > 0: - external_id = list(result_output.keys())[0] - done_msg = result_output[external_id] - if external_id != '' and done_msg is not None: - response = "Project successfully submitted. Download Clinical Data template." - elif done_msg is None: - response = "There was a problem when creating the project. Please, contact the administrator." - else: - response = 'A project with the same name already exists in the database.' - else: - response = "There was a problem when creating the project. Please, try again or contact the administrator." - external_id = response - else: - response = "The Database is temporarily offline. Contact your administrator or start the datatabase." - - return response, '- '+external_id, {'display': 'inline-block'}, {'display': 'block'} - else: - return None, None, {'display': 'none'}, {'display': 'none'} - - -@app.callback(Output('project-creation', 'style'), - [Input('project-creation', 'children')]) -def change_style(style): - if style is not None and 'successfully' in style: - return {'fontSize':'20px', 'marginLeft':'70%', 'color': 'black'} - else: - return {'fontSize':'20px', 'marginLeft':'70%', 'color': 'red'} - - -@app.callback(Output('download_link', 'href'), - [Input('update_project_id', 'children')]) -def update_download_link(project): - if project is not None and project != '': - return '/apps/templates{}'.format('Design_and_Clinical_templates') - else: - return '' - -@application.route('/apps/templates') -def serve_static(value): - cwd = os.path.dirname(os.path.abspath(__file__)) - directory = os.path.join(cwd,'apps/templates/') - filename = os.path.join(directory, value) - url = filename+'.zip' - if not os.path.isfile(url): - utils.compress_directory(filename, os.path.join(directory, 'files'), compression_format='zip') - - return flask.send_file(url, attachment_filename = value+'.zip', as_attachment = True, cache_timeout=-1) - - -###Callbacks for data upload app -@app.callback([Output('existing-project', 'children'), - Output('upload-form', 'style'), - Output('link-project-report', 'children'), - Output('link-project-report', 'href')], - [Input('project_id', 'value')], - [State('data_download_link', 'style')]) -def activate_upload_form(projectid, download_style): - m = '' - style = {'pointer-events': 'none', 'opacity': 0.5} - download_style.update({'display': 'none'}) - report_title = '' - report_href = '' - driver = connector.getGraphDatabaseConnectionConfiguration() - if driver is not None: - 
if len(projectid) > 7: - project = connector.find_node(driver, node_type='Project', parameters={'id': projectid}) - if len(project) == 0: - m = 'ERROR: Project "{}" does not exist in the database.'.format(projectid) - else: - if 'name' in project: - report_title = 'Generate report: {}'.format(project['name']) - report_href = '/apps/project?project_id={}&force=0'.format(projectid) - m = 'Uploading data for Project: **{}**'.format(project['name']) - style = {} - else: - m = 'ERROR: Database if temporarily offline. Contact your administrator or start the database.' - - return m, style, report_title, report_href - - -@app.callback(Output('proteomics-tool', 'style'), - [Input('upload-data-type-picker', 'value'), - Input('prot-tool', 'value')]) -def show_proteomics_options(datatype, prot_tool): - display = {'display': 'none'} - if datatype in ['proteomics', 'interactomics', 'phosphoproteomics']: - if prot_tool == '': - display = {'display': 'block'} - else: - display = {'display': 'block'} - - return display - - -@app.callback([Output('proteomics-file', 'style'), - Output('upload-data', 'disabled')], - [Input('upload-data-type-picker', 'value'), - Input('prot-tool', 'value'), - Input('prot-file', 'value')]) -def show_proteomics_file_options(datatype, prot_tool, prot_file): - display = ({'display': 'none'}, False) - if datatype in ['proteomics', 'interactomics', 'phosphoproteomics']: - if prot_tool is not None and prot_tool != '': - if prot_file == '' and prot_tool != 'mzTab': - display = ({'display': 'block'}, True) - else: - display = ({'display': 'block'}, False) - else: - display = ({'display': 'block'}, True) - - return display - - -@app.callback([Output('uploaded-files', 'children'), - Output('upload-data', 'filename'), - Output('prot-tool', 'value'), - Output('prot-file', 'value')], - [Input('upload-data', 'contents')], - [State('upload-data-type-picker', 'value'), - State('prot-tool', 'value'), - State('prot-file', 'value'), - State('project_id', 'value'), - State('upload-data', 'filename')]) -def save_files_in_tmp(content, dataset, prot_tool, prot_file, projectid, uploaded_file): - if dataset is not None: - session_cookie = flask.request.cookies.get('custom-auth-session') - temporaryDirectory = os.path.join(ckg_config['tmp_directory'], session_cookie + "upload") - if not os.path.exists(ckg_config['tmp_directory']): - os.makedirs(ckg_config['tmp_directory']) - elif not os.path.exists(temporaryDirectory): - os.makedirs(temporaryDirectory) - - directory = os.path.join(temporaryDirectory, dataset) - if os.path.exists(directory) and uploaded_file is not None: - if os.path.exists(os.path.join(directory, uploaded_file)): - shutil.rmtree(directory) - - builder_utils.checkDirectory(directory) - if dataset in ['proteomics', 'interactomics', 'phosphoproteomics'] and prot_tool != '' and (prot_file != '' or prot_tool == 'mzTab'): - selected_file = prot_tool.lower() + "-" + prot_file.lower() - if selected_file in config['file_proteomics']: - filename = config['file_proteomics'][selected_file] - else: - if prot_tool == 'mzTab': - filename = dataset+'_'+prot_tool.lower()+'.mztab' - else: - filename = dataset+'_'+prot_tool.lower()+'_'+prot_file.replace(' ', '').lower()+'.'+uploaded_file.split('.')[-1] - directory = os.path.join(directory, prot_tool.lower()) - if os.path.exists(directory): - if os.path.exists(os.path.join(directory, filename)): - os.remove(os.path.join(directory, filename)) - builder_utils.checkDirectory(directory) - elif dataset == 'experimental_design': - filename = 
config['file_design'].split('_')[0]+'_'+projectid+'.'+uploaded_file.split('.')[-1] - elif dataset == 'clinical': - filename = config['file_clinical'].split('_')[0]+'_'+projectid+'.'+uploaded_file.split('.')[-1] - - if uploaded_file is None: - content = None - if content is not None: - data = builder_utils.parse_contents(content, filename) - builder_utils.export_contents(data, directory, filename) - - uploaded = uploaded_file - uploaded_file = None - return uploaded, uploaded_file, '', '' - else: - raise PreventUpdate - - return '', None, '', '' - - -@app.callback([Output('upload-result', 'children'), - Output('data_download_link', 'style'), - Output('link-project-report', 'style'), - Output('project_table', 'children')], - [Input('submit_button', 'n_clicks'), - Input('project_id', 'value')]) -def run_processing(n_clicks, project_id): - message = None - style = {'display': 'none'} - table = None - - if n_clicks > 0: - session_cookie = flask.request.cookies.get('custom-auth-session') - destDir = os.path.join(ckg_config['experiments_directory'], project_id) - builder_utils.checkDirectory(destDir) - temporaryDirectory = os.path.join(ckg_config['tmp_directory'], session_cookie+"upload") - datasets = builder_utils.listDirectoryFoldersNotEmpty(temporaryDirectory) - driver = connector.getGraphDatabaseConnectionConfiguration() - if driver is not None: - res_n = dataUpload.check_samples_in_project(driver, project_id) - if 'experimental_design' in datasets: - dataset = 'experimental_design' - directory = os.path.join(temporaryDirectory, dataset) - destination = os.path.join(destDir, dataset) - experimental_files = os.listdir(directory) - regex = r"{}.+".format(config['file_design'].replace('PROJECTID', project_id)) - r = re.compile(regex) - experimental_filename = list(filter(r.match, experimental_files)) - if len(experimental_filename) > 0: - experimental_filename = experimental_filename.pop() - designData = builder_utils.readDataset(os.path.join(directory, experimental_filename)) - designData = designData.astype(str) - designData.columns = [c.lower() for c in designData.columns] - if 'subject external_id' in designData.columns and 'biological_sample external_id' in designData.columns and 'analytical_sample external_id' in designData.columns: - if (res_n > 0).any().values.sum() > 0: - res = dataUpload.remove_samples_nodes_db(driver, project_id) - res_n = dataUpload.check_samples_in_project(driver, project_id) - if (res_n > 0).any().values.sum() > 0: - message = 'ERROR: There is already an experimental design loaded into the database and there was an error when trying to delete it. Contact your administrator.' 
- return message, style, style, table - - res_n = None - result = create_new_identifiers.apply_async(args=[project_id, designData.to_json(), directory, experimental_filename], task_id='data_upload_'+session_cookie+datetime.now().strftime('%Y%m-%d%H-%M%S-'), queue='creation') - result_output = result.wait(timeout=None, propagate=True, interval=0.2) - res_n = pd.DataFrame.from_dict(result_output['res_n']) - builder_utils.copytree(directory, destination) - else: - message = 'ERROR: The Experimental design file provided ({}) is missing some of the required fields: {}'.format(experimental_filename, ','.join(['subject external_id','biological_sample external_id','analytical_sample external_id'])) - builder_utils.remove_directory(directory) - - return message, style, style, table - - if 'clinical' in datasets: - dataset = 'clinical' - directory = os.path.join(temporaryDirectory, dataset) - clinical_files = os.listdir(directory) - regex = r"{}.+".format(config['file_clinical'].replace('PROJECTID', project_id) ) - r = re.compile(regex) - clinical_filename = list(filter(r.match, clinical_files)) - if len(clinical_filename) > 0: - clinical_filename = clinical_filename.pop() - data = builder_utils.readDataset(os.path.join(directory, clinical_filename)) - data.columns = [c.lower() for c in data.columns] - external_ids = {} - if 'subject external_id' in data and 'biological_sample external_id' in data: - external_ids['subjects'] = data['subject external_id'].astype(str).unique().tolist() - external_ids['biological_samples'] = data['biological_sample external_id'].astype(str).unique().tolist() - dataUpload.create_mapping_cols_clinical(driver, data, directory, clinical_filename, separator=separator) - if 0 in res_n.values: - samples = ', '.join([k for (k,v) in res_n if v == 0]) - message = 'ERROR: No {} for project {} in the database. Please upload first the experimental design (ExperimentalDesign_{}.xlsx)'.format(samples, project_id, project_id) - builder_utils.remove_directory(directory) - - return message, style, style, table - else: - db_ids = dataUpload.check_external_ids_in_db(driver, project_id).to_dict() - message = '' - intersections = {} - differences_in = {} - differences_out = {} - for col in external_ids: - intersect = list(set(db_ids[col].values()).intersection(external_ids[col])) - difference_in = list(set(db_ids[col].values()).difference(external_ids[col])) - difference_out = list(set(external_ids[col]).difference(set(db_ids[col].values()))) - if len(difference_in) > 0 or len(difference_out) > 0: - intersections[col] = intersect - differences_in[col] = difference_in - differences_out[col] = difference_out - for col in intersections: - message += 'WARNING: Some {} identifiers were not matched:\n Matching: {}\n No information provided: {} \n Non-existing in the database: {}\n'.format(col, len(intersections[col]), ','.join(differences_in[col]), ','.join(differences_out[col])) - else: - message = 'ERROR: Format of the Clinical Data file is not correct. Check template in the documentation. 
Check columns: subject external_id, biological_sample external_id and analytical_sample external_id' - builder_utils.remove_directory(directory) - - return message, style, style, table - try: - for dataset in datasets: - if dataset != "experimental_design": - source = os.path.join(temporaryDirectory, dataset) - destination = os.path.join(destDir, dataset) - builder_utils.copytree(source, destination) - datasetPath = os.path.join(os.path.join(ckg_config['imports_experiments_directory'], project_id), dataset) - eh.generate_dataset_imports(project_id, dataset, datasetPath) - - loader.partialUpdate(imports=['experiment'], specific=[project_id]) - filename = os.path.join(ckg_config['tmp_directory'], 'Uploaded_files_'+project_id) - utils.compress_directory(filename, temporaryDirectory, compression_format='zip') - style.update({'display':'inline-block'}) - message = 'Files successfully uploaded.' - table = dataUpload.get_project_information(driver, project_id) - if table is None: - message = 'Error: No data was uploaded for project: {}. Review your experimental design and data files.'.format(project_id) - except Exception as err: - style.update({'display':'none'}) - message = str(err) - else: - style.update({'display':'none'}) - message = "ERROR: Database is offline. Contact your administrator or start the database." - - return message, style, style, table - -@app.callback(Output('upload-result', 'style'), - [Input('upload-result', 'children')]) -def change_style_data_upload(upload_result): - if upload_result is None: - return {'fontSize':'20px', 'marginLeft':'70%', 'color': 'black'} - else: - if 'ERROR' in upload_result: - return {'fontSize':'20px', 'marginLeft':'70%', 'color': 'red'} - if 'WARNING' in upload_result: - return {'fontSize':'20px', 'marginLeft':'70%', 'color': 'orange'} - else: - return {'fontSize':'20px', 'marginLeft':'70%', 'color': 'black'} - -@app.callback(Output('data_download_link', 'href'), - [Input('data_download_link', 'n_clicks'), - Input('project_id', 'value')]) -def generate_upload_zip(n_clicks, project_id): - session_cookie = flask.request.cookies.get('custom-auth-session') - return '/tmp/{}_{}'.format(session_cookie+"upload", project_id) - -@application.route('/tmp/') -def route_upload_url(value): - page_id, project_id = value.split('_') - directory = ckg_config['tmp_directory'] - filename = os.path.join(directory, 'Uploaded_files_'+project_id) - url = filename+'.zip' - - return flask.send_file(url, attachment_filename = filename.split('/')[-1]+'.zip', as_attachment = True, cache_timeout=-1) - -def main(): - print("IN MAIN") - celery_working_dir = os.path.dirname(os.path.abspath(__file__)) - os.chdir(celery_working_dir) - queues = [('creation', 1, 'INFO'), ('compute', 3, 'INFO'), ('update', 1, 'INFO')] - for queue, processes, log_level in queues: - celery_cmdline = 'celery -A ckg.report_manager.worker worker --loglevel={} --concurrency={} -E -Q {}'.format(log_level, processes, queue).split(" ") - print("Ready to call {} ".format(celery_cmdline)) - subprocess.Popen(celery_cmdline) - print("Done callling {} ".format(celery_cmdline)) - - application.run(debug=False, host='0.0.0.0') - - -if __name__ == '__main__': - main() diff --git a/ckg/report_manager/pages/__init__.py b/ckg/report_manager/pages/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ckg/report_manager/pages/adminPage.py b/ckg/report_manager/pages/adminPage.py new file mode 100644 index 00000000..35dcb32e --- /dev/null +++ b/ckg/report_manager/pages/adminPage.py @@ -0,0 +1,72 @@ +import 
dash +import flask +from dash import html, dcc + +title = "CKG Admin Dashboard" +subtitle = "" +description = "" + +dash.register_page(__name__, path='/apps/admin', title=f"{title} - {subtitle}", description=description) + + +def layout(): + # TODO: adapt authentication for admin + session_cookie = flask.request.cookies.get('custom-auth-session') + logged_in = session_cookie is not None + if logged_in == False: + return html.Div(["Please ", dcc.Link("login", href="/apps/loginPage"), " to continue"]) + + create_user_form = [html.H3("Create CKG User"), html.Form([ + html.Div(children=[html.Label('Name'), + dcc.Input(placeholder='name', name='name', type='text', required=True), + html.Label('Surname'), + dcc.Input(placeholder='surname', name='surname', type='text', required=True), + html.Label('Acronym'), + dcc.Input(placeholder='acronym', name='acronym', type='text'), + html.Label('Affiliation'), + dcc.Input(placeholder='affiliation', name='affiliation', type='text', required=True)]), + html.Div(children=[html.Label('E-mail'), + dcc.Input(placeholder='email', name='email', type='email', required=True), + html.Label('alternative E-mail'), + dcc.Input(placeholder='alt email', name='alt_e-mail', type='email'), + html.Label('Phone number'), + dcc.Input(placeholder='phone', name='phone', type='tel', required=True)]), + html.Div(children=[html.Button('CreateUser', type='submit', className='button_link')], + style={'width': '100%', 'padding-left': '87%', 'padding-right': '0%'})], action='/create_user', + method='post')] + update_database_bts = [html.H3("Build CKG Database"), + html.Div(children=[ + html.Form([html.Button('Minimal Update', type='submit', className='button_link')], + action='/update_minimal', method='post'), + html.P( + "This option will load into CKG's graph database the licensed Ontologies and Databases and all their missing relationships.", + className='description_p')]), + html.Br(), + html.Div(children=[ + html.Form([html.Button('Full Update', type='submit', className='button_link'), + html.Div(children=[html.H4("Download:"), + html.Label("Yes", className='radioitem'), + dcc.Input(id='Yes', + name='dwn-radio', + value=True, + type='radio'), + html.Label("No", className='radioitem'), + dcc.Input(id='No', + name='dwn-radio', + value=False, + type='radio')])], action='/update_full', + method='post'), + html.P( + "This option will regenerate the entire database, downloading data from the different Ontologies and Databases (Download=Yes) and loading them and all existing projects into CKG's graph database.", + className='description_p')])] + admin_options = html.Div(children=[html.Div(children=create_user_form, className='div_framed'), + html.Div(children=update_database_bts, className='div_framed')]) + + admin_layout = [html.Div(children=[ + html.H1(children=title), + html.H2(children=subtitle), + html.Div(children=description), + ])] + + admin_layout.extend(admin_options) + return admin_layout diff --git a/ckg/report_manager/pages/dataUploadPage.py b/ckg/report_manager/pages/dataUploadPage.py new file mode 100644 index 00000000..f50e4f84 --- /dev/null +++ b/ckg/report_manager/pages/dataUploadPage.py @@ -0,0 +1,397 @@ +import os +import re +import shutil +from datetime import datetime + +import dash +import flask +import pandas as pd +from dash import dcc +from dash import html +from dash.dependencies import Input, Output, State +from dash.exceptions import PreventUpdate + +from ckg import ckg_utils +from ckg.graphdb_builder import builder_utils +from ckg.graphdb_builder.builder 
import loader +from ckg.graphdb_builder.experiments import experiments_controller as eh +from ckg.graphdb_connector import connector +from ckg.report_manager import utils +from ckg.report_manager.apps import dataUpload +from ckg.report_manager.worker import create_new_identifiers + +ckg_config = ckg_utils.read_ckg_config() +config = builder_utils.setup_config('builder') +separator = config["separator"] + +title = "Data Upload" +subtitle = "" +description = "" + +dash.register_page(__name__, path='/apps/dataUploadApp', title=f"{title} - {subtitle}", description=description) + +DataTypes = ['experimental_design', 'clinical', 'proteomics', 'interactomics', 'phosphoproteomics'] + + +def layout(): + session_cookie = flask.request.cookies.get('custom-auth-session') + logged_in = session_cookie is not None + if logged_in == False: + return html.Div(["Please ", dcc.Link("login", href="/apps/loginPage"), " to continue"]) + + data_upload_layout = [html.Div([ + html.H1(children=title), + html.H2(children=subtitle), + html.Div(children=description), + html.Div([html.H4('Project identifier:', style={'marginTop': 30, 'marginBottom': 20}), + dcc.Input(id='project_id', placeholder='e.g. P0000001', type='text', value='', debounce=True, + maxLength=8, minLength=8, style={'width': '100%', 'height': '55px'}), + dcc.Markdown(id='existing-project')], + style={'width': '20%'}), + html.Br(), + html.Div(id='upload-form', children=[ + html.Div(children=[html.A("Download example files", + id='example_files', + href='/example_files', + n_clicks=0, + className="button_link")], + style={'width': '100%', 'padding-left': '87%', 'padding-right': '0%'}), + html.Div(children=[html.Label('Select upload data type:', style={'marginTop': 10})], + style={'width': '49%', 'marginLeft': '0%', 'verticalAlign': 'top', 'fontSize': '18px'}), + html.Div(children=[ + dcc.RadioItems(id='upload-data-type-picker', options=[{'label': i, 'value': i} for i in DataTypes], + value=None, + inputStyle={"margin-right": "5px"}, style={'display': 'block', 'fontSize': '16px'})]), + html.Div(children=[html.H5('Proteomics tool:'), dcc.RadioItems(id='prot-tool', + options=[{'label': i, 'value': i} for i in + ['MaxQuant', 'DIA-NN', + 'Spectronaut', 'FragPipe', + 'mzTab']], value='', + inputStyle={"margin-right": "5px"}, + style={'display': 'block', + 'fontSize': '16px'})], + id='proteomics-tool', style={'padding-top': 20}), + html.Div(children=[html.H5('Select the type of file uploaded:'), dcc.Dropdown(id='prot-file', options=[ + {'label': i, 'value': i} for i in ['Protein groups', 'Peptides', 'Phospho STY sites']], value='', + style={'display': 'block', + 'fontSize': '14px', + 'width': '250px'})], + id='proteomics-file', style={'padding-top': 20}), + html.Div([html.H4('Upload file (max. 
100Mb)', style={'marginTop': 30, 'marginBottom': 20}), + dcc.Upload(id='upload-data', children=html.Div(['Drag and Drop or ', html.A('Select Files')]), + style={'width': '100%', + 'height': '60px', + 'lineHeight': '60px', + 'borderWidth': '1px', + 'borderStyle': 'dashed', + 'borderRadius': '5px', + 'textAlign': 'center', + 'margin': '0px'}, + multiple=False, max_size=1024 * 1024 * 1000)]), + html.Br(), + html.Div( + children=[dcc.Markdown('**Uploaded Files:**', id='markdown-title'), dcc.Markdown(id='uploaded-files')]), + html.Div([html.Button("Upload Data to CKG", + id='submit_button', + n_clicks=0, + className="button_link")], + style={'width': '100%', 'padding-left': '87%', 'padding-right': '0%'})]), + html.Div(children=[ + html.A('Download Files(.zip)', + id='data_download_link', + href='', + n_clicks=0, + style={'display': 'none'}, + className="button_link")]), + html.Div(children=[ + html.A(children='', + id='link-project-report', + href='', + target='', + n_clicks=0, + style={'display': 'none'}, + className="button_link")]), + html.Div(id='data-upload-result', children=[dcc.Markdown(id='upload-result')], style={'width': '100%'}), + html.Hr()]), + html.Div(id='project_table', children=[])] + return data_upload_layout + + +@dash.callback([Output('existing-project', 'children'), + Output('upload-form', 'style'), + Output('link-project-report', 'children'), + Output('link-project-report', 'href')], + [Input('project_id', 'value')], + [State('data_download_link', 'style')]) +def activate_upload_form(projectid, download_style): + m = '' + style = {'pointer-events': 'none', 'opacity': 0.5} + download_style.update({'display': 'none'}) + report_title = '' + report_href = '' + driver = connector.getGraphDatabaseConnectionConfiguration() + if driver is not None: + if len(projectid) > 7: + project = connector.find_node(driver, node_type='Project', parameters={'id': projectid}) + if len(project) == 0: + m = 'ERROR: Project "{}" does not exist in the database.'.format(projectid) + else: + if 'name' in project: + report_title = 'Generate report: {}'.format(project['name']) + report_href = '/apps/project?project_id={}&force=0'.format(projectid) + m = 'Uploading data for Project: **{}**'.format(project['name']) + style = {} + else: + m = 'ERROR: Database is temporarily offline. Contact your administrator or start the database.'
+ + return m, style, report_title, report_href + + +@dash.callback(Output('proteomics-tool', 'style'), + [Input('upload-data-type-picker', 'value'), + Input('prot-tool', 'value')]) +def show_proteomics_options(datatype, prot_tool): + display = {'display': 'none'} + if datatype in ['proteomics', 'interactomics', 'phosphoproteomics']: + if prot_tool == '': + display = {'display': 'block'} + else: + display = {'display': 'block'} + + return display + + +@dash.callback([Output('proteomics-file', 'style'), + Output('upload-data', 'disabled')], + [Input('upload-data-type-picker', 'value'), + Input('prot-tool', 'value'), + Input('prot-file', 'value')]) +def show_proteomics_file_options(datatype, prot_tool, prot_file): + display = ({'display': 'none'}, False) + if datatype in ['proteomics', 'interactomics', 'phosphoproteomics']: + if prot_tool is not None and prot_tool != '': + if prot_file == '' and prot_tool != 'mzTab': + display = ({'display': 'block'}, True) + else: + display = ({'display': 'block'}, False) + else: + display = ({'display': 'block'}, True) + + return display + + +@dash.callback([Output('uploaded-files', 'children'), + Output('upload-data', 'filename'), + Output('prot-tool', 'value'), + Output('prot-file', 'value')], + [Input('upload-data', 'contents')], + [State('upload-data-type-picker', 'value'), + State('prot-tool', 'value'), + State('prot-file', 'value'), + State('project_id', 'value'), + State('upload-data', 'filename')]) +def save_files_in_tmp(content, dataset, prot_tool, prot_file, projectid, uploaded_file): + if dataset is not None: + session_cookie = flask.request.cookies.get('custom-auth-session') + temporaryDirectory = os.path.join(ckg_config['tmp_directory'], session_cookie + "upload") + if not os.path.exists(ckg_config['tmp_directory']): + os.makedirs(ckg_config['tmp_directory']) + elif not os.path.exists(temporaryDirectory): + os.makedirs(temporaryDirectory) + + directory = os.path.join(temporaryDirectory, dataset) + if os.path.exists(directory) and uploaded_file is not None: + if os.path.exists(os.path.join(directory, uploaded_file)): + shutil.rmtree(directory) + + builder_utils.checkDirectory(directory) + if dataset in ['proteomics', 'interactomics', 'phosphoproteomics'] and prot_tool != '' and ( + prot_file != '' or prot_tool == 'mzTab'): + selected_file = prot_tool.lower() + "-" + prot_file.lower() + if selected_file in config['file_proteomics']: + filename = config['file_proteomics'][selected_file] + else: + if prot_tool == 'mzTab': + filename = dataset + '_' + prot_tool.lower() + '.mztab' + else: + filename = dataset + '_' + prot_tool.lower() + '_' + prot_file.replace(' ', '').lower() + '.' + \ + uploaded_file.split('.')[-1] + directory = os.path.join(directory, prot_tool.lower()) + if os.path.exists(directory): + if os.path.exists(os.path.join(directory, filename)): + os.remove(os.path.join(directory, filename)) + builder_utils.checkDirectory(directory) + elif dataset == 'experimental_design': + filename = config['file_design'].split('_')[0] + '_' + projectid + '.' + uploaded_file.split('.')[-1] + elif dataset == 'clinical': + filename = config['file_clinical'].split('_')[0] + '_' + projectid + '.' 
+ uploaded_file.split('.')[-1] + + if uploaded_file is None: + content = None + if content is not None: + data = builder_utils.parse_contents(content, filename) + builder_utils.export_contents(data, directory, filename) + + uploaded = uploaded_file + uploaded_file = None + return uploaded, uploaded_file, '', '' + else: + raise PreventUpdate + + return '', None, '', '' + + +@dash.callback([Output('upload-result', 'children'), + Output('data_download_link', 'style'), + Output('link-project-report', 'style'), + Output('project_table', 'children')], + [Input('submit_button', 'n_clicks'), + Input('project_id', 'value')]) +def run_processing(n_clicks, project_id): + message = None + style = {'display': 'none'} + table = None + + if n_clicks > 0: + session_cookie = flask.request.cookies.get('custom-auth-session') + destDir = os.path.join(ckg_config['experiments_directory'], project_id) + builder_utils.checkDirectory(destDir) + temporaryDirectory = os.path.join(ckg_config['tmp_directory'], session_cookie + "upload") + datasets = builder_utils.listDirectoryFoldersNotEmpty(temporaryDirectory) + driver = connector.getGraphDatabaseConnectionConfiguration() + if driver is not None: + res_n = dataUpload.check_samples_in_project(driver, project_id) + if 'experimental_design' in datasets: + dataset = 'experimental_design' + directory = os.path.join(temporaryDirectory, dataset) + destination = os.path.join(destDir, dataset) + experimental_files = os.listdir(directory) + regex = r"{}.+".format(config['file_design'].replace('PROJECTID', project_id)) + r = re.compile(regex) + experimental_filename = list(filter(r.match, experimental_files)) + if len(experimental_filename) > 0: + experimental_filename = experimental_filename.pop() + designData = builder_utils.readDataset(os.path.join(directory, experimental_filename)) + designData = designData.astype(str) + designData.columns = [c.lower() for c in designData.columns] + if 'subject external_id' in designData.columns and 'biological_sample external_id' in designData.columns and 'analytical_sample external_id' in designData.columns: + if (res_n > 0).any().values.sum() > 0: + res = dataUpload.remove_samples_nodes_db(driver, project_id) + res_n = dataUpload.check_samples_in_project(driver, project_id) + if (res_n > 0).any().values.sum() > 0: + message = 'ERROR: There is already an experimental design loaded into the database and there was an error when trying to delete it. Contact your administrator.' 
+ return message, style, style, table + + res_n = None + result = create_new_identifiers.apply_async( + args=[project_id, designData.to_json(), directory, experimental_filename], + task_id='data_upload_' + session_cookie + datetime.now().strftime('%Y%m-%d%H-%M%S-'), + queue='creation') + result_output = result.wait(timeout=None, propagate=True, interval=0.2) + res_n = pd.DataFrame.from_dict(result_output['res_n']) + builder_utils.copytree(directory, destination) + else: + message = 'ERROR: The Experimental design file provided ({}) is missing some of the required fields: {}'.format( + experimental_filename, ','.join(['subject external_id', 'biological_sample external_id', + 'analytical_sample external_id'])) + builder_utils.remove_directory(directory) + + return message, style, style, table + + if 'clinical' in datasets: + dataset = 'clinical' + directory = os.path.join(temporaryDirectory, dataset) + clinical_files = os.listdir(directory) + regex = r"{}.+".format(config['file_clinical'].replace('PROJECTID', project_id)) + r = re.compile(regex) + clinical_filename = list(filter(r.match, clinical_files)) + if len(clinical_filename) > 0: + clinical_filename = clinical_filename.pop() + data = builder_utils.readDataset(os.path.join(directory, clinical_filename)) + data.columns = [c.lower() for c in data.columns] + external_ids = {} + if 'subject external_id' in data and 'biological_sample external_id' in data: + external_ids['subjects'] = data['subject external_id'].astype(str).unique().tolist() + external_ids['biological_samples'] = data['biological_sample external_id'].astype( + str).unique().tolist() + dataUpload.create_mapping_cols_clinical(driver, data, directory, clinical_filename, + separator=separator) + if 0 in res_n.values: + samples = ', '.join([k for (k, v) in res_n if v == 0]) + message = 'ERROR: No {} for project {} in the database. Please upload first the experimental design (ExperimentalDesign_{}.xlsx)'.format( + samples, project_id, project_id) + builder_utils.remove_directory(directory) + + return message, style, style, table + else: + db_ids = dataUpload.check_external_ids_in_db(driver, project_id).to_dict() + message = '' + intersections = {} + differences_in = {} + differences_out = {} + for col in external_ids: + intersect = list(set(db_ids[col].values()).intersection(external_ids[col])) + difference_in = list(set(db_ids[col].values()).difference(external_ids[col])) + difference_out = list(set(external_ids[col]).difference(set(db_ids[col].values()))) + if len(difference_in) > 0 or len(difference_out) > 0: + intersections[col] = intersect + differences_in[col] = difference_in + differences_out[col] = difference_out + for col in intersections: + message += 'WARNING: Some {} identifiers were not matched:\n Matching: {}\n No information provided: {} \n Non-existing in the database: {}\n'.format( + col, len(intersections[col]), ','.join(differences_in[col]), + ','.join(differences_out[col])) + else: + message = 'ERROR: Format of the Clinical Data file is not correct. Check template in the documentation. 
Check columns: subject external_id, biological_sample external_id and analytical_sample external_id' + builder_utils.remove_directory(directory) + + return message, style, style, table + try: + for dataset in datasets: + if dataset != "experimental_design": + source = os.path.join(temporaryDirectory, dataset) + destination = os.path.join(destDir, dataset) + builder_utils.copytree(source, destination) + datasetPath = os.path.join( + os.path.join(ckg_config['imports_experiments_directory'], project_id), dataset) + eh.generate_dataset_imports(project_id, dataset, datasetPath) + + loader.partialUpdate(imports=['experiment'], specific=[project_id]) + filename = os.path.join(ckg_config['tmp_directory'], 'Uploaded_files_' + project_id) + utils.compress_directory(filename, temporaryDirectory, compression_format='zip') + style.update({'display': 'inline-block'}) + message = 'Files successfully uploaded.' + table = dataUpload.get_project_information(driver, project_id) + if table is None: + message = 'Error: No data was uploaded for project: {}. Review your experimental design and data files.'.format( + project_id) + except Exception as err: + style.update({'display': 'none'}) + message = str(err) + else: + style.update({'display': 'none'}) + message = "ERROR: Database is offline. Contact your administrator or start the database." + + return message, style, style, table + + +@dash.callback(Output('upload-result', 'style'), + [Input('upload-result', 'children')]) +def change_style_data_upload(upload_result): + if upload_result is None: + return {'fontSize': '20px', 'marginLeft': '70%', 'color': 'black'} + else: + if 'ERROR' in upload_result: + return {'fontSize': '20px', 'marginLeft': '70%', 'color': 'red'} + if 'WARNING' in upload_result: + return {'fontSize': '20px', 'marginLeft': '70%', 'color': 'orange'} + else: + return {'fontSize': '20px', 'marginLeft': '70%', 'color': 'black'} + + +@dash.callback(Output('data_download_link', 'href'), + [Input('data_download_link', 'n_clicks'), + Input('project_id', 'value')]) +def generate_upload_zip(n_clicks, project_id): + session_cookie = flask.request.cookies.get('custom-auth-session') + return '/tmp/{}_{}'.format(session_cookie + "upload", project_id) diff --git a/ckg/report_manager/pages/homePage.py b/ckg/report_manager/pages/homePage.py new file mode 100644 index 00000000..9d1fcccf --- /dev/null +++ b/ckg/report_manager/pages/homePage.py @@ -0,0 +1,155 @@ +import dash +import flask +import pandas as pd +from dash import html, dcc, Output, Input + +from ckg.report_manager.apps import homepageStats as hpstats + +title = "CKG homepage" +subtitle = "Database Stats" +description = "" + +dash.register_page(__name__, path='/', title=f"{title} - {subtitle}", description=description) + + +def layout(): + session_cookie = flask.request.cookies.get('custom-auth-session') + logged_in = session_cookie is not None + + print(logged_in) + + if logged_in == False: + return html.Div(["Please ", dcc.Link("login", href="/apps/loginPage"), " to continue"]) + else: + plots = get_plots() + + homepage_layout = html.Div(children=[ + html.H1(children=title), + html.H2(children=subtitle), + html.Div(children=description), + + html.Div(hpstats.quick_numbers_panel()), + + html.Div(plots[0]), + html.Div(plots[1]), + html.Div(plots[2]) + ]) + return homepage_layout + + +def get_plots(): + args = {} + args['valueCol'] = 'value' + args['textCol'] = 'size' + args['y'] = 'index' + args['x'] = 'number' + args['orientation'] = 'h' + args['title'] = '' + args['x_title'] = '' + 
args['y_title'] = '' + args['height'] = 900 + args['width'] = 900 + dfs = hpstats.get_db_stats_data() + plots = [] + plots.append(hpstats.plot_store_size_components(dfs, title='DB Store Size', args=args)) + plots.append(hpstats.plot_node_rel_per_label(dfs, focus='nodes', title='Nodes per Label', args=args)) + plots.append(hpstats.plot_node_rel_per_label(dfs, focus='relationships', title='Relationships per Type', args=args)) + + return plots + + +@dash.callback(Output('db-creation-date', 'children'), + [Input('db_stats_df', 'data')]) +def update_db_date(df): + db_date = "Unknown" + if 'kernel_monitor' in df: + kernel = pd.read_json(df['kernel_monitor'], orient='records') + db_date = kernel['storeCreationDate'][0] + + return html.H3('Store Creation date: {}'.format(db_date)) + + +@dash.callback([Output("db_indicator_14", "children"), + Output("db_indicator_1", "children"), + Output("db_indicator_3", "children"), + Output("db_indicator_2", "children"), + Output("db_indicator_4", "children"), + Output("db_indicator_5", "children"), + Output("db_indicator_6", "children"), + Output("db_indicator_7", "children"), + Output("db_indicator_8", "children"), + Output("db_indicator_9", "children"), + Output("db_indicator_10", "children"), + Output("db_indicator_11", "children"), + Output("db_indicator_12", "children"), + Output("db_indicator_13", "children"), + ], + [Input("db_stats_df", "data")]) +def number_panel_update(df): + print("Update func") + updates = [] + if 'projects' in df: + projects = pd.read_json(df['projects'], orient='records') + if not projects.empty and 'Projects' in projects: + projects = projects['Projects'][0] + updates.append(projects) + if 'meta_stats' in df: + meta_stats = pd.read_json(df['meta_stats'], orient='records') + if not meta_stats.empty: + if 'nodeCount' in meta_stats: + ent = meta_stats['nodeCount'][0] + else: + ent = '0' + updates.append(ent) + if 'relCount' in meta_stats: + rel = meta_stats['relCount'][0] + else: + rel = '0' + updates.append(rel) + if 'labelCount' in meta_stats: + labels = meta_stats['labelCount'][0] + else: + labels = '0' + updates.append(labels) + if 'relTypeCount' in meta_stats: + types = meta_stats['relTypeCount'][0] + else: + types = '0' + updates.append(types) + if 'propertyKeyCount' in meta_stats: + prop = meta_stats['propertyKeyCount'][0] + else: + prop = '0' + updates.append(prop) + + if 'store_size' in df: + store_size = pd.read_json(df['store_size'], orient='records') + if not store_size.empty and 'size' in store_size: + ent_store = store_size['size'][2] + rel_store = store_size['size'][4] + prop_store = store_size['size'][3] + string_store = store_size['size'][5] + array_store = store_size['size'][0] + log_store = store_size['size'][1] + else: + ent_store = '0 MB' + rel_store = '0 MB' + prop_store = '0 MB' + string_store = '0 MB' + array_store = '0 MB' + log_store = '0 MB' + + updates.extend([ent_store, rel_store, prop_store, string_store, array_store, log_store]) + + if 'transactions' in df: + transactions = pd.read_json(df['transactions'], orient='records') + if not transactions.empty and 'name' in transactions: + t_open = transactions.loc[transactions['name'] == 'NumberOfOpenedTransactions', 'value'].iloc[0] + t_comm = transactions.loc[transactions['name'] == 'NumberOfCommittedTransactions', 'value'].iloc[0] + else: + t_open = '0' + t_comm = '0' + + updates.extend([t_open, t_comm]) + + return [dcc.Markdown("**{}**".format(i)) for i in updates] diff --git a/ckg/report_manager/pages/importsPage.py 
b/ckg/report_manager/pages/importsPage.py new file mode 100644 index 00000000..fee93b71 --- /dev/null +++ b/ckg/report_manager/pages/importsPage.py @@ -0,0 +1,65 @@ +import os + +import dash +import flask +from dash import html, dcc + +from ckg import ckg_utils +from ckg.analytics_core.viz import viz +from ckg.report_manager.apps import imports + +title = "CKG imports monitoring" +subtitle = "Statistics" +description = "" + +dash.register_page(__name__, path='/apps/imports', title=f"{title} - {subtitle}", description=description) + + +def layout(): + session_cookie = flask.request.cookies.get('custom-auth-session') + logged_in = session_cookie is not None + if logged_in == False: + return html.Div(["Please ", dcc.Link("login", href="/apps/loginPage"), " to continue"]) + + plots = get_plots() + imports_layout = html.Div(children=[ + html.H1(children=title), + html.H2(children=subtitle), + html.Div(children=description), + html.Div(children=[html.Div(plot) for plot in plots]), + ]) + return imports_layout + + +def get_plots(): + plots = [] + stats_dir = ckg_utils.read_ckg_config(key='stats_directory') + stats_file = os.path.join(stats_dir, "stats.hdf") + if os.path.exists(stats_file): + stats_df = imports.get_stats_data(stats_file, n=3) + plots.append(imports.plot_total_number_imported(stats_df, 'Number of imported entities and relationships')) + plots.append(imports.plot_total_numbers_per_date(stats_df, 'Imported entities vs relationships')) + plots.append( + imports.plot_databases_numbers_per_date(stats_df, 'Full imports: entities/relationships per database', + key='full', dropdown=True, dropdown_options='dates')) + plots.append( + imports.plot_databases_numbers_per_date(stats_df, 'Partial imports: entities/relationships per database', + key='partial', dropdown=True, dropdown_options='dates')) + plots.append( + imports.plot_import_numbers_per_database(stats_df, 'Full imports: Breakdown entities/relationships', + key='full', + subplot_titles=( + 'Entities imported', 'Relationships imported', 'File size', + 'File size'), colors=True, plots_1='entities', + plots_2='relationships', + dropdown=True, dropdown_options='databases')) + plots.append( + imports.plot_import_numbers_per_database(stats_df, 'Partial imports: Breakdown entities/relationships', + key='partial', subplot_titles=( + 'Entities imported', 'Relationships imported', 'File size', 'File size'), colors=True, + plots_1='entities', + plots_2='relationships', dropdown=True, + dropdown_options='databases')) + else: + plots.append(viz.get_markdown(text="# There are no statistics about recent imports.")) + return plots diff --git a/ckg/report_manager/pages/initialPage.py b/ckg/report_manager/pages/initialPage.py new file mode 100644 index 00000000..9e003e01 --- /dev/null +++ b/ckg/report_manager/pages/initialPage.py @@ -0,0 +1,15 @@ +import dash +from dash import html + +title = "Welcome to the app" +subtitle = "You are successfully authorized" +description = "" + +dash.register_page(__name__, path='/apps/initial', title=f"{title} - {subtitle}", description=description) + + +def layout(): + inital_layout = [html.H1(children=title), + html.H2(children=subtitle), + html.Div(children=description)] + return inital_layout diff --git a/ckg/report_manager/pages/loginPage.py b/ckg/report_manager/pages/loginPage.py new file mode 100644 index 00000000..a4150c15 --- /dev/null +++ b/ckg/report_manager/pages/loginPage.py @@ -0,0 +1,22 @@ +import dash +from dash import html, dcc + +title = "CKG login" +subtitle = "" +description = "" + 
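+# Registered with the Dash Pages feature (Dash >= 2.5): register_page below maps "/apps/loginPage" to this module's layout().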
+dash.register_page(__name__, path="/apps/loginPage", title=f"{title} - {subtitle}", description=description) + + +def layout(): + login_layout = html.Div(children=[ + html.H1(children=title), + html.H2(children=subtitle), + html.Div(children=description), + + html.Div([html.Form([ + dcc.Input(placeholder='username', name='username', type='text', id="username-box"), + dcc.Input(placeholder='password', name='password', type='password', id="password-box"), + html.Button('Login', type='submit')], action='/apps/login', method='post', id="login-button")]) + ]) + return login_layout diff --git a/ckg/report_manager/pages/logoutPage.py b/ckg/report_manager/pages/logoutPage.py new file mode 100644 index 00000000..5fe10dfc --- /dev/null +++ b/ckg/report_manager/pages/logoutPage.py @@ -0,0 +1,14 @@ +import dash +from dash import html, dcc + +dash.register_page(__name__, path="/apps/logoutPage") + + +def layout(): + return html.Div( + [ + html.Div(html.H2("You have been logged out.")), + html.Br(), + dcc.Link("Login", href="/apps/loginPage"), + ] + ) diff --git a/ckg/report_manager/pages/projectCreationPage.py b/ckg/report_manager/pages/projectCreationPage.py new file mode 100644 index 00000000..ba63d2ff --- /dev/null +++ b/ckg/report_manager/pages/projectCreationPage.py @@ -0,0 +1,298 @@ +import base64 +import time + +import dash +import flask +import numpy as np +import pandas as pd +from dash import dcc +from dash import html +from dash.dependencies import Input, Output, State + +from ckg.graphdb_builder import builder_utils +from ckg.graphdb_connector import connector +from ckg.report_manager.apps import projectCreation +from ckg.report_manager.worker import create_new_project + +title = "Project Creation" +subtitle = "" +description = "" + +dash.register_page(__name__, path='/apps/projectCreationApp', title=f"{title} - {subtitle}", description=description) + +DataTypes = ['clinical', 'proteomics', + 'interactomics', 'phosphoproteomics', + 'longitudinal_proteomics', 'longitudinal_clinical'] + + +def layout(): + session_cookie = flask.request.cookies.get('custom-auth-session') + logged_in = session_cookie is not None + if logged_in == False: + return html.Div(["Please ", dcc.Link("login", href="/apps/loginPage"), " to continue"]) + + data = get_data() + + if data == None: + database_offline_error_layout = [html.Div(children=html.H1("Database is offline", className='error_msg'))] + return database_offline_error_layout + else: + users, tissues, diseases = data + project_creation_layout = [html.Div([ + html.H1(children=title), + html.H2(children=subtitle), + html.Div(children=description), + html.Div([html.H4('Project information', + style={'width': '15.5%', 'verticalAlign': 'top', 'display': 'inline-block'}), + html.H4('', id='update_project_id', + style={'width': '15%', 'verticalAlign': 'top', 'display': 'none'}), + html.Br(), + html.Div(children=[html.Label('Project name:*', style={'marginTop': 15}), + dcc.Input(id='project name', placeholder='Insert name...', type='text', + style={'width': '100%', 'height': '35px'})], + style={'width': '100%'}), + html.Br(), + html.Div(children=[html.Label('Project Acronym:', style={'marginTop': 15}), + dcc.Input(id='project acronym', placeholder='Insert name...', type='text', + style={'width': '100%', 'height': '35px'})], + style={'width': '100%'}), + html.Br(), + html.Div(children=[html.Label('Project Responsible:*', style={'marginTop': 15})], + style={'width': '49%', 'verticalAlign': 'top', 'display': 'inline-block'}), + html.Div(children=[html.Label('Project 
Participants:*', style={'marginTop': 15})], + style={'width': '49%', 'marginLeft': '2%', 'verticalAlign': 'top', + 'display': 'inline-block'}), + html.Div(children=[ + dcc.Dropdown(id='responsible-picker', options=[{'label': i, 'value': i} for i in users], + value=[], multi=True, searchable=True, style={'width': '100%'})], + style={'width': '49%', 'verticalAlign': 'top', 'display': 'inline-block'}), + html.Div(children=[ + dcc.Dropdown(id='participant-picker', options=[{'label': i, 'value': i} for i in users], + value=[], multi=True, searchable=True, style={'width': '100%'})], + style={'width': '49%', 'marginLeft': '2%', 'verticalAlign': 'top', + 'display': 'inline-block'}), + html.Br(), + html.Br(), + html.Div(children=[html.Label('Project Data Types:*', style={'marginTop': 10})], + style={'width': '49%', 'marginLeft': '0%', 'verticalAlign': 'top', + 'display': 'inline-block'}), + html.Div(children=[html.Label('Project Disease:*', style={'marginTop': 10})], + style={'width': '49%', 'marginLeft': '2%', 'verticalAlign': 'top', + 'display': 'inline-block'}), + html.Div(children=[ + dcc.Dropdown(id='data-types-picker', options=[{'label': i, 'value': i} for i in DataTypes], + value=[], multi=True, searchable=True, style={'width': '100%'})], + style={'width': '49%', 'marginLeft': '0%', 'verticalAlign': 'top', + 'display': 'inline-block'}), + html.Div(children=[ + dcc.Dropdown(id='disease-picker', options=[{'label': i, 'value': i} for i in diseases], + value=[], multi=True, searchable=True, style={'width': '100%'})], + style={'width': '49%', 'marginLeft': '2%', 'verticalAlign': 'top', + 'display': 'inline-block'}), + html.Br(), + html.Br(), + html.Div(children=[html.Label('Project Tissue:*', style={'marginTop': 10})], + style={'width': '49%', 'marginLeft': '0%', 'verticalAlign': 'top', + 'display': 'inline-block'}), + html.Div(children=[html.Label('Project Intervention:', style={'marginTop': 10})], + style={'width': '49%', 'marginLeft': '2%', 'verticalAlign': 'top', + 'display': 'inline-block'}), + html.Div(children=[ + dcc.Dropdown(id='tissue-picker', options=[{'label': i, 'value': i} for i in tissues], + value=[], multi=True, searchable=True, style={'width': '100%'})], + style={'width': '49%', 'marginLeft': '0%', 'verticalAlign': 'top', + 'display': 'inline-block'}), + html.Div(children=[dcc.Input(id='intervention-picker', + placeholder='E.g. SNOMED identifier|SNOMED identifier|...', + type='text', style={'width': '100%', 'height': '54px'})], + style={'width': '49%', 'marginLeft': '2%', 'verticalAlign': 'top', + 'display': 'inline-block'}), + html.Br(), + html.Br(), + html.Div(children=[html.Label('Timepoints:', style={'marginTop': 15}), + dcc.Input(id='number_timepoints', + placeholder='E.g. 
2 months|15 days|24 hours...', type='text', + style={'width': '100%', 'height': '35px'})], + style={'width': '49%', 'marginLeft': '0%', 'verticalAlign': 'top', + 'display': 'inline-block'}), + html.Br(), + html.Br(), + html.Div(children=[html.Label('Follows up project:', style={'marginTop': 15}), + dcc.Input(id='related_to', placeholder='Use the Project Identifier (P000000X)', + type='text', style={'width': '100%', 'height': '35px'})], + style={'width': '49%', 'marginLeft': '0%', 'verticalAlign': 'top', + 'display': 'inline-block'}), + html.Br(), + html.Br(), + html.Div(children=[html.Label('Project Description:', style={'marginTop': 15}), + dcc.Textarea(id='project description', placeholder='Enter description...', + style={'width': '100%', 'height': '100px'})]), + html.Br(), + html.Div(children=[html.Label('Starting Date:', style={'marginTop': 10}), + dcc.DatePickerSingle(id='date-picker-start', placeholder='Select date...', + clearable=True)], + style={'width': '30%', 'verticalAlign': 'top', 'marginTop': 10, + 'display': 'inline-block'}), + html.Div(children=[html.Label('Ending Date:', style={'marginTop': 10}), + dcc.DatePickerSingle(id='date-picker-end', placeholder='Select date...', + clearable=True)], + style={'width': '30%', 'verticalAlign': 'top', 'marginTop': 10, + 'display': 'inline-block'}), + html.Div(children=html.Button('Create Project', id='project_button', n_clicks=0, + className="button_link"), + style={'width': '100%', 'padding-left': '87%', 'padding-right': '0%'}), + html.Br(), + html.Div(children=[html.A( + children=html.Button('Download Clinical Data template', id='download_button', n_clicks=0, + style={'fontSize': '16px', 'display': 'block'}), + id='download_link', href='', n_clicks=0)], + style={'width': '100%', 'padding-left': '87%', 'padding-right': '0%'}), + html.Br(), + html.Div(children=[html.H1(id='project-creation')]), + html.Br()]), + html.Hr()])] + + return project_creation_layout + + +def get_data(): + driver = connector.getGraphDatabaseConnectionConfiguration() + if driver is not None: + try: + users = [] + tissues = [] + diseases = [] + user_nodes = connector.find_nodes(driver, node_type='User') + tissue_nodes = connector.find_nodes(driver, node_type='Tissue') + disease_nodes = connector.find_nodes(driver, node_type='Disease') + for user in user_nodes: + users.append((user['n']['name'])) + for tissue in tissue_nodes: + tissues.append((tissue['n']['name'])) + for disease in disease_nodes: + diseases.append((disease['n']['name'])) + return users, tissues, diseases + except Exception as e: + print(f"Error getting data: {e}") + return None + + +def image_formatter(im): + data_im = base64.b64encode(im).decode('ascii') + return f'' + + +@dash.callback([Output('project-creation', 'children'), + Output('update_project_id', 'children'), + Output('update_project_id', 'style'), + Output('download_button', 'style')], + [Input('project_button', 'n_clicks')], + [State('project name', 'value'), + State('project acronym', 'value'), + State('responsible-picker', 'value'), + State('participant-picker', 'value'), + State('data-types-picker', 'value'), + State('number_timepoints', 'value'), + State('related_to', 'value'), + State('disease-picker', 'value'), + State('tissue-picker', 'value'), + State('intervention-picker', 'value'), + # State('number_subjects', 'value'), + State('project description', 'value'), + State('date-picker-start', 'date'), + State('date-picker-end', 'date')]) +def create_project(n_clicks, name, acronym, responsible, participant, datatype, timepoints, 
related_to, disease, tissue, + intervention, description, start_date, end_date): + config = builder_utils.setup_config('builder') + separator = config["separator"] + if n_clicks > 0: + session_cookie = flask.request.cookies.get('custom-auth-session') + responsible = separator.join(responsible) + participant = separator.join(participant) + datatype = separator.join(datatype) + disease = separator.join(disease) + tissue = separator.join(tissue) + arguments = [name, datatype, disease, tissue, responsible] + driver = connector.getGraphDatabaseConnectionConfiguration() + + if driver is not None: + # Check if clinical variables exist in the database + if intervention is not None: + intervention = intervention.strip() + if intervention != '': + interventions = list() + exist = dict() + for i in intervention.split(separator): + res = projectCreation.check_if_node_exists(driver, 'Clinical_variable', 'id', i) + if res.empty: + exist[i] = True + else: + exist[i] = False + interventions.append('{} ({})'.format(res['n.name'][0], i)) + intervention = separator.join(interventions) + + if any(exist.values()): + response = 'The intervention(s) "{}" specified does(do) not exist.'.format( + ', '.join([k for k, n in exist.items() if n == True])) + return response, None, {'display': 'none'}, {'display': 'none'} + + if any(not arguments[n] for n, i in enumerate(arguments)): + response = "Insufficient information to create project. Fill in all fields with '*'." + return response, None, {'display': 'none'}, {'display': 'none'} + + # Get project data from filled-in fields + projectData = pd.DataFrame( + [name, acronym, description, related_to, datatype, timepoints, disease, tissue, intervention, + responsible, participant, start_date, end_date]).T + projectData.columns = ['name', 'acronym', 'description', 'related_to', 'datatypes', 'timepoints', 'disease', + 'tissue', 'intervention', 'responsible', 'participant', 'start_date', 'end_date'] + projectData['status'] = '' + + projectData.fillna(value=np.nan, inplace=True) + projectData.replace('', np.nan, inplace=True) + + # Generate project internal identifier based on timestamp + # Excel file is saved in folder with internal id name + epoch = time.time() + internal_id = "%s%d" % ("CP", epoch) + projectData.insert(loc=0, column='internal_id', value=internal_id) + result = create_new_project.apply_async(args=[internal_id, projectData.to_json(), separator], + task_id='project_creation_' + session_cookie + internal_id, + queue='creation') + result_output = result.get() + if len(result_output) > 0: + external_id = list(result_output.keys())[0] + done_msg = result_output[external_id] + if external_id != '' and done_msg is not None: + response = "Project successfully submitted. Download Clinical Data template." + elif done_msg is None: + response = "There was a problem when creating the project. Please, contact the administrator." + else: + response = 'A project with the same name already exists in the database.' + else: + response = "There was a problem when creating the project. Please, try again or contact the administrator." + external_id = response + else: + response = "The Database is temporarily offline. Contact your administrator or start the database." + external_id = ''  # default so the final return below does not raise NameError when the database is offline
+ + return response, '- ' + external_id, {'display': 'inline-block'}, {'display': 'block'} + else: + return None, None, {'display': 'none'}, {'display': 'none'} + + +@dash.callback(Output('project-creation', 'style'), + [Input('project-creation', 'children')]) +def change_style(style): + if style is not None and 'successfully' in style: + return {'fontSize': '20px', 'marginLeft': '70%', 'color': 'black'} + else: + return {'fontSize': '20px', 'marginLeft': '70%', 'color': 'red'} + + +@dash.callback(Output('download_link', 'href'), + [Input('update_project_id', 'children')]) +def update_download_link(project): + if project is not None and project != '': + return '/apps/templates{}'.format('Design_and_Clinical_templates') + else: + return '' diff --git a/ckg/report_manager/pages/projectPage.py b/ckg/report_manager/pages/projectPage.py new file mode 100644 index 00000000..0027eb46 --- /dev/null +++ b/ckg/report_manager/pages/projectPage.py @@ -0,0 +1,178 @@ +import base64 +import os +import shutil +from datetime import datetime +from uuid import uuid4 + +import dash +from dash import html, dcc, Output, Input, State + +from ckg import ckg_utils +from ckg.report_manager import project +from ckg.report_manager.worker import generate_project_report + +title = "Project details" +subtitle = "You are successfully authorized" +description = "" + +dash.register_page(__name__, path='/apps/project', title=f"{title} - {subtitle}", description=description) + + +def layout(project_id="P0000001", force=0): + print(project_id) + print(force) + + session_id = project_id + datetime.now().strftime('%Y%m-%d%H-%M%S-') + str(uuid4()) + + # inital_layout = [html.H1(children=title), + # html.H2(children=subtitle), + # html.Div(children=description)] + project_layout = build_page(project_id, force, session_id) + return project_layout + + +def build_page(project_id, force, session_id): + """ + Builds project and generates the report. + For each data type in the report (e.g. 'proteomics', 'clinical'), \ + creates a designated tab. + A button to download the entire project and report is added. 
+ """ + print("Build page") + config_files = {} + tmp_dir = ckg_utils.read_ckg_config(key='tmp_directory') + if os.path.exists(tmp_dir): + directory = os.path.join(tmp_dir, project_id) + if os.path.exists(directory): + config_files = {f.split('.')[0]: os.path.join(directory, f) for f in os.listdir(directory) if + os.path.isfile(os.path.join(directory, f))} + + print("Finished zip") + + result = generate_project_report.apply_async(args=[project_id, config_files, force], + task_id='generate_report' + session_id, queue='compute') + result_output = result.get() + print("Project result") + print(result_output == None) + print(result_output) + + p = project.Project(project_id, datasets={}, knowledge=None, report={}, configuration_files=config_files) + p.build_project(False) + + print("Get project") + + # TODO: fix this + if p.name is not None: + title = "Project: {}".format(p.name) + else: + title = '' + plots = p.show_report("app") + print("Plots:") + print(plots) + p = None + tabs = [] + buttons = build_header(project_id, session_id) + print("build header") + + layout = [] + layout.append(buttons) + for data_type in plots: + if len(plots[data_type]) >= 1: + tab_content = [html.Div(plots[data_type])] + tab = dcc.Tab(tab_content, label=data_type) + tabs.append(tab) + lc = dcc.Tabs(tabs) + layout.append(lc) + print(layout) + return layout + + +def build_header(project_id, session_id): + buttons = html.Div([html.Div([html.A('Download Project Report', + id='download-zip', + href=f"/downloads/{project_id}", + target="_blank", + n_clicks=0, + className="button_link" + )]), + html.Div([html.A("Regenerate Project Report", + id='regenerate', + title=project_id, + # TODO: {basic_path}? + # basic_path = '/'.join(pathname.split('/')[0:3]) + href=f"/apps/project?project_id={project_id}&force=1&session={session_id}", + target='', + n_clicks=0, + className="button_link")]), + html.Div([html.H3("Change Analysis' Configuration: "), + dcc.Dropdown( + id='my-dropdown', + options=[ + {'label': '', 'value': project_id + '/defaults'}, + {'label': 'Proteomics configuration', 'value': project_id + '/proteomics'}, + {'label': 'Interactomics configuration', + 'value': project_id + '/interactomics'}, + {'label': 'Phosphoproteomics configuration', + 'value': project_id + '/phosphoproteomics'}, + {'label': 'Clinical data configuration', 'value': project_id + '/clinical'}, + {'label': 'Multiomics configuration', 'value': project_id + '/multiomics'}, + {'label': 'Reset to defaults', 'value': project_id + '/reset'}], + value=project_id + '/defaults', + clearable=False, + style={'width': '50%', 'margin-bottom': '10px'}), + dcc.Upload(id='upload-config', + children=html.Div(['Drag and Drop or ', + html.A('Select Files')]), + max_size=-1, + multiple=False), + html.Div(id='output-data-upload')]) + ]) + + return buttons + + +@dash.callback([Output('upload-config', 'style'), + Output('output-data-upload', 'children'), + Output('upload-config', 'filename')], + [Input('upload-config', 'contents'), + Input('my-dropdown', 'value')], + [State('upload-config', 'filename')]) +def update_output(contents, value, fname): + ckg_config = ckg_utils.read_ckg_config() + display = {'display': 'none'} + uploaded = None + if value is not None: + page_id, dataset = value.split('/') + if not os.path.exists(ckg_config['tmp_directory']): + os.makedirs(ckg_config['tmp_directory']) + directory = os.path.join(ckg_config['tmp_directory'], page_id) + if dataset != "defaults": + display = {'width': '50%', + 'height': '60px', + 'lineHeight': '60px', + 
'borderWidth': '2px', + 'borderStyle': 'dashed', + 'borderRadius': '15px', + 'textAlign': 'center', + 'margin-bottom': '20px', + 'display': 'block'} + if not os.path.exists(directory): + os.makedirs(directory) + + if fname is None: + contents = None + if contents is not None: + with open(os.path.join(directory, dataset + '.yml'), 'wb') as out: + content_type, content_string = contents.split(',') + decoded = base64.b64decode(content_string) + out.write(decoded) + uploaded = dcc.Markdown("**{} configuration uploaded: {}** ✅".format(dataset.title(), fname)) + fname = None + contents = None + else: + uploaded = None + elif dataset == 'reset': + display = {'display': 'none'} + if os.path.exists(directory): + shutil.rmtree(directory) + return display, uploaded, fname diff --git a/ckg/report_manager/report.py b/ckg/report_manager/report.py index 7ad75969..0080e427 100644 --- a/ckg/report_manager/report.py +++ b/ckg/report_manager/report.py @@ -1,17 +1,18 @@ +import json import os.path -import pandas as pd -import plotly.io as pio +from collections import defaultdict + import h5py as h5 -import json import natsort -import dash_html_components as html +import pandas as pd +import plotly.io as pio +from dash import html from plotly.offline import iplot -from collections import defaultdict -from cyjupyter import Cytoscape -from ckg.report_manager import utils + from ckg import ckg_utils -from ckg.analytics_core.viz import viz from ckg.analytics_core import utils as acore_utils +from ckg.analytics_core.viz import viz +from ckg.report_manager import utils class Report: @@ -89,7 +90,7 @@ def save_report(self, directory): json_str_edges = ckg_utils.convert_dash_to_json(plot['net_tables_viz'][1]) figure_json["net_tables_viz"] = (json_str_nodes, json_str_edges) figure_json = json.dumps(figure_json, cls=ckg_utils.NumpyEncoder) - figure_id = str(i)+'_net' + figure_id = str(i) + '_net' else: json_str = ckg_utils.convert_dash_to_json(plot) figure_json = json.dumps(json_str, cls=ckg_utils.NumpyEncoder) @@ -110,8 +111,8 @@ def read_report(self, directory): for name in f: plot_id = name.split('~') for figure_id in f[name]: - figure_json = f[name+"/"+figure_id][0] - identifier = f[name+"/"+figure_id].attrs["identifier"] + figure_json = f[name + "/" + figure_id][0] + identifier = f[name + "/" + figure_id].attrs["identifier"] if 'net' in identifier: figure = {} net_json = json.loads(figure_json) @@ -200,25 +201,29 @@ def download_report(self, directory): figure_name = name + "_" + str(i) i += 1 if "net_json" in plot: - with open(os.path.join(directory, name+'.json'), 'w') as out: + with open(os.path.join(directory, name + '.json'), 'w') as out: out.write(json.dumps(plot["net_json"], cls=ckg_utils.NumpyEncoder)) try: - acore_utils.json_network_to_gml(plot["net_json"], os.path.join(directory, name+".gml")) + acore_utils.json_network_to_gml(plot["net_json"], os.path.join(directory, name + ".gml")) except Exception: pass if "net_tables" in plot: nodes_table, edges_table = plot['net_tables'] if isinstance(nodes_table, pd.DataFrame): - nodes_table.to_csv(os.path.join(directory, name+'_node_table.tsv'), sep='\t', header=True, index=False, doublequote=False) + nodes_table.to_csv(os.path.join(directory, name + '_node_table.tsv'), sep='\t', + header=True, index=False, doublequote=False) if isinstance(edges_table, pd.DataFrame): - edges_table.to_csv(os.path.join(directory, name+'_edges_table.tsv'), sep='\t', header=True, index=False, doublequote=False) + edges_table.to_csv(os.path.join(directory, name + '_edges_table.tsv'), 
sep='\t', + header=True, index=False, doublequote=False) if "app" in plot: plot = plot["app"] if 'props' in plot: if 'figure' in plot['props']: try: - viz.save_DASH_plot(plot['props']['figure'], name=figure_name, plot_format='svg', directory=directory) - viz.save_DASH_plot(plot['props']['figure'], name=figure_name, plot_format='png', directory=directory) + viz.save_DASH_plot(plot['props']['figure'], name=figure_name, plot_format='svg', + directory=directory) + viz.save_DASH_plot(plot['props']['figure'], name=figure_name, plot_format='png', + directory=directory) saved.add(figure_name) except Exception: pass diff --git a/ckg/report_manager/utils.py b/ckg/report_manager/utils.py index ed884c7c..9d9501fe 100644 --- a/ckg/report_manager/utils.py +++ b/ckg/report_manager/utils.py @@ -1,20 +1,19 @@ -import os -import pandas as pd -import dash_html_components as html -import dash_core_components as dcc -from ckg.graphdb_connector import connector -from datetime import datetime -import bs4 as bs -import random -import numpy as np -import chart_studio.plotly as py import base64 -from xhtml2pdf import pisa import json -from urllib import request +import os +import random import shutil import smtplib +from datetime import datetime from email.message import EmailMessage +from urllib import request + +import bs4 as bs +import chart_studio.plotly as py +from dash import dcc +from dash import html +from xhtml2pdf import pisa + from ckg import ckg_utils @@ -28,25 +27,26 @@ def send_message_to_slack_webhook(message, message_to, username='albsantosdel'): if os.path.exists(webhook_file): with open(webhook_file, 'r') as hf: webhook_url = hf.read() - post = {"text": "@{} : {}".format(message_to, message), "username": username, "icon_url": "https://slack.com/img/icons/app-57.png"} + post = {"text": "@{} : {}".format(message_to, message), "username": username, + "icon_url": "https://slack.com/img/icons/app-57.png"} try: json_data = json.dumps(post) req = request.Request(webhook_url, - data=json_data.encode('ascii'), - headers={'Content-Type': 'application/json'}) + data=json_data.encode('ascii'), + headers={'Content-Type': 'application/json'}) resp = request.urlopen(req) except Exception as em: print("EXCEPTION: " + str(em)) def send_email(message, subject, message_from, message_to): - msg = EmailMessage() + msg = EmailMessage() msg.set_content(message) msg['Subject'] = subject msg['From'] = message_from msg['to'] = message_to - s = smtplib.SMTP('localhost:1025') + s = smtplib.SMTP('localhost:1025') s.send_message(msg) s.quit() @@ -54,12 +54,14 @@ def send_email(message, subject, message_from, message_to): def compress_directory(name, directory, compression_format='zip'): try: if os.path.exists(directory) and os.path.isdir(directory): - if os.path.exists(directory+'.zip'): - os.remove(directory+'.zip') + if os.path.exists(directory + '.zip'): + os.remove(directory + '.zip') shutil.make_archive(name, compression_format, directory) shutil.rmtree(directory) else: - print("The directory {} failed. Exists?{} Is dir?{}".format(directory, os.path.exists(directory) and os.path.isdir(directory))) + print("The directory {} failed. Exists?{} Is dir?{}".format(directory, + os.path.exists(directory) and os.path.isdir( + directory))) except Exception as err: print("Could not compress file {} in directory {}. 
Error: {}".format(name, directory, err)) @@ -94,20 +96,20 @@ def get_image(figure, width, height): def parse_html(html_snippet): html_parsed = bs.BeautifulSoup(html_snippet) - return html_parsed + return html_parsed def hex2rgb(color): hex = color.lstrip('#') - rgb = tuple(int(hex[i:i+2], 16) for i in (0, 2, 4)) + rgb = tuple(int(hex[i:i + 2], 16) for i in (0, 2, 4)) rgba = rgb + (0.6,) return rgba def getNumberText(num): numbers = ["zero", "one", "two", "three", "four", "five", "six", "seven", "eight", - "nine", "ten", "eleven", "twelve", "thirteen", "fourteen", "fifteen", - "sixteen", "seventeen", "eighteen", "nineteen"] + "nine", "ten", "eleven", "twelve", "thirteen", "fourteen", "fifteen", + "sixteen", "seventeen", "eighteen", "nineteen"] if len(numbers) > num: return numbers[num] else: @@ -148,11 +150,11 @@ def convert_html_to_pdf(source_html, output_filename): # convert HTML to PDF pisa_status = pisa.CreatePDF( - source_html, # the HTML to convert - dest=result_file) # file handle to recieve result + source_html, # the HTML to convert + dest=result_file) # file handle to recieve result # close output file - result_file.close() # close output file + result_file.close() # close output file # return True on success and False on errors - return pisa_status.err \ No newline at end of file + return pisa_status.err diff --git a/requirements.txt b/requirements.txt index e479cb6f..5852fd05 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,60 +1,212 @@ +alabaster==0.7.12 +amqp==5.1.1 +appdirs==1.4.4 +argon2-cffi==21.3.0 +argon2-cffi-bindings==21.2.0 +async-timeout==4.0.2 +attrs==22.1.0 +autograd==1.5 +autograd-gamma==0.5.0 +Babel==2.10.3 +backcall==0.2.0 +bcrypt==3.1.7 +beautifulsoup4==4.7.1 +billiard==3.6.4.0 +biopython==1.73 +bioservices==1.10.1 +bleach==5.0.1 +Brotli==1.0.9 +cached-property==1.5.2 +cattrs==22.1.0 +celery==5.2.7 +certifi==2022.9.24 +cffi==1.15.1 +chardet==3.0.4 +chart-studio==1.0.0 +CKG==1.0.0 +click==8.1.3 +click-didyoumean==0.3.0 +click-plugins==1.1.1 +click-repl==0.2.0 +colorama==0.4.5 +colorlog==6.7.0 +combat==0.2.0 +cvxpy==1.1.7 +cycler==0.11.0 +cyjupyter==0.2.0 +dash==2.7.0 +dash-core-components==2.0.0 +dash-cytoscape==0.3.0 +dash-html-components==2.0.0 +dash-table==5.0.0 +debugpy==1.6.3 +decorator==5.1.1 +defusedxml==0.7.1 +Deprecated==1.2.13 +dnspython==2.2.1 +docutils==0.15.2 +easydev==0.12.0 +ecos==2.0.10 +entrypoints==0.4 +et-xmlfile==1.1.0 +eventlet==0.25.1 +exceptiongroup==1.0.0rc9 +fastjsonschema==2.16.2 +Flask==2.2.2 +Flask-Compress==1.13 +fonttools==4.37.3 +future==0.18.2 +gevent==21.12.0 +greenlet==1.1.3 +grequests==0.6.0 +gseapy==0.9.19 +h5py==2.10.0 +html5lib==1.1 +idna==2.8 +imagesize==1.4.1 +importlib-metadata==4.12.0 +importlib-resources==5.9.0 +iniconfig==1.1.1 +ipykernel==6.16.0 +ipython==7.34.0 +ipython-genutils==0.2.0 +ipywidgets==8.0.2 +itsdangerous==2.0.1 +jdcal==1.4.1 +jedi==0.18.1 +Jinja2==3.1.2 +joblib==1.2.0 +jsonschema==4.16.0 +jupyter==1.0.0 +jupyter-console==6.4.4 +jupyter-core==4.11.1 +jupyter_client==7.3.5 +jupyterlab-pygments==0.2.2 +jupyterlab-widgets==3.0.3 +kaleido==0.0.1 +kiwisolver==1.4.4 +kmapper==1.2.0 +kombu==5.2.4 +lifelines==0.23.1 +littleutils==0.2.2 +llvmlite==0.39.1 +lxml==4.3.4 +MarkupSafe==2.1.1 +matplotlib==3.5.3 +matplotlib-inline==0.1.6 +mistune==0.8.4 +monotonic==1.6 +mpmath==1.2.1 +natsort==6.0.0 +nbclient==0.5.13 +nbconvert==6.4.5 +nbformat==5.6.1 +nbsphinx==0.8.9 +neo4j==4.2.0 +nest-asyncio==1.5.6 +networkx==2.5 +nltk==3.5 +notebook==6.4.12 +numba==0.56.2 +numexpr==2.8.3 +numpy==1.19.5 +obonet==0.2.5 
+openpyxl==3.0.1 +osqp==0.6.2.post5 +outdated==0.2.1 +packaging==21.3 pandas==0.24.2 -Flask==1.0.3 +pandas-flavor==0.2.0 +pandocfilters==1.5.0 +parso==0.8.3 passlib==1.7.1 -neo4j==4.2.0 +patsy==0.5.2 +pexpect==4.8.0 +pickleshare==0.7.5 +Pillow==9.2.0 +pingouin==0.3.12 +pkgutil_resolve_name==1.3.10 +plotly==5.10.0 +pluggy==1.0.0 +prometheus-client==0.14.1 +prompt-toolkit==3.0.31 +psutil==5.9.2 +ptyprocess==0.7.0 +py==1.11.0 +pycparser==2.21 +Pygments==2.13.0 +pynndescent==0.5.7 +pyparsing==3.0.9 +PyPDF2==2.11.0 +pyrsistent==0.18.1 +pytest==7.1.3 +python-dateutil==2.8.2 +python-louvain==0.13 +pytz==2022.2.1 +pyvis==0.1.7.0 PyYAML==5.1.1 -wget==3.2 -requests==2.22.0 -biopython==1.73 -obonet==0.2.5 +pyzmq==24.0.1 +qdldl==0.1.5.post2 +qtconsole==5.3.2 +QtPy==2.2.0 rarfile==3.1 -dash==1.2.0 -redis>=3.2.1 -matplotlib>=3.1.1 +redis==4.3.4 +regex==2022.9.13 +reportlab==3.6.6 +requests==2.22.0 +requests-cache==0.9.6 +retrying==1.3.3 +rpy2==3.0.5 +rst2pdf==0.98 +scikit-learn==1.0.2 scipy==1.4.1 -cyjupyter==0.2.0 -pyvis==0.1.7.0 -tzlocal==1.5.1 -webweb==0.0.37 -beautifulsoup4==4.7.1 +scs==3.2.0 +seaborn==0.11.2 +Send2Trash==1.8.0 +simplegeneric==0.8.1 +six==1.16.0 sklearn==0.0 -statsmodels==0.10.0 -umap-learn>=0.3.9 -pingouin>=0.3.10 -python-louvain==0.13 +smartypants==2.0.1 snfpy==0.2.2 -kmapper==1.2.0 -natsort==6.0.0 -mpmath==1.2.1 -combat==0.2.0 -lifelines==0.23.1 -cvxpy==1.1.7 +snowballstemmer==2.2.0 +soupsieve==2.3.2.post1 +Sphinx==2.3.1 +sphinxcontrib-applehelp==1.0.2 +sphinxcontrib-devhelp==1.0.2 +sphinxcontrib-htmlhelp==2.0.0 +sphinxcontrib-jsmath==1.0.1 +sphinxcontrib-qthelp==1.0.3 +sphinxcontrib-serializinghtml==1.1.5 +statsmodels==0.10.0 +suds-community==1.1.2 +tables==3.6.1 +tabulate==0.8.10 +tenacity==8.1.0 +terminado==0.16.0 +testpath==0.6.0 +threadpoolctl==3.1.0 +tomli==2.0.1 +tornado==6.2 +tqdm==4.64.1 +traitlets==5.4.0 +typing_extensions==4.3.0 +tzlocal==1.5.1 +umap-learn==0.5.3 +url-normalize==1.4.3 +urllib3==1.25.11 +vine==5.0.0 +wcwidth==0.2.5 +webencodings==0.5.1 +webweb==0.0.37 +Werkzeug==2.2.2 +wget==3.2 +widgetsnbextension==4.0.3 wordcloud==1.8.1 -nltk==3.5 -dash_cytoscape>=0.1.1 -networkx==2.5 -chart-studio==1.0.0 +wrapt==1.14.1 +xarray==0.14.1 xhtml2pdf==0.2.4 -lxml==4.3.4 -celery==5.0.5 -cffi>=1.14.0 -rpy2==3.0.5; platform_system!="Windows" -h5py==2.10.0 -jupyter>=1.0.0 xlrd==1.2.0 -openpyxl==3.0.1 -bcrypt==3.1.7 -tables==3.6.1 -psutil>=5.6.6 -plotly==4.9.0 -kaleido==0.0.1 -gseapy==0.9.19 -numpy==1.19.5 -docutils==0.15.2 -sphinx==2.3.1 -rst2pdf==0.98 -nbsphinx>=0.8.0 -ipykernel>=5.4.3 -eventlet==0.25.1 -itsdangerous==2.0.1 -reportlab==3.6.6 \ No newline at end of file +xmltodict==0.13.0 +zipp==3.8.1 +zope.event==4.5.0 +zope.interface==5.4.0 diff --git a/setup.py b/setup.py index c77ad790..ee80c676 100644 --- a/setup.py +++ b/setup.py @@ -1,31 +1,37 @@ +from subprocess import check_call + import setuptools -from setuptools import setup from setuptools.command.develop import develop from setuptools.command.install import install -from subprocess import check_call -import ckg.init +import ckg.init with open("README.rst", "r") as fh: long_description = fh.read() + class PreInstallCommand(install): """Pre-installation for install mode.""" + def run(self): + # check_call("pip install -r requirements.txt".split()) check_call("pip install -r requirements.txt".split()) ckg.init.installer_script() install.run(self) - + + class PreDevelopCommand(develop): """Pre-installation for install mode.""" + def run(self): + # check_call("pip install -r requirements.txt".split()) check_call("pip 
install -r requirements.txt".split()) ckg.init.installer_script() develop.run(self) setuptools.setup( - name="CKG", # Replace with your own username + name="CKG", # Replace with your own username version="1.0.0", author="Alberto Santos Delgado", author_email="alberto.santos@sund.ku.dk", @@ -39,7 +45,7 @@ def run(self): 'install': PreInstallCommand, }, entry_points={'console_scripts': [ - 'ckg_app=ckg.report_manager.index:main', + 'ckg_app=ckg.report_manager.app:main', 'ckg_debug=ckg.debug:main', 'ckg_build=ckg.graphdb_builder.builder.builder:run_full_update', 'ckg_update_textmining=ckg.graphdb_builder.builder.builder:update_textmining']}, From fccdf2a4b67c8e8abcd11e786e3e0eb87f165561 Mon Sep 17 00:00:00 2001 From: Nils Krehl Date: Fri, 4 Nov 2022 16:25:48 +0100 Subject: [PATCH 2/6] clean up --- ckg/ckg_utils.py | 1 - ckg/config/analytics_factory_log.config | 45 ------------------------- ckg/config/ckg_config.yml | 23 ------------- ckg/config/graphdb_builder_log.config | 45 ------------------------- ckg/config/graphdb_connector_log.config | 45 ------------------------- ckg/config/report_manager_log.config | 45 ------------------------- ckg/report_manager/app.py | 2 +- ckg/report_manager/pages/projectPage.py | 17 +++++----- 8 files changed, 9 insertions(+), 214 deletions(-) delete mode 100644 ckg/config/analytics_factory_log.config delete mode 100644 ckg/config/ckg_config.yml delete mode 100644 ckg/config/graphdb_builder_log.config delete mode 100644 ckg/config/graphdb_connector_log.config delete mode 100644 ckg/config/report_manager_log.config diff --git a/ckg/ckg_utils.py b/ckg/ckg_utils.py index ee4e37b4..8c2d388c 100644 --- a/ckg/ckg_utils.py +++ b/ckg/ckg_utils.py @@ -3,7 +3,6 @@ import yaml import json import logging -from pathlib import Path def read_ckg_config(key=None): diff --git a/ckg/config/analytics_factory_log.config b/ckg/config/analytics_factory_log.config deleted file mode 100644 index caca363d..00000000 --- a/ckg/config/analytics_factory_log.config +++ /dev/null @@ -1,45 +0,0 @@ -{ "version": 1, - "disable_existing_loggers": false, - "formatters": { - "simple": { - "format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s" - } - }, - "handlers": { - "console": { - "class": "logging.StreamHandler", - "level": "NOTSET", - "formatter": "simple", - "stream": "ext://sys.stdout" - }, - "info_file_handler": { - "class": "logging.handlers.RotatingFileHandler", - "level": "INFO", - "formatter": "simple", - "filename": "/home/st/Schreibtisch/CKG/log/analytics_factory.log", - "maxBytes": 10485760, - "backupCount": 20, - "encoding": "utf8" - }, - "error_file_handler": { - "class": "logging.handlers.SMTPHandler", - "level": "CRITICAL", - "formatter": "simple", - "mailhost": "localhost", - "fromaddr": "error@ckg.com", - "toaddrs": "alberto.santos@cpr.ku.dk", - "subject": "CKG ERROR - Analytics Factory" - } - }, - "loggers": { - "my_module": { - "level": "ERROR", - "handlers": ["console"], - "propagate": "no" - } - }, - "root":{ - "level": "INFO", - "handlers": ["info_file_handler", "error_file_handler"] - } -} diff --git a/ckg/config/ckg_config.yml b/ckg/config/ckg_config.yml deleted file mode 100644 index 5889eccc..00000000 --- a/ckg/config/ckg_config.yml +++ /dev/null @@ -1,23 +0,0 @@ -version: 1.0 -ckg_directory: "/home/st/Schreibtisch/CKG/ckg" -data_directory: "/home/st/Schreibtisch/CKG/data" -archive_directory: "/home/st/Schreibtisch/CKG/data/archive" -databases_directory: "/home/st/Schreibtisch/CKG/data/databases" -experiments_directory: 
"/home/st/Schreibtisch/CKG/data/experiments" -ontologies_directory: "/home/st/Schreibtisch/CKG/data/ontologies" -users_directory: "/home/st/Schreibtisch/CKG/data/users" -stats_directory: "/home/st/Schreibtisch/CKG/data/stats" -downloads_directory: "/home/st/Schreibtisch/CKG/data/downloads" -reports_directory: "/home/st/Schreibtisch/CKG/data/reports" -tmp_directory: "/home/st/Schreibtisch/CKG/data/tmp" -imports_directory: "/home/st/Schreibtisch/CKG/data/imports" -imports_databases_directory: "/home/st/Schreibtisch/CKG/data/imports/databases" -imports_experiments_directory: "/home/st/Schreibtisch/CKG/data/imports/experiments" -imports_ontologies_directory: "/home/st/Schreibtisch/CKG/data/imports/ontologies" -imports_users_directory: "/home/st/Schreibtisch/CKG/data/imports/users" -imports_curated_directory: "/home/st/Schreibtisch/CKG/data/imports/curated" -log_directory: "/home/st/Schreibtisch/CKG/log" -report_manager_log: "/home/st/Schreibtisch/CKG/ckg/config/report_manager_log.config" -graphdb_connector_log: "/home/st/Schreibtisch/CKG/ckg/config/graphdb_connector_log.config" -graphdb_builder_log: "/home/st/Schreibtisch/CKG/ckg/config/graphdb_builder_log.config" -analytics_factory_log: "/home/st/Schreibtisch/CKG/ckg/config/analytics_factory_log.config" diff --git a/ckg/config/graphdb_builder_log.config b/ckg/config/graphdb_builder_log.config deleted file mode 100644 index 6d4f9f3c..00000000 --- a/ckg/config/graphdb_builder_log.config +++ /dev/null @@ -1,45 +0,0 @@ -{ "version": 1, - "disable_existing_loggers": false, - "formatters": { - "simple": { - "format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s" - } - }, - "handlers": { - "console": { - "class": "logging.StreamHandler", - "level": "NOTSET", - "formatter": "simple", - "stream": "ext://sys.stdout" - }, - "info_file_handler": { - "class": "logging.handlers.RotatingFileHandler", - "level": "INFO", - "formatter": "simple", - "filename": "/home/st/Schreibtisch/CKG/log/graphdb_builder.log", - "maxBytes": 10485760, - "backupCount": 20, - "encoding": "utf8" - }, - "error_file_handler": { - "class": "logging.handlers.SMTPHandler", - "level": "CRITICAL", - "formatter": "simple", - "mailhost": "localhost", - "fromaddr": "error@ckg.com", - "toaddrs": "alberto.santos@cpr.ku.dk", - "subject": "CKG ERROR - GraphDB builder" - } - }, - "loggers": { - "my_module": { - "level": "ERROR", - "handlers": ["console"], - "propagate": "no" - } - }, - "root":{ - "level": "WARNING", - "handlers": ["info_file_handler", "error_file_handler"] - } -} diff --git a/ckg/config/graphdb_connector_log.config b/ckg/config/graphdb_connector_log.config deleted file mode 100644 index ed13b1e2..00000000 --- a/ckg/config/graphdb_connector_log.config +++ /dev/null @@ -1,45 +0,0 @@ -{ "version": 1, - "disable_existing_loggers": false, - "formatters": { - "simple": { - "format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s" - } - }, - "handlers": { - "console": { - "class": "logging.StreamHandler", - "level": "NOTSET", - "formatter": "simple", - "stream": "ext://sys.stdout" - }, - "info_file_handler": { - "class": "logging.handlers.RotatingFileHandler", - "level": "INFO", - "formatter": "simple", - "filename": "/home/st/Schreibtisch/CKG/log/graphdb_connector.log", - "maxBytes": 10485760, - "backupCount": 20, - "encoding": "utf8" - }, - "error_file_handler": { - "class": "logging.handlers.SMTPHandler", - "level": "CRITICAL", - "formatter": "simple", - "mailhost": "localhost", - "fromaddr": "error@ckg.com", - "toaddrs": "alberto.santos@cpr.ku.dk", - "subject": 
"CKG ERROR - GraphDB connector" - } - }, - "loggers": { - "my_module": { - "level": "ERROR", - "handlers": ["console"], - "propagate": "no" - } - }, - "root":{ - "level": "WARNING", - "handlers": ["info_file_handler", "error_file_handler"] - } -} diff --git a/ckg/config/report_manager_log.config b/ckg/config/report_manager_log.config deleted file mode 100644 index e94b663c..00000000 --- a/ckg/config/report_manager_log.config +++ /dev/null @@ -1,45 +0,0 @@ -{ "version": 1, - "disable_existing_loggers": false, - "formatters": { - "simple": { - "format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s" - } - }, - "handlers": { - "console": { - "class": "logging.StreamHandler", - "level": "NOTSET", - "formatter": "simple", - "stream": "ext://sys.stdout" - }, - "info_file_handler": { - "class": "logging.handlers.RotatingFileHandler", - "level": "INFO", - "formatter": "simple", - "filename": "/home/st/Schreibtisch/CKG/log/report_manager.log", - "maxBytes": 10485760, - "backupCount": 20, - "encoding": "utf8" - }, - "error_file_handler": { - "class": "logging.handlers.SMTPHandler", - "level": "CRITICAL", - "formatter": "simple", - "mailhost": "localhost", - "fromaddr": "error@ckg.com", - "toaddrs": "alberto.santos@cpr.ku.dk", - "subject": "CKG ERROR - Report manager" - } - }, - "loggers": { - "my_module": { - "level": "ERROR", - "handlers": ["console"], - "propagate": "no" - } - }, - "root":{ - "level": "INFO", - "handlers": ["info_file_handler", "error_file_handler"] - } -} diff --git a/ckg/report_manager/app.py b/ckg/report_manager/app.py index c235ed1b..15b5ec91 100644 --- a/ckg/report_manager/app.py +++ b/ckg/report_manager/app.py @@ -19,7 +19,7 @@ cwd = os.path.dirname(os.path.abspath(__file__)) assets_path = os.path.join(cwd, 'assets') os.chdir(cwd) -pages_path = "./pages" # os.path.join(cwd, 'pages') +pages_path = "./pages" app = dash.Dash("app", server=server, assets_folder=assets_path, external_stylesheets=[assets_path + "custom.css"], meta_tags=[{"name": "viewport", "content": "width=device-width, initial-scale=1"}], use_pages=True, pages_folder=pages_path) diff --git a/ckg/report_manager/pages/projectPage.py b/ckg/report_manager/pages/projectPage.py index 0027eb46..de467b9d 100644 --- a/ckg/report_manager/pages/projectPage.py +++ b/ckg/report_manager/pages/projectPage.py @@ -12,7 +12,7 @@ from ckg.report_manager.worker import generate_project_report title = "Project details" -subtitle = "You are successfully authorized" +subtitle = "" description = "" dash.register_page(__name__, path='/apps/project', title=f"{title} - {subtitle}", description=description) @@ -61,17 +61,15 @@ def build_page(project_id, force, session_id): print("Get project") - # TODO: fix this if p.name is not None: title = "Project: {}".format(p.name) - else: - title = '' + plots = p.show_report("app") print("Plots:") print(plots) p = None tabs = [] - buttons = build_header(project_id, session_id) + buttons = build_header(project_id, session_id, title) print("build header") layout = [] @@ -87,8 +85,11 @@ def build_page(project_id, force, session_id): return layout -def build_header(project_id, session_id): - buttons = html.Div([html.Div([html.A('Download Project Report', +def build_header(project_id, session_id, title): + buttons = html.Div([html.H1(children=title), + html.H2(children=subtitle), + html.Div(children=description), + html.Div([html.A('Download Project Report', id='download-zip', href=f"/downloads/{project_id}", target="_blank", @@ -98,8 +99,6 @@ def build_header(project_id, session_id): 
html.Div([html.A("Regenerate Project Report", id='regenerate', title=project_id, - # TODO: {basic_path}? - # basic_path = '/'.join(pathname.split('/')[0:3]) href=f"/apps/project?project_id={project_id}&force=1&session={session_id}", target='', n_clicks=0, From fc84733bfb3c0501f72043b3300695de86629ee8 Mon Sep 17 00:00:00 2001 From: Nils Krehl Date: Mon, 14 Nov 2022 21:48:40 +0100 Subject: [PATCH 3/6] clean up; minor fixes --- ckg/report_manager/app.py | 211 +---------------------- ckg/report_manager/apps/homepageStats.py | 5 +- ckg/report_manager/index.py | 197 +++++++++++++++++++++ ckg/report_manager/pages/adminPage.py | 38 ++-- ckg/report_manager/pages/homePage.py | 9 +- ckg/report_manager/pages/projectPage.py | 23 +-- ckg/report_manager/project.py | 8 +- requirements.txt | 3 +- setup.py | 4 +- 9 files changed, 251 insertions(+), 247 deletions(-) create mode 100644 ckg/report_manager/index.py diff --git a/ckg/report_manager/app.py b/ckg/report_manager/app.py index 15b5ec91..9573c86f 100644 --- a/ckg/report_manager/app.py +++ b/ckg/report_manager/app.py @@ -1,223 +1,30 @@ import os -import subprocess -from datetime import datetime -from uuid import uuid4 import dash import flask import redis -from dash import html, dcc, Output, Input - -import ckg.report_manager.user as user -from ckg import ckg_utils -from ckg.report_manager import utils -from ckg.report_manager.worker import run_minimal_update_task, \ - run_full_update_task - server = flask.Flask('app') cwd = os.path.dirname(os.path.abspath(__file__)) assets_path = os.path.join(cwd, 'assets') os.chdir(cwd) pages_path = "./pages" -app = dash.Dash("app", server=server, assets_folder=assets_path, external_stylesheets=[assets_path + "custom.css"], - meta_tags=[{"name": "viewport", "content": "width=device-width, initial-scale=1"}], use_pages=True, - pages_folder=pages_path) -ckg_config = ckg_utils.read_ckg_config() - -def main(): - print("IN MAIN") - celery_working_dir = os.path.dirname(os.path.abspath(__file__)) - os.chdir(celery_working_dir) - queues = [('creation', 1, 'INFO'), ('compute', 3, 'INFO'), ('update', 1, 'INFO')] - for queue, processes, log_level in queues: - celery_cmdline = 'celery -A ckg.report_manager.worker worker --loglevel={} --concurrency={} -E -Q {}'.format( - log_level, processes, queue).split(" ") - print("Ready to call {} ".format(celery_cmdline)) - subprocess.Popen(celery_cmdline) - print("Done calling {} ".format(celery_cmdline)) - app.run_server(debug=True, port=8051) #application.run(debug=False, host='0.0.0.0') +application = dash.Dash("app", server=server, assets_folder=assets_path, + external_stylesheets=[assets_path + "custom.css"], + meta_tags=[{"name": "viewport", "content": "width=device-width, initial-scale=1"}], + use_pages=True, + pages_folder=pages_path) r = redis.StrictRedis.from_url('redis://localhost:6379') with open(os.path.join(assets_path, "app_template.html"), 'r', encoding='utf8') as f: template = f.read() -app.index_string = template -app.scripts.config.serve_locally = False -app.config.suppress_callback_exceptions = True +application.index_string = template +application.scripts.config.serve_locally = False +application.config.suppress_callback_exceptions = True external_js = ["http://code.jquery.com/jquery-3.4.1.min.js"] for js in external_js: - app.scripts.append_script({"external_url": js}) - -app.layout = html.Div(children=[ - - html.Div(id="user-status-header"), - html.Hr(), - - html.H1('Multi-page app with Dash Pages'), - html.Div( - [ - html.Div( - dcc.Link( - f"{page['name']} - 
{page['path']}", href=page["relative_path"], refresh=True - ) - ) - for page in dash.page_registry.values() - ] - ), - html.Hr(), - - dcc.Loading(children=[html.Div([dcc.Location(id='url', refresh=False), - html.Div(id='page-content', - style={'padding-top': 10}, - className='container-fluid'), - dash.page_container])], - style={'text-align': 'center', - 'top': '50%', - 'left': '50%', - 'height': '250px'}, - type='cube', color='#2b8cbe'), -]) - - -@app.callback( - Output("user-status-header", "children"), - Input("url", "pathname"), -) -def update_authentication_status(_): - session_cookie = flask.request.cookies.get('custom-auth-session') - logged_in = session_cookie is not None - if logged_in: - return dcc.Link([html.Form([html.Button('Logout', type='submit')], action='/apps/logout', method='post', - style={'position': 'absolute', 'right': '0px'}, id='logout')], - href="apps/logoutPage") - return dcc.Link(html.Form([html.Button('Login', type='submit')], - style={'position': 'absolute', 'right': '0px'}, id='login'), href="/apps/loginPage") - - -@server.route('/apps/login', methods=['POST', 'GET']) -def route_login(): - data = flask.request.form - username = data.get('username') - password = data.get('password') - if not username or not password: - flask.abort(401) - elif not user.User(username).verify_password(password): - return flask.redirect('/login_error') - else: - rep = flask.redirect('/') - rep.set_cookie('custom-auth-session', - username + '_' + datetime.now().strftime('%Y%m-%d%H-%M%S-') + str(uuid4())) - return rep - - -@app.server.route('/apps/logout', methods=['POST']) -def route_logout(): - # Redirect back to the index and remove the session cookie. - rep = flask.redirect('/') - rep.set_cookie('custom-auth-session', '', expires=0) - return rep - - -@server.route('/create_user', methods=['POST', 'GET']) -def route_create_user(): - data = flask.request.form - name = data.get('name') - surname = data.get('surname') - affiliation = data.get('affiliation') - acronym = data.get('acronym') - email = data.get('email') - alt_email = data.get('alt_email') - phone = data.get('phone') - uname = name[0] + surname - username = uname - - registered = 'error_exists' - iter = 1 - while registered == 'error_exists': - u = user.User(username=username.lower(), name=name, surname=surname, affiliation=affiliation, acronym=acronym, - phone=phone, email=email, alternative_email=alt_email) - registered = u.register() - if registered is None: - rep = flask.redirect('/apps/admin?error_new_user={}'.format('Failed Database')) - elif registered == 'error_exists': - username = uname + str(iter) - iter += 1 - elif registered == 'error_email': - rep = flask.redirect('/apps/admin?error_new_user={}'.format('Email already registered')) - elif registered == 'error_database': - rep = flask.redirect('/apps/admin?error_new_user={}'.format('User could not be saved in the database')) - else: - rep = flask.redirect('/apps/admin?new_user={}'.format(username)) - - return rep - - -@server.route('/update_minimal', methods=['POST', 'GET']) -def route_minimal_update(): - session_cookie = flask.request.cookies.get('custom-auth-session') - username = session_cookie.split('_')[0] - internal_id = datetime.now().strftime('%Y%m-%d%H-%M%S-') - result = run_minimal_update_task.apply_async(args=[username], task_id='run_minimal_' + session_cookie + internal_id, - queue='update') - - rep = flask.redirect('/dashs/admin?running=minimal') - - return rep - - -@server.route('/update_full', methods=['POST', 'GET']) -def route_full_update(): 
- session_cookie = flask.request.cookies.get('custom-auth-session') - data = flask.request.form - download = data.get('dwn-radio') == 'true' - username = session_cookie.split('_')[0] - internal_id = datetime.now().strftime('%Y%m-%d%H-%M%S-') - result = run_full_update_task.apply_async(args=[username, download], - task_id='run_full_' + session_cookie + internal_id, queue='update') - - rep = flask.redirect('/apps/admin/running=full') - - return rep - - -@server.route('/downloads/') -def route_report_url(value): - uri = os.path.join(ckg_config['downloads_directory'], value + '.zip') - return flask.send_file(uri, download_name=value + '.zip', as_attachment=True, max_age=-1) - - -@server.route('/example_files') -def route_example_files_url(): - uri = os.path.join(ckg_config['data_directory'], 'example_files.zip') - return flask.send_file(uri, download_name='example_files.zip', as_attachment=True, max_age=-1) - - -@server.route('/apps/templates') -def serve_static(value): - cwd = os.path.dirname(os.path.abspath(__file__)) - directory = os.path.join(cwd, 'apps/templates/') - filename = os.path.join(directory, value) - url = filename + '.zip' - if not os.path.isfile(url): - utils.compress_directory(filename, os.path.join(directory, 'files'), compression_format='zip') - - return flask.send_file(url, download_name=f"{value}.zip", as_attachment=True, max_age=-1) - - -@server.route('/tmp/') -def route_upload_url(value): - page_id, project_id = value.split('_') - directory = ckg_config['tmp_directory'] - filename = os.path.join(directory, 'Uploaded_files_' + project_id) - url = filename + '.zip' - - return flask.send_file(url, download_name=filename.split('/')[-1] + '.zip', as_attachment=True, - max_age=-1) - - -if __name__ == '__main__': - main() + application.scripts.append_script({"external_url": js}) diff --git a/ckg/report_manager/apps/homepageStats.py b/ckg/report_manager/apps/homepageStats.py index a459705d..aa831cce 100644 --- a/ckg/report_manager/apps/homepageStats.py +++ b/ckg/report_manager/apps/homepageStats.py @@ -253,9 +253,8 @@ def quick_numbers_panel(): project_ids.append((project['n']['name'], project['n']['id'])) project_links = [html.H4('Available Projects:')] - print("Connected successfully") except Exception: - print("Error connecting to Database") # pass + print("Error connecting to Database") for project_name, project_id in project_ids: project_links.append(html.A(project_name.title(), @@ -323,7 +322,7 @@ def quick_numbers_panel(): def update_project_url(value): if value is not None and len(value) > 1: return html.A(value[0].title(), - href='/apps/project?project_id={}&force=0'.format(value[1]), + href=f"/apps/project?project_id={value}&force=0", target='', n_clicks=0, className="button_link") diff --git a/ckg/report_manager/index.py b/ckg/report_manager/index.py new file mode 100644 index 00000000..ea50338d --- /dev/null +++ b/ckg/report_manager/index.py @@ -0,0 +1,197 @@ +import os +import subprocess +from datetime import datetime +from uuid import uuid4 + +import dash +import flask +from dash import html, dcc, Output, Input + +import ckg.report_manager.user as user +from ckg import ckg_utils +from ckg.graphdb_builder import builder_utils +from ckg.report_manager import utils +from ckg.report_manager.app import application, server +from ckg.report_manager.worker import run_minimal_update_task, \ + run_full_update_task + +try: + ckg_config = ckg_utils.read_ckg_config() + log_config = ckg_config['report_manager_log'] + logger = builder_utils.setup_logging(log_config, key="app") + 
config = builder_utils.setup_config('builder') + separator = config["separator"] +except Exception as err: + logger.error("Reading configuration > {}.".format(err)) + + +def main(): + logger.info("Starting CKG App") + celery_working_dir = os.path.dirname(os.path.abspath(__file__)) + os.chdir(celery_working_dir) + queues = [('creation', 1, 'INFO'), ('compute', 3, 'INFO'), ('update', 1, 'INFO')] + for queue, processes, log_level in queues: + celery_cmdline = 'celery -A ckg.report_manager.worker worker --loglevel={} --concurrency={} -E -Q {}'.format( + log_level, processes, queue).split(" ") + logger.info("Ready to call {} ".format(celery_cmdline)) + subprocess.Popen(celery_cmdline) + logger.info("Done calling {} ".format(celery_cmdline)) + application.run_server(debug=False, host='0.0.0.0') + + +application.layout = html.Div(children=[ + + html.Div(id="user-status-header"), + html.Hr(), + + dcc.Loading(children=[html.Div([dcc.Location(id='url', refresh=False), + html.Div(id='page-content', + style={'padding-top': 10}, + className='container-fluid'), + dash.page_container])], + style={'text-align': 'center', + 'top': '50%', + 'left': '50%', + 'height': '250px'}, + type='cube', color='#2b8cbe'), +]) + + +@dash.callback( + Output("user-status-header", "children"), + Input("url", "pathname"), +) +def update_authentication_status(_): + session_cookie = flask.request.cookies.get('custom-auth-session') + logged_in = session_cookie is not None + if logged_in: + return dcc.Link([html.Form([html.Button('Logout', type='submit')], action='/apps/logout', method='post', + style={'position': 'absolute', 'right': '0px'}, id='logout')], + href="apps/logoutPage") + return dcc.Link(html.Form([html.Button('Login', type='submit')], + style={'position': 'absolute', 'right': '0px'}, id='login'), href="/apps/loginPage") + + +@server.route('/apps/login', methods=['POST', 'GET']) +def route_login(): + data = flask.request.form + username = data.get('username') + password = data.get('password') + if not username or not password: + flask.abort(401) + elif not user.User(username).verify_password(password): + return flask.redirect('/login_error') + else: + rep = flask.redirect('/') + rep.set_cookie('custom-auth-session', + username + '_' + datetime.now().strftime('%Y%m-%d%H-%M%S-') + str(uuid4())) + return rep + + +@server.route('/apps/logout', methods=['POST']) +def route_logout(): + # Redirect back to the index and remove the session cookie. 
+ rep = flask.redirect('/') + rep.set_cookie('custom-auth-session', '', expires=0) + return rep + + +@server.route('/create_user', methods=['POST', 'GET']) +def route_create_user(): + data = flask.request.form + name = data.get('name') + surname = data.get('surname') + affiliation = data.get('affiliation') + acronym = data.get('acronym') + email = data.get('email') + alt_email = data.get('alt_email') + phone = data.get('phone') + uname = name[0] + surname + username = uname + + registered = 'error_exists' + iter = 1 + while registered == 'error_exists': + u = user.User(username=username.lower(), name=name, surname=surname, affiliation=affiliation, acronym=acronym, + phone=phone, email=email, alternative_email=alt_email) + registered = u.register() + if registered is None: + rep = flask.redirect('/apps/admin?error_new_user={}'.format('Failed Database')) + elif registered == 'error_exists': + username = uname + str(iter) + iter += 1 + elif registered == 'error_email': + rep = flask.redirect('/apps/admin?error_new_user={}'.format('Email already registered')) + elif registered == 'error_database': + rep = flask.redirect('/apps/admin?error_new_user={}'.format('User could not be saved in the database')) + else: + rep = flask.redirect('/apps/admin?new_user={}'.format(username)) + + return rep + + +@server.route('/update_minimal', methods=['POST', 'GET']) +def route_minimal_update(): + session_cookie = flask.request.cookies.get('custom-auth-session') + username = session_cookie.split('_')[0] + internal_id = datetime.now().strftime('%Y%m-%d%H-%M%S-') + result = run_minimal_update_task.apply_async(args=[username], task_id='run_minimal_' + session_cookie + internal_id, + queue='update') + + rep = flask.redirect('/dashs/admin?running=minimal') + + return rep + + +@server.route('/update_full', methods=['POST', 'GET']) +def route_full_update(): + session_cookie = flask.request.cookies.get('custom-auth-session') + data = flask.request.form + download = data.get('dwn-radio') == 'true' + username = session_cookie.split('_')[0] + internal_id = datetime.now().strftime('%Y%m-%d%H-%M%S-') + result = run_full_update_task.apply_async(args=[username, download], + task_id='run_full_' + session_cookie + internal_id, queue='update') + + rep = flask.redirect('/apps/admin/running=full') + + return rep + + +@server.route('/downloads/') +def route_report_url(value): + uri = os.path.join(ckg_config['downloads_directory'], value + '.zip') + return flask.send_file(uri, download_name=value + '.zip', as_attachment=True, max_age=-1) + + +@server.route('/example_files') +def route_example_files_url(): + uri = os.path.join(ckg_config['data_directory'], 'example_files.zip') + return flask.send_file(uri, download_name='example_files.zip', as_attachment=True, max_age=-1) + + +@server.route('/apps/templates') +def serve_static(value): + cwd = os.path.dirname(os.path.abspath(__file__)) + directory = os.path.join(cwd, 'apps/templates/') + filename = os.path.join(directory, value) + url = filename + '.zip' + if not os.path.isfile(url): + utils.compress_directory(filename, os.path.join(directory, 'files'), compression_format='zip') + + return flask.send_file(url, download_name=f"{value}.zip", as_attachment=True, max_age=-1) + + +@server.route('/tmp/') +def route_upload_url(value): + page_id, project_id = value.split('_') + directory = ckg_config['tmp_directory'] + filename = os.path.join(directory, 'Uploaded_files_' + project_id) + url = filename + '.zip' + + return flask.send_file(url, download_name=filename.split('/')[-1] + '.zip', 
as_attachment=True, + max_age=-1) + + +if __name__ == '__main__': + main() diff --git a/ckg/report_manager/pages/adminPage.py b/ckg/report_manager/pages/adminPage.py index 35dcb32e..82b20b0c 100644 --- a/ckg/report_manager/pages/adminPage.py +++ b/ckg/report_manager/pages/adminPage.py @@ -9,13 +9,28 @@ dash.register_page(__name__, path='/apps/admin', title=f"{title} - {subtitle}", description=description) -def layout(): - # TODO: adapt authentication for admin +def layout(new_user=None, error_new_user=None, running=None): session_cookie = flask.request.cookies.get('custom-auth-session') logged_in = session_cookie is not None if logged_in == False: return html.Div(["Please ", dcc.Link("login", href="/apps/loginPage"), " to continue"]) + action_status_layout = [] + if new_user != None: + if error_new_user != None: + action_status_layout.append( + html.Div(children=[html.H3("– Error creating new user: {} – ".format(new_user.replace('%20', ' ')))], + className='error_panel')) + else: + action_status_layout.append( + html.Div(children=[html.H3("– New user successfully created: {} –".format(new_user))], + className='info_panel')) + + if running != None: + action_status_layout.append(html.Div(children=[html.H3( + "– The {} update is running. This will take a while, check the logs: graphdb_builder.log for more information –".format( + running))], className='info_panel')) + create_user_form = [html.H3("Create CKG User"), html.Form([ html.Div(children=[html.Label('Name'), dcc.Input(placeholder='name', name='name', type='text', required=True), @@ -45,16 +60,12 @@ def layout(): html.Div(children=[ html.Form([html.Button('Full Update', type='submit', className='button_link'), html.Div(children=[html.H4("Download:"), - html.Label("Yes", className='radioitem'), - dcc.Input(id='Yes', - name='dwn-radio', - value=True, - type='radio'), - html.Label("No", className='radioitem'), - dcc.Input(id='No', - name='dwn-radio', - value=False, - type='radio')])], action='/update_full', + dcc.RadioItems(options=[ + {'label': 'Yes', 'value': True}, + {'label': 'No', 'value': False}], + value=False, inline=True, className='dwn-radio'), + ])], + action='/update_full', method='post'), html.P( "This option will regenerate the entire database, downloading data from the different Ontologies and Databases (Download=Yes) and loading them and all existing projects into CKG's graph database.", @@ -68,5 +79,6 @@ def layout(): html.Div(children=description), ])] - admin_layout.extend(admin_options) + admin_layout.append(admin_options) + admin_layout.append(html.Div(children=action_status_layout)) return admin_layout diff --git a/ckg/report_manager/pages/homePage.py b/ckg/report_manager/pages/homePage.py index 9d1fcccf..88adacb7 100644 --- a/ckg/report_manager/pages/homePage.py +++ b/ckg/report_manager/pages/homePage.py @@ -3,8 +3,13 @@ import pandas as pd from dash import html, dcc, Output, Input +from ckg import ckg_utils from ckg.report_manager.apps import homepageStats as hpstats +ckg_config = ckg_utils.read_ckg_config() +log_config = ckg_config['report_manager_log'] +logger = ckg_utils.setup_logging(log_config, key="homePage") + title = "CKG homepage" subtitle = "Database Stats" description = "" @@ -13,11 +18,10 @@ def layout(): + logger.info("Load home page") session_cookie = flask.request.cookies.get('custom-auth-session') logged_in = session_cookie is not None - print(logged_in) - if logged_in == False: return html.Div(["Please ", dcc.Link("login", href="/apps/loginPage"), " to continue"]) else: @@ -86,7 +90,6 @@ def 
update_db_date(df): ], [Input("db_stats_df", "data")]) def number_panel_update(df): - print("Update func") updates = [] if 'projects' in df: projects = pd.read_json(df['projects'], orient='records') diff --git a/ckg/report_manager/pages/projectPage.py b/ckg/report_manager/pages/projectPage.py index de467b9d..998a26a6 100644 --- a/ckg/report_manager/pages/projectPage.py +++ b/ckg/report_manager/pages/projectPage.py @@ -11,6 +11,10 @@ from ckg.report_manager import project from ckg.report_manager.worker import generate_project_report +ckg_config = ckg_utils.read_ckg_config() +log_config = ckg_config['report_manager_log'] +logger = ckg_utils.setup_logging(log_config, key="project") + title = "Project details" subtitle = "" description = "" @@ -19,14 +23,7 @@ def layout(project_id="P0000001", force=0): - print(project_id) - print(force) - session_id = project_id + datetime.now().strftime('%Y%m-%d%H-%M%S-') + str(uuid4()) - - # inital_layout = [html.H1(children=title), - # html.H2(children=subtitle), - # html.Div(children=description)] project_layout = build_page(project_id, force, session_id) return project_layout @@ -38,7 +35,6 @@ def build_page(project_id, force, session_id): creates a designated tab. A button to download the entire project and report is added. """ - print("Build page") config_files = {} tmp_dir = ckg_utils.read_ckg_config(key='tmp_directory') if os.path.exists(tmp_dir): @@ -47,30 +43,20 @@ def build_page(project_id, force, session_id): config_files = {f.split('.')[0]: os.path.join(directory, f) for f in os.listdir(directory) if os.path.isfile(os.path.join(directory, f))} - print("Finished zip") - result = generate_project_report.apply_async(args=[project_id, config_files, force], task_id='generate_report' + session_id, queue='compute') result_output = result.get() - print("Project result") - print(result_output == None) - print(result_output) p = project.Project(project_id, datasets={}, knowledge=None, report={}, configuration_files=config_files) p.build_project(False) - print("Get project") - if p.name is not None: title = "Project: {}".format(p.name) plots = p.show_report("app") - print("Plots:") - print(plots) p = None tabs = [] buttons = build_header(project_id, session_id, title) - print("build header") layout = [] layout.append(buttons) @@ -81,7 +67,6 @@ def build_page(project_id, force, session_id): tabs.append(tab) lc = dcc.Tabs(tabs) layout.append(lc) - print(layout) return layout diff --git a/ckg/report_manager/project.py b/ckg/report_manager/project.py index aa386fe8..1b7feeb7 100644 --- a/ckg/report_manager/project.py +++ b/ckg/report_manager/project.py @@ -385,13 +385,16 @@ def load_project_data(self): self.update_dataset({data_type: dataset}) def build_project(self, force=False): + logger.info(f"Build project with id={self._identifier} and force={force} started") if self.check_report_exists() and not force: + logger.info("load existing project report") self.load_project_report() elif force: self.report = {} self.datasets = {} if len(self.report) == 0 or len(self.datasets) == 0: + logger.info("recreate project report") project_info = self.query_data() if len(project_info) > 0: self.set_attributes(project_info) @@ -441,6 +444,7 @@ def build_project(self, force=False): else: logger.error("Project {} could not be built. Error retrieving information for this project or no information associated to this project".format(self.identifier)) print("Project {} could not be built. 
Error retrieving information for this project or no information associated to this project".format(self.identifier)) + logger.info(f"Build project with id={self._identifier} finished") def get_projects_overlap(self, project_info): if 'overlap' in project_info: @@ -626,7 +630,7 @@ def save_project_report(self): self.save_project_datasets_reports() self.knowledge.save_report(directory) - print('save report', time.time() - start) + logger.info(f"Saving project report finished in {time.time() - start}") def save_project_datasets_reports(self): start = time.time() @@ -637,7 +641,7 @@ def save_project_datasets_reports(self): if isinstance(dataset, Dataset): dataset.save_report(dataset_directory) dataset = None - print('save dataset report', time.time() - start) + logger.info(f"Saving project datasets reports finished in {time.time() - start}") def save_project(self): directory = os.path.join(self.get_report_directory(), "Project information") diff --git a/requirements.txt b/requirements.txt index 5852fd05..5e0eb74e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -23,7 +23,6 @@ certifi==2022.9.24 cffi==1.15.1 chardet==3.0.4 chart-studio==1.0.0 -CKG==1.0.0 click==8.1.3 click-didyoumean==0.3.0 click-plugins==1.1.1 @@ -36,7 +35,7 @@ cycler==0.11.0 cyjupyter==0.2.0 dash==2.7.0 dash-core-components==2.0.0 -dash-cytoscape==0.3.0 +dash-cytoscape==0.2.0 dash-html-components==2.0.0 dash-table==5.0.0 debugpy==1.6.3 diff --git a/setup.py b/setup.py index ee80c676..2be6382e 100644 --- a/setup.py +++ b/setup.py @@ -14,7 +14,6 @@ class PreInstallCommand(install): """Pre-installation for install mode.""" def run(self): - # check_call("pip install -r requirements.txt".split()) check_call("pip install -r requirements.txt".split()) ckg.init.installer_script() install.run(self) @@ -24,7 +23,6 @@ class PreDevelopCommand(develop): """Pre-installation for install mode.""" def run(self): - # check_call("pip install -r requirements.txt".split()) check_call("pip install -r requirements.txt".split()) ckg.init.installer_script() develop.run(self) @@ -45,7 +43,7 @@ def run(self): 'install': PreInstallCommand, }, entry_points={'console_scripts': [ - 'ckg_app=ckg.report_manager.app:main', + 'ckg_app=ckg.report_manager.index:main', 'ckg_debug=ckg.debug:main', 'ckg_build=ckg.graphdb_builder.builder.builder:run_full_update', 'ckg_update_textmining=ckg.graphdb_builder.builder.builder:update_textmining']}, From 03606947aa1ed23fa95fa0e35fb66894da03b6d7 Mon Sep 17 00:00:00 2001 From: Nils Krehl Date: Mon, 14 Nov 2022 22:16:11 +0100 Subject: [PATCH 4/6] ngnix bad gateway fix --- ckg/report_manager/index.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/ckg/report_manager/index.py b/ckg/report_manager/index.py index ea50338d..0c533b60 100644 --- a/ckg/report_manager/index.py +++ b/ckg/report_manager/index.py @@ -11,7 +11,7 @@ from ckg import ckg_utils from ckg.graphdb_builder import builder_utils from ckg.report_manager import utils -from ckg.report_manager.app import application, server +from ckg.report_manager.app import server as application from ckg.report_manager.worker import run_minimal_update_task, \ run_full_update_task @@ -72,7 +72,7 @@ def update_authentication_status(_): style={'position': 'absolute', 'right': '0px'}, id='login'), href="/apps/loginPage") -@server.route('/apps/login', methods=['POST', 'GET']) +@application.route('/apps/login', methods=['POST', 'GET']) def route_login(): data = flask.request.form username = data.get('username') @@ -88,7 +88,7 @@ def 
route_login(): return rep -@server.route('/apps/logout', methods=['POST']) +@application.route('/apps/logout', methods=['POST']) def route_logout(): # Redirect back to the index and remove the session cookie. rep = flask.redirect('/') @@ -96,7 +96,7 @@ def route_logout(): return rep -@server.route('/create_user', methods=['POST', 'GET']) +@application.route('/create_user', methods=['POST', 'GET']) def route_create_user(): data = flask.request.form name = data.get('name') @@ -130,7 +130,7 @@ def route_create_user(): return rep -@server.route('/update_minimal', methods=['POST', 'GET']) +@application.route('/update_minimal', methods=['POST', 'GET']) def route_minimal_update(): session_cookie = flask.request.cookies.get('custom-auth-session') username = session_cookie.split('_')[0] @@ -143,7 +143,7 @@ def route_minimal_update(): return rep -@server.route('/update_full', methods=['POST', 'GET']) +@application.route('/update_full', methods=['POST', 'GET']) def route_full_update(): session_cookie = flask.request.cookies.get('custom-auth-session') data = flask.request.form @@ -158,19 +158,19 @@ def route_full_update(): return rep -@server.route('/downloads/') +@application.route('/downloads/') def route_report_url(value): uri = os.path.join(ckg_config['downloads_directory'], value + '.zip') return flask.send_file(uri, download_name=value + '.zip', as_attachment=True, max_age=-1) -@server.route('/example_files') +@application.route('/example_files') def route_example_files_url(): uri = os.path.join(ckg_config['data_directory'], 'example_files.zip') return flask.send_file(uri, download_name='example_files.zip', as_attachment=True, max_age=-1) -@server.route('/apps/templates') +@application.route('/apps/templates') def serve_static(value): cwd = os.path.dirname(os.path.abspath(__file__)) directory = os.path.join(cwd, 'apps/templates/') @@ -182,7 +182,7 @@ def serve_static(value): return flask.send_file(url, download_name=f"{value}.zip", as_attachment=True, max_age=-1) -@server.route('/tmp/') +@application.route('/tmp/') def route_upload_url(value): page_id, project_id = value.split('_') directory = ckg_config['tmp_directory'] From cad5b9830e6ba88a51779be5a7f6d43ee7e11534 Mon Sep 17 00:00:00 2001 From: Nils Krehl Date: Tue, 15 Nov 2022 09:06:20 +0100 Subject: [PATCH 5/6] fix loading spinner --- ckg/report_manager/app.py | 192 ++++++++++++++++++++++++++++++++++-- ckg/report_manager/index.py | 170 +------------------------------ 2 files changed, 185 insertions(+), 177 deletions(-) diff --git a/ckg/report_manager/app.py b/ckg/report_manager/app.py index 9573c86f..25b22135 100644 --- a/ckg/report_manager/app.py +++ b/ckg/report_manager/app.py @@ -1,30 +1,202 @@ import os +from datetime import datetime +from uuid import uuid4 import dash import flask import redis +from dash import html, dcc, Output, Input -server = flask.Flask('app') +import ckg.report_manager.user as user +from ckg import ckg_utils +from ckg.graphdb_builder import builder_utils +from ckg.report_manager import utils +from ckg.report_manager.worker import run_minimal_update_task, \ + run_full_update_task + +ckg_config = ckg_utils.read_ckg_config() +log_config = ckg_config['report_manager_log'] +logger = builder_utils.setup_logging(log_config, key="app") +config = builder_utils.setup_config('builder') +separator = config["separator"] + +flask_server = flask.Flask('app') cwd = os.path.dirname(os.path.abspath(__file__)) assets_path = os.path.join(cwd, 'assets') os.chdir(cwd) pages_path = "./pages" -application = dash.Dash("app", server=server, 
assets_folder=assets_path, - external_stylesheets=[assets_path + "custom.css"], - meta_tags=[{"name": "viewport", "content": "width=device-width, initial-scale=1"}], - use_pages=True, - pages_folder=pages_path) +app = dash.Dash("app", server=flask_server, assets_folder=assets_path, + external_stylesheets=[assets_path + "custom.css"], + meta_tags=[{"name": "viewport", "content": "width=device-width, initial-scale=1"}], + use_pages=True, + pages_folder=pages_path) r = redis.StrictRedis.from_url('redis://localhost:6379') with open(os.path.join(assets_path, "app_template.html"), 'r', encoding='utf8') as f: template = f.read() -application.index_string = template -application.scripts.config.serve_locally = False -application.config.suppress_callback_exceptions = True +app.index_string = template +app.scripts.config.serve_locally = False +app.config.suppress_callback_exceptions = True external_js = ["http://code.jquery.com/jquery-3.4.1.min.js"] for js in external_js: - application.scripts.append_script({"external_url": js}) + app.scripts.append_script({"external_url": js}) + +app.layout = html.Div(children=[ + html.Div(id="user-status-header"), + html.Hr(), + + dcc.Loading(children=[html.Div([dcc.Location(id='url', refresh=False), + html.Div(id='page-content', + style={'padding-top': 10}, + className='container-fluid'), + dash.page_container])], + style={'text-align': 'center', + 'top': '50%', + 'left': '50%', + 'height': '250px'}, + type='cube', color='#2b8cbe'), +]) + + +@dash.callback( + Output("user-status-header", "children"), + Input("url", "pathname"), +) +def update_authentication_status(_): + session_cookie = flask.request.cookies.get('custom-auth-session') + logged_in = session_cookie is not None + if logged_in: + return dcc.Link([html.Form([html.Button('Logout', type='submit')], action='/apps/logout', method='post', + style={'position': 'absolute', 'right': '0px'}, id='logout')], + href="apps/logoutPage") + return dcc.Link(html.Form([html.Button('Login', type='submit')], + style={'position': 'absolute', 'right': '0px'}, id='login'), href="/apps/loginPage") + + +@flask_server.route('/apps/login', methods=['POST', 'GET']) +def route_login(): + data = flask.request.form + username = data.get('username') + password = data.get('password') + if not username or not password: + flask.abort(401) + elif not user.User(username).verify_password(password): + return flask.redirect('/login_error') + else: + rep = flask.redirect('/') + rep.set_cookie('custom-auth-session', + username + '_' + datetime.now().strftime('%Y%m-%d%H-%M%S-') + str(uuid4())) + return rep + + +@flask_server.route('/apps/logout', methods=['POST']) +def route_logout(): + # Redirect back to the index and remove the session cookie. 
+ rep = flask.redirect('/') + rep.set_cookie('custom-auth-session', '', expires=0) + return rep + + +@flask_server.route('/create_user', methods=['POST', 'GET']) +def route_create_user(): + data = flask.request.form + name = data.get('name') + surname = data.get('surname') + affiliation = data.get('affiliation') + acronym = data.get('acronym') + email = data.get('email') + alt_email = data.get('alt_email') + phone = data.get('phone') + uname = name[0] + surname + username = uname + + registered = 'error_exists' + iter = 1 + while registered == 'error_exists': + u = user.User(username=username.lower(), name=name, surname=surname, affiliation=affiliation, acronym=acronym, + phone=phone, email=email, alternative_email=alt_email) + registered = u.register() + if registered is None: + rep = flask.redirect('/apps/admin?error_new_user={}'.format('Failed Database')) + elif registered == 'error_exists': + username = uname + str(iter) + iter += 1 + elif registered == 'error_email': + rep = flask.redirect('/apps/admin?error_new_user={}'.format('Email already registered')) + elif registered == 'error_database': + rep = flask.redirect('/apps/admin?error_new_user={}'.format('User could not be saved in the database')) + else: + rep = flask.redirect('/apps/admin?new_user={}'.format(username)) + + return rep + + +@flask_server.route('/update_minimal', methods=['POST', 'GET']) +def route_minimal_update(): + session_cookie = flask.request.cookies.get('custom-auth-session') + username = session_cookie.split('_')[0] + internal_id = datetime.now().strftime('%Y%m-%d%H-%M%S-') + result = run_minimal_update_task.apply_async(args=[username], task_id='run_minimal_' + session_cookie + internal_id, + queue='update') + + rep = flask.redirect('/dashs/admin?running=minimal') + + return rep + + +@flask_server.route('/update_full', methods=['POST', 'GET']) +def route_full_update(): + session_cookie = flask.request.cookies.get('custom-auth-session') + data = flask.request.form + download = data.get('dwn-radio') == 'true' + username = session_cookie.split('_')[0] + internal_id = datetime.now().strftime('%Y%m-%d%H-%M%S-') + result = run_full_update_task.apply_async(args=[username, download], + task_id='run_full_' + session_cookie + internal_id, queue='update') + + rep = flask.redirect('/apps/admin/running=full') + + return rep + + +@flask_server.route('/downloads/') +def route_report_url(value): + uri = os.path.join(ckg_config['downloads_directory'], value + '.zip') + return flask.send_file(uri, download_name=value + '.zip', as_attachment=True, max_age=-1) + + +@flask_server.route('/example_files') +def route_example_files_url(): + uri = os.path.join(ckg_config['data_directory'], 'example_files.zip') + return flask.send_file(uri, download_name='example_files.zip', as_attachment=True, max_age=-1) + + +@flask_server.route('/apps/templates') +def serve_static(value): + cwd = os.path.dirname(os.path.abspath(__file__)) + directory = os.path.join(cwd, 'apps/templates/') + filename = os.path.join(directory, value) + url = filename + '.zip' + if not os.path.isfile(url): + utils.compress_directory(filename, os.path.join(directory, 'files'), compression_format='zip') + + return flask.send_file(url, download_name=f"{value}.zip", as_attachment=True, max_age=-1) + + +@flask_server.route('/tmp/') +def route_upload_url(value): + page_id, project_id = value.split('_') + directory = ckg_config['tmp_directory'] + filename = os.path.join(directory, 'Uploaded_files_' + project_id) + url = filename + '.zip' + + return flask.send_file(url, 
download_name=filename.split('/')[-1] + '.zip', as_attachment=True, + max_age=-1) + + +def start_app(): + app.run_server(debug=False, host='0.0.0.0', port="8051") diff --git a/ckg/report_manager/index.py b/ckg/report_manager/index.py index 0c533b60..0433ff44 100644 --- a/ckg/report_manager/index.py +++ b/ckg/report_manager/index.py @@ -1,24 +1,14 @@ import os import subprocess -from datetime import datetime -from uuid import uuid4 -import dash -import flask -from dash import html, dcc, Output, Input - -import ckg.report_manager.user as user from ckg import ckg_utils from ckg.graphdb_builder import builder_utils -from ckg.report_manager import utils -from ckg.report_manager.app import server as application -from ckg.report_manager.worker import run_minimal_update_task, \ - run_full_update_task +from ckg.report_manager.app import start_app try: ckg_config = ckg_utils.read_ckg_config() log_config = ckg_config['report_manager_log'] - logger = builder_utils.setup_logging(log_config, key="app") + logger = builder_utils.setup_logging(log_config, key="index") config = builder_utils.setup_config('builder') separator = config["separator"] except Exception as err: @@ -36,161 +26,7 @@ def main(): logger.info("Ready to call {} ".format(celery_cmdline)) subprocess.Popen(celery_cmdline) logger.info("Done calling {} ".format(celery_cmdline)) - application.run_server(debug=False, host='0.0.0.0') - - -application.layout = html.Div(children=[ - - html.Div(id="user-status-header"), - html.Hr(), - - dcc.Loading(children=[html.Div([dcc.Location(id='url', refresh=False), - html.Div(id='page-content', - style={'padding-top': 10}, - className='container-fluid'), - dash.page_container])], - style={'text-align': 'center', - 'top': '50%', - 'left': '50%', - 'height': '250px'}, - type='cube', color='#2b8cbe'), -]) - - -@dash.callback( - Output("user-status-header", "children"), - Input("url", "pathname"), -) -def update_authentication_status(_): - session_cookie = flask.request.cookies.get('custom-auth-session') - logged_in = session_cookie is not None - if logged_in: - return dcc.Link([html.Form([html.Button('Logout', type='submit')], action='/apps/logout', method='post', - style={'position': 'absolute', 'right': '0px'}, id='logout')], - href="apps/logoutPage") - return dcc.Link(html.Form([html.Button('Login', type='submit')], - style={'position': 'absolute', 'right': '0px'}, id='login'), href="/apps/loginPage") - - -@application.route('/apps/login', methods=['POST', 'GET']) -def route_login(): - data = flask.request.form - username = data.get('username') - password = data.get('password') - if not username or not password: - flask.abort(401) - elif not user.User(username).verify_password(password): - return flask.redirect('/login_error') - else: - rep = flask.redirect('/') - rep.set_cookie('custom-auth-session', - username + '_' + datetime.now().strftime('%Y%m-%d%H-%M%S-') + str(uuid4())) - return rep - - -@application.route('/apps/logout', methods=['POST']) -def route_logout(): - # Redirect back to the index and remove the session cookie. 
-    rep = flask.redirect('/')
-    rep.set_cookie('custom-auth-session', '', expires=0)
-    return rep
-
-
-@application.route('/create_user', methods=['POST', 'GET'])
-def route_create_user():
-    data = flask.request.form
-    name = data.get('name')
-    surname = data.get('surname')
-    affiliation = data.get('affiliation')
-    acronym = data.get('acronym')
-    email = data.get('email')
-    alt_email = data.get('alt_email')
-    phone = data.get('phone')
-    uname = name[0] + surname
-    username = uname
-
-    registered = 'error_exists'
-    iter = 1
-    while registered == 'error_exists':
-        u = user.User(username=username.lower(), name=name, surname=surname, affiliation=affiliation, acronym=acronym,
-                      phone=phone, email=email, alternative_email=alt_email)
-        registered = u.register()
-        if registered is None:
-            rep = flask.redirect('/apps/admin?error_new_user={}'.format('Failed Database'))
-        elif registered == 'error_exists':
-            username = uname + str(iter)
-            iter += 1
-        elif registered == 'error_email':
-            rep = flask.redirect('/apps/admin?error_new_user={}'.format('Email already registered'))
-        elif registered == 'error_database':
-            rep = flask.redirect('/apps/admin?error_new_user={}'.format('User could not be saved in the database'))
-        else:
-            rep = flask.redirect('/apps/admin?new_user={}'.format(username))
-
-    return rep
-
-
-@application.route('/update_minimal', methods=['POST', 'GET'])
-def route_minimal_update():
-    session_cookie = flask.request.cookies.get('custom-auth-session')
-    username = session_cookie.split('_')[0]
-    internal_id = datetime.now().strftime('%Y%m-%d%H-%M%S-')
-    result = run_minimal_update_task.apply_async(args=[username], task_id='run_minimal_' + session_cookie + internal_id,
-                                                 queue='update')
-
-    rep = flask.redirect('/dashs/admin?running=minimal')
-
-    return rep
-
-
-@application.route('/update_full', methods=['POST', 'GET'])
-def route_full_update():
-    session_cookie = flask.request.cookies.get('custom-auth-session')
-    data = flask.request.form
-    download = data.get('dwn-radio') == 'true'
-    username = session_cookie.split('_')[0]
-    internal_id = datetime.now().strftime('%Y%m-%d%H-%M%S-')
-    result = run_full_update_task.apply_async(args=[username, download],
-                                              task_id='run_full_' + session_cookie + internal_id, queue='update')
-
-    rep = flask.redirect('/apps/admin/running=full')
-
-    return rep
-
-
-@application.route('/downloads/<value>')
-def route_report_url(value):
-    uri = os.path.join(ckg_config['downloads_directory'], value + '.zip')
-    return flask.send_file(uri, download_name=value + '.zip', as_attachment=True, max_age=-1)
-
-
-@application.route('/example_files')
-def route_example_files_url():
-    uri = os.path.join(ckg_config['data_directory'], 'example_files.zip')
-    return flask.send_file(uri, download_name='example_files.zip', as_attachment=True, max_age=-1)
-
-
-@application.route('/apps/templates<value>')
-def serve_static(value):
-    cwd = os.path.dirname(os.path.abspath(__file__))
-    directory = os.path.join(cwd, 'apps/templates/')
-    filename = os.path.join(directory, value)
-    url = filename + '.zip'
-    if not os.path.isfile(url):
-        utils.compress_directory(filename, os.path.join(directory, 'files'), compression_format='zip')
-
-    return flask.send_file(url, download_name=f"{value}.zip", as_attachment=True, max_age=-1)
-
-
-@application.route('/tmp/<value>')
-def route_upload_url(value):
-    page_id, project_id = value.split('_')
-    directory = ckg_config['tmp_directory']
-    filename = os.path.join(directory, 'Uploaded_files_' + project_id)
-    url = filename + '.zip'
-
-    return flask.send_file(url,
download_name=filename.split('/')[-1] + '.zip', as_attachment=True, - max_age=-1) + start_app() if __name__ == '__main__': From 0a0dc6f714d1337a7b15162a730ca83883be58c8 Mon Sep 17 00:00:00 2001 From: Nils Krehl Date: Tue, 15 Nov 2022 14:51:22 +0100 Subject: [PATCH 6/6] minor fixes and logging --- ckg/report_manager/app.py | 25 +++++++++++++++++++++---- ckg/report_manager/index.py | 33 --------------------------------- ckg/report_manager/project.py | 1 + ckg/report_manager/worker.py | 13 +++++++++---- resources/uwsgi.ini | 2 +- setup.py | 2 +- 6 files changed, 33 insertions(+), 43 deletions(-) delete mode 100644 ckg/report_manager/index.py diff --git a/ckg/report_manager/app.py b/ckg/report_manager/app.py index 25b22135..45ca145c 100644 --- a/ckg/report_manager/app.py +++ b/ckg/report_manager/app.py @@ -1,4 +1,5 @@ import os +import subprocess from datetime import datetime from uuid import uuid4 @@ -72,7 +73,7 @@ def update_authentication_status(_): if logged_in: return dcc.Link([html.Form([html.Button('Logout', type='submit')], action='/apps/logout', method='post', style={'position': 'absolute', 'right': '0px'}, id='logout')], - href="apps/logoutPage") + href="/apps/logoutPage") return dcc.Link(html.Form([html.Button('Login', type='submit')], style={'position': 'absolute', 'right': '0px'}, id='login'), href="/apps/loginPage") @@ -143,7 +144,7 @@ def route_minimal_update(): result = run_minimal_update_task.apply_async(args=[username], task_id='run_minimal_' + session_cookie + internal_id, queue='update') - rep = flask.redirect('/dashs/admin?running=minimal') + rep = flask.redirect('/apps/admin/running=minimal') return rep @@ -198,5 +199,21 @@ def route_upload_url(value): max_age=-1) -def start_app(): - app.run_server(debug=False, host='0.0.0.0', port="8051") +def main(): + logger.info("Starting CKG App") + celery_working_dir = os.path.dirname(os.path.abspath(__file__)) + os.chdir(celery_working_dir) + queues = [('creation', 1, 'INFO'), ('compute', 3, 'INFO'), ('update', 1, 'INFO')] + print(type(ckg_config)) + print(ckg_config["log_directory"] + "/celery.log") + for queue, processes, log_level in queues: + celery_cmdline = 'celery -A ckg.report_manager.worker worker --loglevel={} --logfile={} --concurrency={} -E -Q {}'.format( + log_level, ckg_config["log_directory"] + "/celery.log", processes, queue).split(" ") + logger.info("Ready to call {} ".format(celery_cmdline)) + subprocess.Popen(celery_cmdline) + logger.info("Done calling {} ".format(celery_cmdline)) + app.run_server(debug=False, host='0.0.0.0') + + +if __name__ == '__main__': + main() diff --git a/ckg/report_manager/index.py b/ckg/report_manager/index.py deleted file mode 100644 index 0433ff44..00000000 --- a/ckg/report_manager/index.py +++ /dev/null @@ -1,33 +0,0 @@ -import os -import subprocess - -from ckg import ckg_utils -from ckg.graphdb_builder import builder_utils -from ckg.report_manager.app import start_app - -try: - ckg_config = ckg_utils.read_ckg_config() - log_config = ckg_config['report_manager_log'] - logger = builder_utils.setup_logging(log_config, key="index") - config = builder_utils.setup_config('builder') - separator = config["separator"] -except Exception as err: - logger.error("Reading configuration > {}.".format(err)) - - -def main(): - logger.info("Starting CKG App") - celery_working_dir = os.path.dirname(os.path.abspath(__file__)) - os.chdir(celery_working_dir) - queues = [('creation', 1, 'INFO'), ('compute', 3, 'INFO'), ('update', 1, 'INFO')] - for queue, processes, log_level in queues: - celery_cmdline = 
'celery -A ckg.report_manager.worker worker --loglevel={} --concurrency={} -E -Q {}'.format( - log_level, processes, queue).split(" ") - logger.info("Ready to call {} ".format(celery_cmdline)) - subprocess.Popen(celery_cmdline) - logger.info("Done calling {} ".format(celery_cmdline)) - start_app() - - -if __name__ == '__main__': - main() diff --git a/ckg/report_manager/project.py b/ckg/report_manager/project.py index 1b7feeb7..b8dde5d7 100644 --- a/ckg/report_manager/project.py +++ b/ckg/report_manager/project.py @@ -33,6 +33,7 @@ class Project: """ def __init__(self, identifier, configuration_files={}, datasets={}, knowledge=None, report={}): + logger.info(f"Instantiate project with identifier {identifier}") self._identifier = identifier self._queries_file = 'queries/project_cypher.yml' self.configuration_files = configuration_files diff --git a/ckg/report_manager/worker.py b/ckg/report_manager/worker.py index 486709a0..d44a949a 100644 --- a/ckg/report_manager/worker.py +++ b/ckg/report_manager/worker.py @@ -1,19 +1,21 @@ import pandas as pd from celery import Celery -from ckg.report_manager.apps import projectCreation, dataUpload + from ckg.graphdb_builder.builder import builder from ckg.graphdb_connector import connector from ckg.report_manager import project - +from ckg.report_manager.apps import projectCreation, dataUpload celery_app = Celery('create_new_project') celery_app.conf.update(broker_url='redis://127.0.0.1:6379', result_backend='redis://127.0.0.1:6379/0') + @celery_app.task def create_new_project(identifier, data, separator='|'): driver = connector.getGraphDatabaseConnectionConfiguration() - project_result, projectId = projectCreation.create_new_project(driver, identifier, pd.read_json(data), separator=separator) + project_result, projectId = projectCreation.create_new_project(driver, identifier, pd.read_json(data), + separator=separator) if projectId is not None: result = {str(projectId): str(project_result)} else: @@ -25,7 +27,9 @@ def create_new_project(identifier, data, separator='|'): @celery_app.task def create_new_identifiers(project_id, data, directory, filename): driver = connector.getGraphDatabaseConnectionConfiguration() - upload_result = dataUpload.create_experiment_internal_identifiers(driver, project_id, pd.read_json(data, dtype={'subject external_id': object, 'biological_sample external_id': object, 'analytical_sample external_id': object}), directory, filename) + upload_result = dataUpload.create_experiment_internal_identifiers(driver, project_id, pd.read_json(data, dtype={ + 'subject external_id': object, 'biological_sample external_id': object, + 'analytical_sample external_id': object}), directory, filename) res_n = dataUpload.check_samples_in_project(driver, project_id) return {str(project_id): str(upload_result), 'res_n': res_n.to_dict()} @@ -46,6 +50,7 @@ def run_minimal_update_task(username): return {'response': str(response)} + @celery_app.task def run_full_update_task(username, download): response = builder.run_full_update(user=username, download=download) diff --git a/resources/uwsgi.ini b/resources/uwsgi.ini index 567a60ee..ef4c9450 100644 --- a/resources/uwsgi.ini +++ b/resources/uwsgi.ini @@ -2,7 +2,7 @@ plugins-dir = /usr/lib/uwsgi pythonpath = /usr/local/bin/python3 chdir = /CKG/ckg/report_manager -module = index:application +module = app:flask_server uid = nginx gid = nginx logto = /var/log/uwsgi/%n.log diff --git a/setup.py b/setup.py index 2be6382e..f32371c3 100644 --- a/setup.py +++ b/setup.py @@ -43,7 +43,7 @@ def run(self): 'install': 
PreInstallCommand, }, entry_points={'console_scripts': [ - 'ckg_app=ckg.report_manager.index:main', + 'ckg_app=ckg.report_manager.app:main', 'ckg_debug=ckg.debug:main', 'ckg_build=ckg.graphdb_builder.builder.builder:run_full_update', 'ckg_update_textmining=ckg.graphdb_builder.builder.builder:update_textmining']},
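
For context on the new app.py above: it enables Dash 2.x's pages feature (use_pages=True, pages_folder=pages_path) and renders the registered pages through dash.page_container in the app layout. A minimal sketch of what a module placed in the pages folder can look like is given below; the module name, URL path and layout contents are a hypothetical reconstruction for illustration, not the actual file contents.

    # Hypothetical pages/loginPage.py -- sketch of a Dash "pages" module.
    # With use_pages=True, Dash imports every module found in pages_folder at app
    # creation time; register_page() maps this module's layout to a URL path that
    # dash.page_container then renders.
    import dash
    from dash import dcc, html

    dash.register_page(__name__, path='/apps/loginPage')

    # The module-level `layout` is what dash.page_container shows for this path.
    layout = html.Div([
        html.H3('Sign in'),
        # Plain HTML form posting to the Flask endpoint defined in app.py, which
        # verifies the password and sets the 'custom-auth-session' cookie.
        html.Form([
            dcc.Input(name='username', type='text', placeholder='Username'),
            dcc.Input(name='password', type='password', placeholder='Password'),
            html.Button('Login', type='submit'),
        ], action='/apps/login', method='post'),
    ])

Relatedly, the flask.send_file(..., download_name=..., max_age=...) calls in the download routes use the Flask 2.x keyword names for what Flask 1.x called attachment_filename and cache_timeout, which is why those endpoints had to be touched alongside the dependency change.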