Skip to content

Commit

Permalink
Merge pull request #137 from hubmapconsortium/yuanzhou/log
Browse files Browse the repository at this point in the history
Yuanzhou/log
  • Loading branch information
yuanzhou authored Nov 1, 2023
2 parents 448df6e + 9529bea commit b35d711
Show file tree
Hide file tree
Showing 7 changed files with 40 additions and 22 deletions.
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,9 @@ app.cfg

reload_from_neo4j/reload.properties

# Ignore docker env file
hubmap-docker/.env

BUILD
# mounted BUILD/VERSION file
src/BUILD
Expand Down
2 changes: 1 addition & 1 deletion VERSION
Original file line number Diff line number Diff line change
@@ -1 +1 @@
2.3.11
2.3.12
7 changes: 7 additions & 0 deletions hubmap-docker/docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,13 @@ services:
- "./uuid-api/nginx/conf.d:/etc/nginx/conf.d"
networks:
- gateway_hubmap
# Send docker logs to AWS CloudWatch
logging:
driver: awslogs
options:
awslogs-region: us-east-1
awslogs-group: uuid-api-docker-logs
awslogs-stream: ${LOG_STREAM}

networks:
# This is the network created by gateway to enable communication between multiple docker-compose projects
Expand Down
1 change: 1 addition & 0 deletions hubmap-docker/example.env
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
LOG_STREAM=DEV
37 changes: 19 additions & 18 deletions src/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,19 @@
from hubmap_commons.hm_auth import secured
from hubmap_commons.string_helper import isBlank

# Root logger configuration
global logger

# Set logging format and level (default is warning)
# All the API logging is forwarded to the uWSGI server and gets written into the log file `log/uwsgi-uuid-api.log`
# Log rotation is handled via logrotate on the host system with a configuration file
# Do NOT handle log file and rotation via the Python logging to avoid issues with multi-worker processes
logging.basicConfig(format='[%(asctime)s] %(levelname)s in %(module)s: %(message)s', level=logging.DEBUG, datefmt='%Y-%m-%d %H:%M:%S')

# Use `getLogger()` instead of `getLogger(__name__)` to apply the config to the root logger
# will be inherited by the sub-module loggers
logger = logging.getLogger()

# Specify the absolute path of the instance folder and use the config file relative to the instance path
app = Flask(__name__, instance_path=os.path.join(os.path.abspath(os.path.dirname(__file__)), 'instance'),
instance_relative_config=True)
Expand All @@ -21,23 +34,12 @@
except Exception as e:
raise Exception("Failed to get configuration from instance/app.cfg")

LOG_FILE_NAME = "../log/uuid-" + time.strftime("%d-%m-%Y-%H-%M-%S") + ".log"
logger = None
worker = None

@app.before_first_request
def init():
global logger
global worker
try:
logger = logging.getLogger(name='uuid.service')
logger.setLevel(level=logging.INFO)
logFH = logging.FileHandler(filename=LOG_FILE_NAME)
# Set logging format and level (default is warning)
logging_formatter = logging.Formatter(fmt='[%(asctime)s] %(levelname)s in %(module)s:%(lineno)d: %(message)s'
,datefmt='%Y-%m-%d %H:%M:%S')
logFH.setFormatter(logging_formatter)
logger.addHandler(logFH)
logger.info("started")
except Exception as e:
print("Error opening log file during startup")
Expand All @@ -61,7 +63,6 @@ def index():
@app.route('/status', methods=['GET'])
def status():
global worker
global logger

response_data = {
# Use strip() to remove leading and trailing spaces, newlines, and tabs
Expand Down Expand Up @@ -129,7 +130,7 @@ def status():
@secured(has_write=True)
def add_uuid():
global worker
global logger

try:
if request.method == "POST":
if 'entity_count' in request.args:
Expand All @@ -156,7 +157,7 @@ def add_uuid():
@app.route('/uuid/<uuid>', methods=["GET"])
def get_uuid(uuid):
global worker
global logger

try:
if request.method == "GET":
# The info is a pretty print json string
Expand All @@ -181,7 +182,7 @@ def get_uuid(uuid):
@secured(has_read=True)
def is_uuid(uuid):
global worker
global logger

try:
if request.method == "GET":
exists = worker.getIdExists(uuid)
Expand All @@ -200,7 +201,7 @@ def is_uuid(uuid):
@secured(has_read=True)
def get_file_id(file_uuid):
global worker
global logger

try:
file_id_info = worker.getFileIdInfo(file_uuid)
if isinstance(file_id_info, Response): return file_id_info
Expand All @@ -215,7 +216,7 @@ def get_file_id(file_uuid):
@secured(has_read=True)
def get_ancestors(uuid):
global worker
global logger

try:
ancestors = worker.getAncestors(uuid)
if isinstance(ancestors, Response): return ancestors
Expand Down Expand Up @@ -287,7 +288,7 @@ def get_ancestors(uuid):
@secured(has_read=True)
def get_file_info(entity_id):
global worker
global logger

try:
file_info = worker.get_file_info(entity_id)
if isinstance(file_info, Response): return file_info
Expand Down
7 changes: 6 additions & 1 deletion src/requirements.txt
Original file line number Diff line number Diff line change
@@ -1,5 +1,10 @@
Flask==2.1.3

# Flask 2.1.3 installs the latest Werkzeug==3.0.0 (released on 9/30/2023), which causes import issues
# Use a pinned version 2.3.7 (the latest release before 3.0.0) to mitigate temporarily
# Will upgrade Flask to newer version later on across all APIs. 10/3/2023 - Zhou
Werkzeug==2.3.7

# To match the AWS RDS MySQL server 8.0.23
mysql-connector-python==8.0.23

Expand All @@ -10,4 +15,4 @@ requests==2.25.1
# Use the branch name of commons from github for testing new changes made in commons
# Default is master branch specified in docker-compose.yml if not set
# git+https://github.com/hubmapconsortium/commons.git@${COMMONS_BRANCH}#egg=hubmap-commons
hubmap-commons==2.1.8
hubmap-commons==2.1.12
5 changes: 3 additions & 2 deletions src/uwsgi.ini
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,9 @@ chdir = /usr/src/app/src
# Application's callable
module = wsgi:application

# Location of uwsgi log file
logto = /usr/src/app/log/uwsgi-uuid-api.log
# Delegate the logging to the master process
# Send logs to stdout instead of file so docker picks it up and writes to AWS CloudWatch
log-master=true

# Master with 2 worker process (based on CPU number)
master = true
Expand Down

0 comments on commit b35d711

Please sign in to comment.