diff --git a/roles/hxr.postgres-connection/defaults/main.yml b/roles/hxr.postgres-connection/defaults/main.yml
index 82acd4ac8..45bc3fe74 100644
--- a/roles/hxr.postgres-connection/defaults/main.yml
+++ b/roles/hxr.postgres-connection/defaults/main.yml
@@ -1,4 +1,7 @@
-pgc_user:
-  name: "{{ galaxy_user.name }}"
-  home: "{{ galaxy_user.home }}"
-  group_name: "{{ galaxy_group.name | default(galaxy_group) }}"
+pgc_users:
+  - uname: "{{ galaxy_user.name }}"
+    uhome: "{{ galaxy_user.home }}"
+    gname: "{{ galaxy_group.name | default(galaxy_group) }}"
+    pguser: "{{ postgres_user }}"
+    pgpass: "{{ postgres_pass }}"
+    pgdatabase: galaxy
diff --git a/roles/hxr.postgres-connection/tasks/main.yml b/roles/hxr.postgres-connection/tasks/main.yml
index eec7611ce..7cef58153 100644
--- a/roles/hxr.postgres-connection/tasks/main.yml
+++ b/roles/hxr.postgres-connection/tasks/main.yml
@@ -1,20 +1,16 @@
 ---
-- name: Add env vars in bashrc
-  lineinfile:
-    path: "{{ pgc_user.home }}/.bashrc"
-    regexp: "^export {{ item.var }}"
-    line: "export {{ item.var }}='{{ item.val }}'"
-  with_items:
-    - var: PGUSER
-      val: "{{ postgres_user }}"
-    - var: PGHOST
-      val: "{{ postgres_host }}"
+- name: Get all users
+  getent:
+    database: passwd
+    split: ':'

-- name: Copy using the 'content' for inline data
-  copy:
-    content: |
-      {{ postgres_host }}:{{ postgres_port }}:*:{{ postgres_user }}:{{ postgres_pass }}
-    dest: "{{ pgc_user.home }}/.pgpass"
-    mode: 0600
-    owner: "{{ pgc_user.name }}"
-    group: "{{ pgc_user.group_name }}"
+- name: Debug info when user does not exist
+  debug:
+    msg: "INFO: User {{ item.uname }} does not exist"
+  loop: "{{ pgc_users }}"
+  when: (not item.uname in getent_passwd.keys())
+
+- name: Run postgres tasks
+  include_tasks: postgres_tasks.yml
+  loop: "{{ pgc_users }}"
+  when: (item.uname in getent_passwd.keys())
diff --git a/roles/hxr.postgres-connection/tasks/postgres_tasks.yml b/roles/hxr.postgres-connection/tasks/postgres_tasks.yml
new file mode 100644
index 000000000..b7999fb8e
--- /dev/null
+++ b/roles/hxr.postgres-connection/tasks/postgres_tasks.yml
@@ -0,0 +1,26 @@
+---
+- name: Add postgres connection configuration
+  block:
+    - name: Add env vars in bashrc
+      lineinfile:
+        path: "{{ item.uhome }}/.bashrc"
+        regexp: "^export {{ task_item.var }}"
+        line: "export {{ task_item.var }}='{{ task_item.val }}'"
+      with_items:
+        - var: PGUSER
+          val: "{{ item.pguser }}"
+        - var: PGHOST
+          val: "{{ postgres_host }}"
+        - var: PGDATABASE
+          val: "{{ item.pgdatabase }}"
+      loop_control:
+        loop_var: task_item
+
+    - name: Copy using the 'content' for inline data
+      copy:
+        content: |
+          {{ postgres_host }}:{{ postgres_port }}:*:{{ item.pguser }}:{{ item.pgpass }}
+        dest: "{{ item.uhome }}/.pgpass"
+        mode: 0600
+        owner: "{{ item.uname }}"
+        group: "{{ item.gname }}"
diff --git a/roles/usegalaxy-eu.bashrc/defaults/main.yml b/roles/usegalaxy-eu.bashrc/defaults/main.yml
new file mode 100644
index 000000000..468eb06aa
--- /dev/null
+++ b/roles/usegalaxy-eu.bashrc/defaults/main.yml
@@ -0,0 +1,5 @@
+---
+bashrc_users:
+  - uname: "{{ galaxy_user.name }}"
+    uhome: "{{ galaxy_user.home }}"
+    gname: "{{ galaxy_group.name }}"
diff --git a/roles/usegalaxy-eu.bashrc/files/galaxy_jwd.py b/roles/usegalaxy-eu.bashrc/files/galaxy_jwd.py
new file mode 100644
index 000000000..67c969377
--- /dev/null
+++ b/roles/usegalaxy-eu.bashrc/files/galaxy_jwd.py
@@ -0,0 +1,347 @@
+#!/usr/bin/env python
+# Description: Galaxy job's job working directory (JWD) script. It can get you
+# the path of a JWD and can delete JWDs of jobs that failed within the last X days.
+
+import argparse
+import os
+import shutil
+import sys
+from datetime import datetime
+from xml.dom.minidom import parse
+import psycopg2
+
+
+def main():
+    """
+    JWD script
+    1. Can get you the path of a JWD
+    2. Can delete JWDs of jobs that failed within the last X days
+    """
+    parser = argparse.ArgumentParser()
+    subparsers = parser.add_subparsers(
+        dest="subcommand",
+        required=True,
+        title="""
+        Use one of the following subcommands:
+        get_jwd: Get JWD path of a given Galaxy job id
+        clean_jwds: Clean JWDs of jobs that failed within the last X days
+
+        The following ENVs (same as gxadmin's) should be set:
+        GALAXY_CONFIG_FILE: Path to the galaxy.yml file
+        GALAXY_LOG_DIR: Path to the Galaxy log directory
+        PGDATABASE: Name of the Galaxy database
+        PGUSER: Galaxy database user
+        PGHOST: Galaxy database host
+        We also need a ~/.pgpass file (same as gxadmin's) in format:
+        <pg_host>:5432:*:<pg_user>:<pg_password>
+
+        Example:
+        python galaxy_jwd.py get_jwd 12345678
+        python galaxy_jwd.py clean_jwds --dry_run True --days 30
+        """,
+    )
+
+    # Parser for the get_jwd subcommand
+    get_jwd_parser = subparsers.add_parser("get_jwd", help="Get JWD path of a given Galaxy job id")
+    get_jwd_parser.add_argument(
+        "job_id",
+        help="Galaxy job id",
+    )
+
+    # Parser for the clean_jwds subcommand
+    clean_jwds_parser = subparsers.add_parser("clean_jwds", help="Clean JWDs of jobs that failed within the last X days")
+    clean_jwds_parser.add_argument(
+        "--dry_run",
+        help="If True, do NOT delete JWDs; only print them (default: True)",
+        default=True,
+    )
+    clean_jwds_parser.add_argument(
+        "--days",
+        help="Number of days within which the jobs were last updated to be considered for deletion (default: 5)",
+        default=5,
+    )
+
+    args = parser.parse_args(args=None if sys.argv[1:] else ["--help"])
+
+    # Check if environment variables are set
+    if not os.environ.get("GALAXY_CONFIG_FILE"):
+        raise ValueError("Please set ENV GALAXY_CONFIG_FILE")
+    if not os.environ.get("GALAXY_LOG_DIR"):
+        raise ValueError("Please set ENV GALAXY_LOG_DIR")
+    if not os.environ.get("PGDATABASE"):
+        raise ValueError("Please set ENV PGDATABASE")
+    if not os.environ.get("PGUSER"):
+        raise ValueError("Please set ENV PGUSER")
+    if not os.environ.get("PGHOST"):
+        raise ValueError("Please set ENV PGHOST")
+
+    # Check if ~/.pgpass file exists and is not empty
+    if not os.path.isfile(os.path.expanduser("~/.pgpass")) or os.stat(os.path.expanduser("~/.pgpass")).st_size == 0:
+        raise ValueError("Please create a ~/.pgpass file in format: <pg_host>:5432:*:<pg_user>:<pg_password>")
+
+    # Check if the given galaxy.yml file exists
+    if not os.path.isfile(os.environ.get("GALAXY_CONFIG_FILE")):
+        raise ValueError(f"The given galaxy.yml file {os.environ.get('GALAXY_CONFIG_FILE')} does not exist")
+
+    # Set variables
+    galaxy_config_file = os.environ.get("GALAXY_CONFIG_FILE").strip()
+    galaxy_log_dir = os.environ.get("GALAXY_LOG_DIR").strip()
+    db_name = os.environ.get("PGDATABASE").strip()
+    db_user = os.environ.get("PGUSER").strip()
+    db_host = os.environ.get("PGHOST").strip()
+    db_password = extract_password_from_pgpass(pgpass_file=os.path.expanduser("~/.pgpass"))
+    object_store_conf = get_object_store_conf_path(galaxy_config_file)
+    backends = parse_object_store(object_store_conf)
+
+    # Connect to Galaxy database
+    db = Database(
+        dbname=db_name,
+        dbuser=db_user,
+        dbhost=db_host,
+        dbpassword=db_password,
+    )
+
+    # For the get_jwd subcommand
+    if args.subcommand == "get_jwd":
+        job_id = args.job_id
+        object_store_id = db.get_object_store_id(job_id)
+        jwd_path = decode_path(job_id, [object_store_id], backends)
+
+        # Check that the JWD path exists
+        if jwd_path:
+            print(jwd_path)
+        else:
+            print(f"INFO: Job working directory (of {job_id}) does not exist")
+            sys.exit(1)
+
+    # For the clean_jwds subcommand
+    if args.subcommand == "clean_jwds":
+        # Check if the given Galaxy log directory exists
+        if not os.path.isdir(galaxy_log_dir):
+            raise ValueError(f"The given Galaxy log directory {galaxy_log_dir} does not exist")
+
+        # Set variables
+        dry_run = args.dry_run
+        days = args.days
+        jwd_cleanup_log = f"{galaxy_log_dir}/jwd_cleanup" f"_{datetime.now().strftime('%d_%m_%Y-%I_%M_%S')}.log"
+        failed_jobs = db.get_failed_jobs(days=days)
+
+        # Delete JWD folders if dry_run is False
+        # Log the folders that will be deleted
+        if not dry_run:
+            with open(jwd_cleanup_log, "w") as jwd_log:
+                jwd_log.write(
+                    "The following job working directories (JWDs) belonging "
+                    "to the failed jobs are deleted\nJob id: JWD path\n"
+                )
+                for job_id, metadata in failed_jobs.items():
+                    # Delete JWD folders older than X days
+                    jwd_path = decode_path(job_id, metadata, backends)
+                    if jwd_path:
+                        jwd_log.write(f"{job_id}: {jwd_path}\n")
+                        delete_jwd(jwd_path)
+        else:
+            # Print JWD folders older than X days
+            for job_id, metadata in failed_jobs.items():
+                jwd_path = decode_path(job_id, metadata, backends)
+                if jwd_path:
+                    print(f"{job_id}: {jwd_path}")
+
+
+def extract_password_from_pgpass(pgpass_file):
+    """Extract the password from the ~/.pgpass file
+
+    The ~/.pgpass file should have the following format:
+    <pg_host>:5432:*:<pg_user>:<pg_password>
+
+    Args:
+        pgpass_file (str): Path to the ~/.pgpass file
+
+    Returns:
+        str: Password for the given pg_host
+    """
+    pgpass_format = "<pg_host>:5432:*:<pg_user>:<pg_password>"
+    with open(pgpass_file, "r") as pgpass:
+        for line in pgpass:
+            if line.startswith(os.environ.get("PGHOST")):
+                return line.split(":")[4].strip()
+        else:
+            raise ValueError(
+                f"Please add the password for '{os.environ.get('PGHOST')}' to the ~/.pgpass file in format: {pgpass_format}"
+            )
+
+
+def get_object_store_conf_path(galaxy_config_file):
+    """Get the path to the object_store_conf.xml file
+
+    Args:
+        galaxy_config_file (str): Path to the galaxy.yml file
+
+    Returns:
+        str: Path to the object_store_conf.xml file
+    """
+    object_store_conf = ""
+    with open(galaxy_config_file, "r") as config:
+        for line in config:
+            if line.strip().startswith("object_store_config_file"):
+                object_store_conf = line.split(":")[1].strip()
+
+    # Check if the object_store_conf.xml file exists
+    if not os.path.isfile(object_store_conf):
+        raise ValueError(f"{object_store_conf} does not exist")
+
+    return object_store_conf
+
+
+def parse_object_store(object_store_conf):
+    """Get the path of type 'job_work' from the extra_dir's for each backend
+
+    Args:
+        object_store_conf (str): Path to the object_store_conf.xml file
+
+    Returns:
+        dict: Dictionary of backend id and path of type 'job_work'
+    """
+    dom = parse(object_store_conf)
+    backends = {}
+    for backend in dom.getElementsByTagName("backend"):
+        backend_id = backend.getAttribute("id")
+        backends[backend_id] = {}
+        # Get the extra_dir's path for each backend if type is "job_work"
+        for extra_dir in backend.getElementsByTagName("extra_dir"):
+            if extra_dir.getAttribute("type") == "job_work":
+                backends[backend_id] = extra_dir.getAttribute("path")
+    return backends
+
+
+def decode_path(job_id, metadata, backends_dict):
+    """Decode the path of a JWD and check that the path exists
+
+    Args:
+        job_id (int): Job id
+        metadata (list): List of object_store_id and update_time
+        backends_dict (dict): Dictionary of backend id and path of type 'job_work'
+
+    Returns:
+        str: Path to the JWD
+    """
+    job_id = str(job_id)
+
+    # Check if object_store_id exists in our object store config
+    if metadata[0] not in backends_dict.keys():
+        raise ValueError(f"Object store id '{metadata[0]}' does not exist in the object_store_conf.xml file")
+
+    jwd_path = f"{backends_dict[metadata[0]]}/0{job_id[0:2]}/{job_id[2:5]}/{job_id}"
+
+    # Validate that the path is a JWD
+    # It is a JWD if the following conditions are true:
+    # 1. Check if tool_script.sh exists
+    # 2. Check if directories 'inputs' and 'outputs' exist
+    # 3. Additionally, we can also try and find the file '__instrument_core_epoch_end'
+    #    and compare the timestamp in that with the 'update_time' (metadata[1]) of the job.
+    if (
+        os.path.exists(jwd_path)
+        and os.path.exists(f"{jwd_path}/tool_script.sh")
+        and os.path.exists(f"{jwd_path}/inputs")
+        and os.path.exists(f"{jwd_path}/outputs")
+    ):
+        return jwd_path
+    else:
+        return None
+
+
+def delete_jwd(jwd_path):
+    """Delete JWD folder and all its contents
+
+    Args:
+        jwd_path (str): Path to the JWD folder
+    """
+    try:
+        shutil.rmtree(jwd_path)
+    except OSError as e:
+        print(f"Error deleting JWD: {jwd_path} : {e.strerror}")
+
+
+class Database:
+    """Class to connect to the Galaxy database and run queries against it
+
+    Args:
+        dbname (str): Name of the database
+        dbuser (str): Name of the database user
+        dbhost (str): Hostname of the database
+        dbpassword (str): Password of the database user
+    """
+
+    def __init__(self, dbname, dbuser, dbhost, dbpassword):
+        try:
+            self.conn = psycopg2.connect(dbname=dbname, user=dbuser, host=dbhost, password=dbpassword)
+        except psycopg2.OperationalError as e:
+            print(f"Unable to connect to database: {e}")
+
+    def get_failed_jobs(self, days):
+        """Get failed jobs from the DB
+
+        Args:
+            days (int): Number of days to look back for failed jobs
+
+        Returns:
+            dict: Dictionary with job_id as key and [object_store_id, update_time] as value
+        """
+        cur = self.conn.cursor()
+        cur.execute(
+            f"""
+            SELECT id, object_store_id, update_time
+            FROM job
+            WHERE state = 'error'
+                AND update_time IS NOT NULL
+                AND object_store_id IS NOT NULL
+                AND update_time > NOW() - INTERVAL '{days} days'
+            """
+        )
+        failed_jobs = cur.fetchall()
+        cur.close()
+        self.conn.close()
+
+        # Create a dictionary with job_id as key and [object_store_id, update_time] as value
+        failed_jobs_dict = {}
+        for job_id, object_store_id, update_time in failed_jobs:
+            failed_jobs_dict[job_id] = [object_store_id, update_time]
+
+        if not failed_jobs_dict:
+            print(f"No failed jobs found within the last {days} days")
+            sys.exit(1)
+
+        return failed_jobs_dict
+
+    def get_object_store_id(self, job_id):
+        """Get object_store_id for a job id
+
+        Args:
+            job_id (int): Job id
+
+        Returns:
+            object_store_id (str): Object store id
+        """
+        cur = self.conn.cursor()
+        cur.execute(
+            f"""
+            SELECT object_store_id
+            FROM job
+            WHERE id = '{job_id}' AND object_store_id IS NOT NULL
+            """
+        )
+        object_store_id = cur.fetchone()
+        cur.close()
+        self.conn.close()
+
+        if not object_store_id:
+            print(f"Job id {job_id} not found in the database")
+            sys.exit(1)
+
+        return object_store_id[0]
+
+
+if __name__ == "__main__":
+    main()
\ No newline at end of file
diff --git a/roles/usegalaxy-eu.bashrc/tasks/bashrc_tasks.yml b/roles/usegalaxy-eu.bashrc/tasks/bashrc_tasks.yml
new file mode 100644
index 000000000..81a1c20f6
--- /dev/null
+++ b/roles/usegalaxy-eu.bashrc/tasks/bashrc_tasks.yml
@@ -0,0 +1,118 @@
+---
+- name: Check and add/update bashrc when user exists
+  block:
+    - name: Check for bashrc
+      stat:
+        path: "{{ item.uhome }}/.bashrc"
+      register: bashrc_stat_out
+
+    - name: Copy default bashrc when not existing
+      copy:
+        src: /etc/skel/.bashrc
+        dest: "{{ item.uhome }}/.bashrc"
+        remote_src: yes
+        mode: 0640
+        owner: "{{ item.uname }}"
+        group: "{{ item.gname }}"
+      when: not bashrc_stat_out.stat.exists
+
+    - name: Check for bashprofile
+      stat:
+        path: "{{ item.uhome }}/.bash_profile"
+      register: bashprofile_stat_out
+
+    - name: Check for profile
+      stat:
+        path: "{{ item.uhome }}/.profile"
+      register: profile_stat_out
+
+    - name: Copy default bashprofile when not existing
+      copy:
+        src: /etc/skel/.bash_profile
+        dest: "{{ item.uhome }}/.bash_profile"
+        remote_src: yes
+        mode: 0640
+        owner: "{{ item.uname }}"
+        group: "{{ item.gname }}"
+      when: not bashprofile_stat_out.stat.exists and not profile_stat_out.stat.exists
+
+    - name: Copy galaxy_jwd python script
+      copy:
+        src: galaxy_jwd.py
+        dest: /usr/local/bin/galaxy_jwd
+        mode: 0755
+        owner: galaxy
+        group: galaxy
+
+    - name: Insert some aliases and functions
+      blockinfile:
+        path: "{{ item.uhome }}/.bashrc"
+        marker: "# {mark} ANSIBLE MANAGED BLOCK"
+        content: |
+          # User specific aliases and functions
+          function change_to_wd() {
+            USAGE="Please provide a Galaxy job ID or a Condor job ID"
+            if (( $# == 0 )); then
+              echo $USAGE
+              return 0;
+            fi
+            for i in "$@"; do
+              if [[ "$i" = --help || "$i" = -h ]]; then
+                echo $USAGE
+                return 0;
+              fi
+            done
+            JID=$1
+            WD=$(dirname `condor_q -autoformat Cmd ClusterId | grep ${JID} | cut -f1 -d' '` || dirname `condor_history -autoformat Cmd ClusterId | grep ${JID} | cut -f1 -d' '` || find "{{ galaxy_config['galaxy']['job_working_directory'] }}""/0"${JID:0:2}"/"${JID:2:3} -maxdepth 1 -type d -name ${JID})
+            cd $WD
+          }
+
+          # Uses the /usr/local/bin/galaxy_jwd python script to change to the job working directory
+          function change_to_jwd() {
+            USAGE="Please provide a Galaxy job ID"
+            if (( $# == 0 )); then
+              echo $USAGE
+              return 0;
+            fi
+
+            JID=$1
+            JWD=$(python /usr/local/bin/galaxy_jwd get_jwd $JID)
+            cd $JWD
+          }
+
+          alias gl='journalctl -f -u galaxy-*'
+          alias glg='journalctl -fu galaxy-gunicorn@* | grep -v -e "/api/upload/hooks" -e "/history/current_history_json"'
+          alias glh='journalctl -f -u galaxy-handler@*'
+          alias glw='journalctl -f -u galaxy-workflow-scheduler@*'
+          alias cu='journalctl -u galaxy-gunicorn@*.service --since "10 minutes ago" | grep "/history/current_history_json" | awk "{print \$11}" | sort -u | wc -l'
+          alias chg2wd='change_to_wd'
+          alias chg2jwd='change_to_jwd'
+
+    - name: Insert some export vars
+      lineinfile:
+        path: "{{ item.uhome }}/.bashrc"
+        line: "{{ task_item }}"
+      loop:
+        # ENVs for gxadmin
+        - "export GALAXY_CONFIG_DIR={{ galaxy_config_dir }}"
+        - "export GALAXY_CONFIG_FILE={{ galaxy_config_file }}"
+        - "export GALAXY_LOG_DIR={{ galaxy_log_dir }}"
+        - "export GALAXY_MUTABLE_CONFIG_DIR={{ galaxy_mutable_config_dir }}"
+        - "export GALAXY_ROOT={{ galaxy_server_dir }}"
+        - "export VIRTUAL_ENV={{ galaxy_venv_dir }}"
+      loop_control:
+        loop_var: task_item
+
+    - name: Check for bash_history
+      stat:
+        path: "{{ item.uhome }}/.bash_history"
+      register: bashhistory_stat_out
+
+    - name: Create bash_history
+      file:
+        path: "{{ item.uhome }}/.bash_history"
+        state: touch
+        mode: 0640
+        owner: "{{ item.uname }}"
+        group: "{{ item.gname }}"
+      when: not bashhistory_stat_out.stat.exists
diff --git a/roles/usegalaxy-eu.bashrc/tasks/main.yml b/roles/usegalaxy-eu.bashrc/tasks/main.yml
index 782371e26..bae152bb9 100644
--- a/roles/usegalaxy-eu.bashrc/tasks/main.yml
+++ b/roles/usegalaxy-eu.bashrc/tasks/main.yml
@@ -1,85 +1,16 @@
 ---
-- name: Check for bashrc
-  stat:
-    path: "{{ galaxy_user.home }}/.bashrc"
-  register: bashrc_stat_out
-
-- name: Copy default bashrc when not existing
-  copy:
-    src: /etc/skel/.bashrc
-    dest: "{{ galaxy_user.home }}/.bashrc"
-    remote_src: yes
-    mode: 0640
-    owner: "{{ galaxy_user.name }}"
-    group: "{{ galaxy_group.name | default(galaxy_group) }}"
-  when: not bashrc_stat_out.stat.exists
-
-- name: Check for bashprofile
-  stat:
-    path: "{{ galaxy_user.home }}/.bash_profile"
-  register: bashprofile_stat_out
-
-- name: Check for profile
-  stat:
-    path: "{{ galaxy_user.home }}/.profile"
-  register: profile_stat_out
-
-- name: Copy default bashprofile when not existing
-  copy:
-    src: /etc/skel/.bash_profile
-    dest: "{{ galaxy_user.home }}/.bash_profile"
-    remote_src: yes
-    mode: 0640
-    owner: "{{ galaxy_user.name }}"
-    group: "{{ galaxy_group.name | default(galaxy_group) }}"
-  when: not bashprofile_stat_out.stat.exists and not profile_stat_out.stat.exists
-
-- name: Insert some aliases
-  blockinfile:
-    path: "{{ galaxy_user.home }}/.bashrc"
-    marker: "# {mark} ANSIBLE MANAGED BLOCK"
-    content: |
-      # User specific aliases and functions
-      function change_to_wd() {
-        USAGE="Please provide a Galaxy job ID or a Condor job ID"
-        if (( $# == 0 )); then
-          echo $USAGE
-          return 0;
-        fi
-        for i in "$@"; do
-          if [[ "$i" = --help || "$i" = -h ]]; then
-            echo $USAGE
-            return 0;
-          fi
-        done
-        JID=$1
-        WD=$(dirname `condor_q -autoformat Cmd ClusterId | grep ${JID} | cut -f1 -d' '` || dirname `condor_history -autoformat Cmd ClusterId | grep ${JID} | cut -f1 -d' '` || find "{{ galaxy_config['galaxy']['job_working_directory'] }}""/0"${JID:0:2}"/"${JID:2:3} -maxdepth 1 -type d -name ${JID})
-        cd $WD
-      }
-
-      alias gl='journalctl -f -u galaxy-*'
-      alias glg='journalctl -fu galaxy-gunicorn@* | grep -v -e "/api/upload/hooks" -e "/history/current_history_json"'
-      alias glh='journalctl -f -u galaxy-handler@*'
-      alias glw='journalctl -f -u galaxy-workflow-scheduler@*'
-      alias glc='journalctl -fu galaxy-celery@*'
-      alias cu='journalctl -u galaxy-gunicorn@*.service --since "10 minutes ago" | grep "/history/current_history_json" | awk "{print \$11}" | sort -u | wc -l'
-      alias chg2wd='change_to_wd'
-
-- name: Insert some export vars
-  lineinfile:
-    path: "{{ galaxy_user.home }}/.bashrc"
-    line: "export GALAXY_CONFIG_FILE={{ galaxy_config_file }}"
-
-- name: Check for bash_history
-  stat:
-    path: "{{ galaxy_user.home }}/.bash_history"
-  register: bashhistory_stat_out
-
-- name: Create bash_history
-  file:
-    path: "{{ galaxy_user.home }}/.bash_history"
-    state: touch
-    mode: 0640
-    owner: "{{ galaxy_user.name }}"
-    group: "{{ galaxy_group.name | default(galaxy_group) }}"
-  when: not bashhistory_stat_out.stat.exists
+- name: Get all users
+  getent:
+    database: passwd
+    split: ':'
+
+- name: Debug info when user does not exist
+  debug:
+    msg: "INFO: User {{ item.uname }} does not exist"
+  loop: "{{ bashrc_users }}"
+  when: (not item.uname in getent_passwd.keys())
+
+- name: Add/Update bashrc
+  include_tasks: bashrc_tasks.yml
+  loop: "{{ bashrc_users }}"
+  when: (item.uname in getent_passwd.keys())
diff --git a/secret_group_vars/db-main.yml b/secret_group_vars/db-main.yml
index db64e1c49..0629bd046 100644
--- a/secret_group_vars/db-main.yml
+++ b/secret_group_vars/db-main.yml
@@ -1,28 +1,34 @@
 $ANSIBLE_VAULT;1.1;AES256
-61613835356463393566373136623532326163663230316361653034656263376562663635303734 -3338393061663432663639313764383839653735326335310a306535333533643539643532326539 -34373036353331626433386336646466626265646630333061306639346164623936393336373631 -6164333538396139640a326664643666333963613938623539353962313666336262666134623832 -65373038613633323337346363323564356464303765636431346365323933336437323866393734 -37353731333637613135373338666430363865393663636335313536333139376432653734633031 -33323132623964366534646132316163666234616539333932323231613537663937633732373865 -66373165643563353739323736323032613237653663653437653263353164623535383035613636 -37633033613863303030626631333765306462623561373433313664633265363864303062663231 -62383430333233333435393762396439313766306363616433333461633666353232343865623634 -37343038363334333433666462313465343865653365323231643661626566386535343261306636 -35393933333739643532636461346665366666313139353730333330656363623636393165303433 -65646638396565333238343233663061363261303738306430376633633465376633363562326536 -38666433306666666362393465666637613666343731646139356638343463363538343234633530 -31343732656462663062303036366230366365623334386665393238636136323635663739623131 -34633335643934613135613361346563363862666132316335366366383438353030343333386431 -35366661343464363530643634323138663962643436376136366263376462366463393462303838 -35383735326164393730613434363133366537386265393130343031633837383632396233623164 -66393633363639383638303736616564633266333937343461326262386564376137393533393634 -35663439336539623433633931336164393665386439643631633963623963313263616163326236 -66636263653064336265386432373338373630316337616532613563343434363264626465633639 -35306365646163383832333063333134346261623364306432343536323530646638353138313437 -31343965626565363166356632346562336435326438316332373163333765646366366166653536 -38666532613665363163616336323236646338646635643464386264313234313965346335306263 -31386362663535353334383864313236633834666439323038383766363831373462663034373832 -62613134643766643961663736636664366232653238643437326461316664353937326163623630 -663637666664386464653339616336383739 +32363835343162383736393837653534326237313362343234363037623661386634323939343839 +6434343662383832613038643834396634376135633164660a623032383231383664666531393465 +39616363313136373636616536616634313432363732306538366462383536373764316266626166 +3339666466313036370a333337646531393166646666343963323734323630396366333162626638 +39633139363435626337363235353364623731643965313830303239316265616165333838656562 +39336461626130366139636664323762616330373032326562653630396233656337303966386139 +36366161336563646666343438613834363734613861373737303165323838376564666361633937 +65613833666638303433626537663563653538623962343537363033636166323038626236346262 +31663830343532343430366231393139356264636134303131613132643435343834356336626437 +37373934613035333333383836353762313866343733396133323133663635626561323437653038 +32613236316362613436323961633666363639343135383862353764643639663837636538346162 +37336136386263333534356162383638613761396462393265336339346264306465316565323561 +66343239336562373137303538396230383836343831656535313063663137666365616434366337 +39383337343266326263643637353033623032396461373439323661313134303266663263316635 +32396362393935333963346237633239653936363263313239336130623039356565343034343435 +32656533363864393463373839343662666562616233336262326463316333356262376365636464 
+64393931346532336438333564313838666432383434393531306563653335653166633565616137 +32386438623061656434353430326630313736663237326265386133383461333930393266383234 +64393433613832316533303661616334386136373336316137323636396530353365363263663966 +33386233343630336562303062313362313364353762366633323136393264656239633561633761 +39386338396132333862303530316339333531363066393637353263633663636365616261353639 +63643038323062373964376562313139653333396661636265623435613963653966666661333763 +38383663363532316636383961653338653864653361346131633364323863626566616265383238 +33383865623531663135616561643530613932376532336534343139366433383334306434363364 +38353964393466633635316262626339343333633636643265313562353432666262643130336638 +30636264313131653264373237333062323637623439366632353933666330396165663466326533 +62383762663332666432373262633737663365313063643838316163656533376439656438346430 +66656462616437313430663364303434373738346636366439646563663237376532656439373166 +31366436363866383638646435336461396665636430616365643563646566386565383435306566 +38333331386439356264333534353934633035323232623233313935356238343765613362386638 +64383261616466396532393834356263383662376539306662666437333938333434383531393033 +39316137343738616166303861616136636638383338383035623138323437666630303965326237 +6561
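
Both roles now read their user list from a role default (pgc_users for hxr.postgres-connection, bashrc_users for usegalaxy-eu.bashrc) and skip any entry whose account is missing from getent passwd. A minimal sketch of how a group_vars override for more than one account could look, assuming a hypothetical second account named gxadmin with home /home/gxadmin and group galaxy (these values are illustrative only and not part of this change):

# Default Galaxy user plus a hypothetical extra admin account
pgc_users:
  - uname: "{{ galaxy_user.name }}"
    uhome: "{{ galaxy_user.home }}"
    gname: "{{ galaxy_group.name | default(galaxy_group) }}"
    pguser: "{{ postgres_user }}"
    pgpass: "{{ postgres_pass }}"
    pgdatabase: galaxy
  - uname: gxadmin          # hypothetical extra user
    uhome: /home/gxadmin    # hypothetical home directory
    gname: galaxy
    pguser: "{{ postgres_user }}"
    pgpass: "{{ postgres_pass }}"
    pgdatabase: galaxy

bashrc_users:
  - uname: "{{ galaxy_user.name }}"
    uhome: "{{ galaxy_user.home }}"
    gname: "{{ galaxy_group.name }}"
  - uname: gxadmin          # hypothetical extra user
    uhome: /home/gxadmin    # hypothetical home directory
    gname: galaxy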