From e5287d18feb50b80570bafef69b4d9bab8fc803d Mon Sep 17 00:00:00 2001
From: Geoff
Date: Thu, 7 Dec 2017 10:14:29 +1100
Subject: [PATCH 1/7] 'notar' export now saved to cdn_export dir

---
 CHANGELOG.md  | 2 ++
 sat_export.py | 7 ++++++-
 2 files changed, 8 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 21cc742..a73d09b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,8 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
 and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
 
 ## [Unreleased]
+### Changed
+- --notar export saved in /cdn_export dir rather than /export to prevent it being deleted
 
 ## [1.1.1] - 2017-10-25
 
diff --git a/sat_export.py b/sat_export.py
index 566a1d7..9a579ff 100644
--- a/sat_export.py
+++ b/sat_export.py
@@ -14,6 +14,7 @@
 import fnmatch, subprocess, tarfile
 import simplejson as json
 from glob import glob
+from distutils.dir_util import copy_tree
 import helpers
 
 try:
@@ -1065,7 +1066,6 @@ def main(args):
     pickle.dump(exported_repos, open(export_dir + '/exported_repos.pkl', 'wb'))
     pickle.dump(package_count, open(export_dir + '/package_count.pkl', 'wb'))
 
-
     # Run GPG Checks on the exported RPMs
     if not args.nogpg:
         do_gpg_check(export_dir)
@@ -1085,6 +1085,11 @@ def main(args):
         os.system("rm -f " + helpers.EXPORTDIR + "/*.pkl")
         os.system("rm -f " + export_dir + "/*.pkl")
 
+        # Copy export_dir to cdn_export to prevent blowing it away next time we export
+        copy_tree(export_dir, helpers.EXPORTDIR + "/cdn_export")
+        # Cleanup
+        shutil.rmtree(helpers.EXPORTDIR + "/cdn_export/manifest", ignore_errors=True, onerror=None)
+        shutil.rmtree(export_dir)
 
     # We're done. Write the start timestamp to file for next time
     os.chdir(script_dir)

From 4dc87e7944c37e4c3d9f1e70ca8e97166c3a600b Mon Sep 17 00:00:00 2001
From: Geoff
Date: Thu, 7 Dec 2017 13:16:25 +1100
Subject: [PATCH 2/7] Check for missing import datasets

---
 CHANGELOG.md     |  4 ++++
 README.md        | 25 ++++++++++++++++-------
 man/sat_import.8 | 16 +++++++++++++++
 sat_import.py    | 53 ++++++++++++++++++++++++++++++++++++++++++++++++
 4 files changed, 91 insertions(+), 7 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index a73d09b..231a8ea 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,10 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
 and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
 
 ## [Unreleased]
+### Added
+- sat_import now checks for exports that have not been imported (missed/skipped)
+- --fixhistory option in sat_import to align import/export histories
+
 ### Changed
 - --notar export saved in /cdn_export dir rather than /export to prevent it being deleted
 
diff --git a/README.md b/README.md
index ba389e5..6b8dce1 100644
--- a/README.md
+++ b/README.md
@@ -28,7 +28,7 @@ hammer user create --login svc-api-user --firstname API --lastname User \
   --organization-ids 1 --default-organization-id 1 --admin true
 ```
 
-Foreman needs to be configured to export content to the location you require. By default the path is
+Foreman needs to be configured to export content to the location you require. By default the path is
 /var/lib/pulp/katello-export - this will result in you probably filling your /var/lib/pulp partition!
 The configs in these scripts assume that the exports are going to /var/sat-export - this should be a
 dedicated partition or even better dedicated disk just for export content.
@@ -223,11 +223,14 @@ This companion script to sat_export, running on the Disconnected Satellite
 performs a sha256sum verification of each part of the specified archive prior
 to extracting the transferred content to disk.
 
-Once the content has been extracted, a sync is triggered of each repository
-in the import set. Note that repositories MUST be enabled on the disconnected
-satellite prior to the sync working - for this reason a `nosync` option (-n)
-exists so that the repos can be extracted to disk and then enabled before the
-sync occurs. In order to not overload the Satellite during the sync, the
+Once the content has been extracted, a check is performed to see whether any
+previous exports have not yet been imported. This helps maintain data integrity
+on the disconnected Satellite system. Any missing imports will be displayed and
+the option to continue or abort will be presented. Upon continuing, a sync is
+triggered of each repository in the import set. Note that repositories MUST be
+enabled on the disconnected satellite prior to the sync working - for this
+reason a `nosync` option (-n) exists so that the repos can be extracted to disk
+and then enabled before the sync occurs. In order not to overload the Satellite during the sync, the
 repositories will be synced in smaller batches, the number of repos in a batch
 being defined in the config.yml file. (It has been observed on systems with a
 large number of repos that triggering a sync on all repos at once pretty much
@@ -245,6 +248,13 @@ All previously imported datasets can be shown with the (-L) flag.
 Note that a dataset can normally only be imported ONCE. To force an import of
 an already completed dataset, use the (-f) flag.
 
+When missing import datasets are detected, they should be imported to ensure
+data integrity and consistency. There may, however, be cases where the missing
+imports have been included by other means, or are no longer required at all. In
+these cases, the --fixhistory flag can be used to 'reset' the import history so
+that it matches the export history of the current import dataset, clearing
+these warnings.
+
 ### Help Output
 ```
 usage: sat_import.py [-h] [-o ORG] -d DATE [-n] [-r] [-l] [-L] [-f]
@@ -260,7 +270,8 @@ optional arguments:
   -l, --last            Show the last successfully completed import date
   -L, --list            List all successfully completed imports
   -c, --count           Display all package counts after import
-  -f, --force           Force import of data if it has previously been done
+  -f, --force           Force import of data if it has previously been done
+  --fixhistory          Force import history to match export history
 ```
 
 ### Examples

diff --git a/man/sat_import.8 b/man/sat_import.8
index 174579f..c8762e7 100644
--- a/man/sat_import.8
+++ b/man/sat_import.8
@@ -30,6 +30,12 @@ Any repositories found in the import that do not exist in Satellite will be indi
 .RE
 .RE
 .RS 3
+- A check is performed to see whether any previous exports have not yet been imported.
+.RS 2
+Any missing imports will be displayed and the option to continue or abort will be presented.
+.RE
+.RE
+.RS 3
 - Satellite will perform a bulk repository sync of the repositories within the import dataset.
 .RE
 .RS 3
@@ -107,6 +113,16 @@ repositories to be shown, even if there is no mis-match.
 Normally the script will prevent the importing of a dataset that has already
 been imported. However, using this option will force an import of the dataset
 to be performed.
 .RE
+.PP
+.BR "--fixhistory"
+.RS 3
+When missing import datasets are detected, they should be imported to ensure
+data integrity and consistency. There may, however, be cases where the missing
+imports have been included by other means, or are no longer required at all.
+In these cases, this flag can be used to 'reset' the import history so that it
+matches the export history of the current import dataset, clearing these
+warnings.
+.RE
 .SH EXAMPLES
 Check when the last import was performed:

diff --git a/sat_import.py b/sat_import.py
index 297b042..fc40a1c 100644
--- a/sat_import.py
+++ b/sat_import.py
@@ -264,6 +264,34 @@ def check_counts(org_id, package_count, count):
 
     print '\n'
 
+
+def check_missing(imports, exports, dataset, fixhistory, vardir):
+    """
+    Compare export history with import history to find any datasets that have not been imported
+    """
+    missing = False
+
+    if fixhistory:
+        # Remove the last element (this import) before saving - we haven't imported yet!
+        exports = exports[:-1]
+        # Copy the current 'exporthistory' over the 'importhistory' to 'fix' current mismatches
+        pickle.dump(exports, open(vardir + '/imports.pkl', "wb"))
+
+        msg = "Saved export history as import history. Please re-run this import."
+        helpers.log_msg(msg, 'INFO')
+        print msg
+        sys.exit(2)
+
+    else:
+        for ds in exports:
+            if ds not in imports:
+                if dataset not in ds:
+                    msg = "Import dataset " + ds + " has not been imported"
+                    helpers.log_msg(msg, 'WARNING')
+                    missing = True
+
+    return missing
+
+
 def main(args):
     """
     Main Routine
@@ -307,6 +335,8 @@ def main(args):
                         required=False, action="store_true")
     parser.add_argument('-f', '--force', help='Force import of data if it has previously been done',
                         required=False, action="store_true")
+    parser.add_argument('--fixhistory', help='Force import history to match export history',
+                        required=False, action="store_true")
     args = parser.parse_args()
 
     # Set our script variables from the input args
@@ -316,6 +346,11 @@ def main(args):
     org_name = helpers.ORG_NAME
     dataset = args.dataset
 
+    if args.fixhistory:
+        fixhistory = True
+    else:
+        fixhistory = False
+
     # Record where we are running from
     script_dir = str(os.getcwd())
 
@@ -367,6 +402,24 @@ def main(args):
     # Extract the input files
     extract_content(basename)
 
+    # Read in the export history from the input dataset
+    dsname = dataset.split('_')[1]
+    exports = pickle.load(open(helpers.IMPORTDIR + '/exporthistory_' + dsname + '.pkl', 'rb'))
+
+    # Check for and let the user decide if they want to continue with missing imports
+    missing_imports = check_missing(imports, exports, dataset, fixhistory, vardir)
+    if missing_imports:
+        print "Run sat_import with the --fixhistory flag to reset the import history to this export"
+        answer = helpers.query_yes_no("Continue with import?", "no")
+        if not answer:
+            msg = "Import Aborted"
+            helpers.log_msg(msg, 'ERROR')
+            sys.exit(1)
+        else:
+            msg = "Import continued by user"
+            helpers.log_msg(msg, 'INFO')
+
+
     # Trigger a sync of the content into the Library
     if args.nosync:
         #print helpers.GREEN + "Import complete.\n" + helpers.ENDC

From 3ce4463f0350f2290ae84772d0beee180a87de74 Mon Sep 17 00:00:00 2001
From: Geoff
Date: Fri, 8 Dec 2017 10:52:17 +1100
Subject: [PATCH 3/7] Add email capability, unattended option, general tidyup

---
 CHANGELOG.md              |  2 +
 README.md                 |  7 ++++
 config/config.yml.example |  5 +++
 helpers.py                | 36 ++++++++++++++++-
 import_auto.py            | 82 +++++++++++++++++++++++++++++++++++++++
 sat_export.py             | 79 ++++++++++++++++++++++++++++++-------
 sat_import.py             | 68 ++++++++++++++++++++++++++------
 7 files changed, 253 insertions(+), 26 deletions(-)
 create mode 100644 import_auto.py

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 231a8ea..904a0c3 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,6 +7,8 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.
 ### Added
 - sat_import now checks for exports that have not been imported (missed/skipped)
 - --fixhistory option in sat_import to align import/export histories
+- Email output capability for notifications when automating scripts
+- Add unattended option to allow scripts to be automated
 
 ### Changed
 - --notar export saved in /cdn_export dir rather than /export to prevent it being deleted
 
diff --git a/README.md b/README.md
index 6b8dce1..e1bc28e 100644
--- a/README.md
+++ b/README.md
@@ -68,6 +68,11 @@ logging:
   dir: /var/log/sat6-scripts    (Directory to use for logging)
   debug: [True|False]
 
+email:
+  mailout: True
+  mailfrom: Satellite 6
+  mailto: sysadmin@example.org
+
 export:
   dir: /var/sat-export  (Directory to export content to - Connected Satellite)
 
@@ -199,6 +204,7 @@ optional arguments:
   -l, --last            Display time of last export
   -L, --list            List all successfully completed exports
   --nogpg               Skip GPG checking
+  -u, --unattended      Answer any prompts safely, allowing automated usage
   -r, --repodata        Include repodata for repos with no incremental content
   -p, --puppetforge     Include puppet-forge-server format Puppet Forge repo
   --notar               Do not archive the extracted content
@@ -271,6 +277,7 @@ optional arguments:
   -L, --list            List all successfully completed imports
   -c, --count           Display all package counts after import
   -f, --force           Force import of data if it has previously been done
+  -u, --unattended      Answer any prompts safely, allowing automated usage
   --fixhistory          Force import history to match export history
 ```
 
diff --git a/config/config.yml.example b/config/config.yml.example
index 7f823a1..4903e16 100644
--- a/config/config.yml.example
+++ b/config/config.yml.example
@@ -11,6 +11,11 @@ logging:
   dir: /var/log/satellite
   debug: False
 
+email:
+  mailout: True
+  mailfrom: Satellite 6
+  mailto: sysadmin@example.org
+
 export:
   dir: /var/sat-export
 
diff --git a/helpers.py b/helpers.py
index 6007e03..3106465 100644
--- a/helpers.py
+++ b/helpers.py
@@ -10,9 +10,10 @@
 """Functions common to various Satellite 6 scripts"""
 
 import sys, os, time, datetime, argparse
-import logging
+import logging, tempfile
 from time import sleep
 from hashlib import sha256
+import smtplib
 
 try:
     import requests
@@ -59,6 +60,14 @@
     PROMOTEBATCH = CONFIG['promotion']['batch']
 else:
     PROMOTEBATCH = 255
+if 'email' in CONFIG and 'mailout' in CONFIG['email']:
+    MAILOUT = CONFIG['email']['mailout']
+else:
+    MAILOUT = False
+if 'email' in CONFIG and 'mailfrom' in CONFIG['email']:
+    MAILFROM = CONFIG['email']['mailfrom']
+if 'email' in CONFIG and 'mailto' in CONFIG['email']:
+    MAILTO = CONFIG['email']['mailto']
 
 if 'hostname' in CONFIG['puppet-forge-server']:
     PFSERVER = CONFIG['puppet-forge-server']['hostname']
@@ -85,6 +94,11 @@
 BOLD = '\033[1m'
 UNDERLINE = '\033[4m'
 
+# Mailout pre-canned subjects
+MAILSUBJ_FI = "Satellite 6 import failure"
+MAILSUBJ_SI = "Satellite 6 import successful"
+MAILSUBJ_FP = "Satellite 6 publish failure"
+MAILSUBJ_SP = "Satellite 6 publish successful"
 
 def who_is_running():
     """ Return the OS user that is running the script """
@@ -435,6 +449,20 @@ def query_yes_no(question, default="yes"):
                              "(or 'y' or 'n').\n")
 
 
+def mailout(subject, message):
+    """
+    Function to handle simple SMTP mailouts for alerting.
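+    e.g. mailout(MAILSUBJ_FI, output) - both arguments are plain strings.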
+    Assumes localhost is configured for SMTP forwarding (postfix)
+    """
+    sender = MAILFROM
+    receivers = [MAILTO]
+
+    body = 'From: {}\nSubject: {}\n\n{}'.format(sender, subject, message)
+
+    smtpObj = smtplib.SMTP('localhost')
+    smtpObj.sendmail(sender, receivers, body)
+
+
 #-----------------------
 # Configure logging
 if not os.path.exists(LOGDIR):
@@ -453,6 +481,9 @@
 logging.getLogger("requests").setLevel(logging.WARNING)
 logging.getLogger("urllib3").setLevel(logging.WARNING)
 
+# Open a temp file to hold the email output
+tf = tempfile.NamedTemporaryFile()
+
 def log_msg(msg, level):
     """Write message to logfile"""
 
@@ -463,10 +494,13 @@ def log_msg(msg, level):
             print BOLD + "DEBUG: " + msg + ENDC
     elif level == 'ERROR':
         logging.error(msg)
+        tf.write('ERROR: ' + msg + '\n')
         print ERROR + "ERROR: " + msg + ENDC
     elif level == 'WARNING':
         logging.warning(msg)
+        tf.write('WARNING: ' + msg + '\n')
         print WARNING + "WARNING: " + msg + ENDC
 
     # Otherwise if we ARE in debug, write everything to the log AND stdout
     else:
         logging.info(msg)
+        tf.write(msg + '\n')

diff --git a/import_auto.py b/import_auto.py
new file mode 100644
index 0000000..7236865
--- /dev/null
+++ b/import_auto.py
@@ -0,0 +1,82 @@
+#!/usr/bin/env python
+
+import sys, os, glob
+import subprocess
+import argparse
+import helpers
+
+
+def run_imports(dryrun):
+    print "Processing Imports..."
+
+    # Find any sha256 files in the import dir
+    infiles = glob.glob(helpers.IMPORTDIR + '/*.sha256')
+
+    # Extract the dataset timestamp/name from the filename and add to a new list
+    # Assumes naming standard sat6_export_YYYYMMDD-HHMM_NAME.sha256
+    # 'sorted' function should result in imports being done in correct order by filename
+    tslist = []
+    for f in sorted(infiles):
+        dstime = f.split('_')[-2]
+        dsname = (f.split('_')[-1]).split('.')[-2]
+        tslist.append(dstime + '_' + dsname)
+
+    if tslist:
+        msg = 'Found import datasets on disk...\n' + '\n'.join(tslist)
+    else:
+        msg = 'No import datasets to process'
+    helpers.log_msg(msg, 'INFO')
+    print msg
+
+    # Now for each import file in the list, run the import script in unattended mode:-)
+    if tslist:
+        if not dryrun:
+            for dataset in tslist:
+                rc = subprocess.call(['/usr/local/bin/sat_import', '-u', '-r', '-d', dataset])
+                print rc
+        else:
+            msg = "Dry run - not actually performing import"
+            helpers.log_msg(msg, 'WARNING')
+
+
+def main(args):
+
+    ### Run import/publish on scheduled day
+
+    # Check for sane input
+    parser = argparse.ArgumentParser(
+        description='Imports, Publishes and Promotes content views.')
+    parser.add_argument('-d', '--dryrun', help='Dry Run - Only show what will be done',
+                        required=False, action="store_true")
+
+    args = parser.parse_args()
+
+    if args.dryrun:
+        dryrun = True
+    else:
+        dryrun = False
+
+
+    # Check if there are any imports in our input dir and import them
+    run_imports(dryrun)
+
+    # If all imports successful run publish
+
+
+    ### Run promote on scheduled day
+
+
+
+    ### Run cleanup on scheduled day
+
+
+
+
+
+
+if __name__ == "__main__":
+    try:
+        main(sys.argv[1:])
+    except KeyboardInterrupt, e:
+        print >> sys.stderr, ("\n\nExiting on user cancel.")
+        sys.exit(1)

diff --git a/sat_export.py b/sat_export.py
index 9a579ff..1b7d1a1 100644
--- a/sat_export.py
+++ b/sat_export.py
@@ -11,7 +11,7 @@
 """
 
 import sys, argparse, datetime, os, shutil, pickle, re
-import fnmatch, subprocess, tarfile
+import fnmatch, subprocess, tarfile, tempfile
 import simplejson as json
 from glob import glob
 from distutils.dir_util import copy_tree
@@ -82,12 +82,22 @@ def export_cv(dov_ver, last_export, export_type):
     except: # pylint: disable-msg=W0702
         msg = "Unable to start export - Conflicting Sync or Export already in progress"
         helpers.log_msg(msg, 'ERROR')
+        if helpers.MAILOUT:
+            helpers.tf.seek(0)
+            output = "{}".format(helpers.tf.read())
+            subject = "Satellite 6 export failure"
+            helpers.mailout(subject, output)
         sys.exit(1)
 
     # Trap some other error conditions
     if "Required lock is already taken" in str(task_id):
         msg = "Unable to start export - Sync in progress"
         helpers.log_msg(msg, 'ERROR')
+        if helpers.MAILOUT:
+            helpers.tf.seek(0)
+            output = "{}".format(helpers.tf.read())
+            subject = "Satellite 6 export failure"
+            helpers.mailout(subject, output)
         sys.exit(1)
 
     msg = "Export started, task_id = " + str(task_id)
@@ -105,7 +115,7 @@ def export_repo(repo_id, last_export, export_type):
         msg = "Exporting repository id " + str(repo_id)
     else:
         msg = "Exporting repository id " + str(repo_id) + " from start date " + last_export
-    helpers.log_msg(msg, 'INFO')
+    helpers.log_msg(msg, 'DEBUG')
 
     try:
         if export_type == 'full':
@@ -126,12 +136,22 @@ def export_repo(repo_id, last_export, export_type):
     except: # pylint: disable-msg=W0702
         msg = "Unable to start export - Conflicting Sync or Export already in progress"
         helpers.log_msg(msg, 'ERROR')
+        if helpers.MAILOUT:
+            helpers.tf.seek(0)
+            output = "{}".format(helpers.tf.read())
+            subject = "Satellite 6 export failure"
+            helpers.mailout(subject, output)
         sys.exit(1)
 
     # Trap some other error conditions
     if "Required lock is already taken" in str(task_id):
         msg = "Unable to start export - Sync in progress"
         helpers.log_msg(msg, 'ERROR')
+        if helpers.MAILOUT:
+            helpers.tf.seek(0)
+            output = "{}".format(helpers.tf.read())
+            subject = "Satellite 6 export failure"
+            helpers.mailout(subject, output)
         sys.exit(1)
 
     msg = "Export started, task_id = " + str(task_id)
@@ -404,14 +424,19 @@ def check_incomplete_sync():
     if incomplete_sync:
         msg = "Incomplete sync jobs detected"
         helpers.log_msg(msg, 'WARNING')
-        answer = helpers.query_yes_no("Continue with export?", "no")
-        if not answer:
+        if not args.unattended:
+            answer = helpers.query_yes_no("Continue with export?", "no")
+            if not answer:
+                msg = "Export Aborted"
+                helpers.log_msg(msg, 'ERROR')
+                sys.exit(3)
+            else:
+                msg = "Export continued by user"
+                helpers.log_msg(msg, 'INFO')
+        else:
             msg = "Export Aborted"
             helpers.log_msg(msg, 'ERROR')
-            sys.exit(1)
-        else:
-            msg = "Export continued by user"
-            helpers.log_msg(msg, 'INFO')
+            sys.exit(3)
 
 
 def check_disk_space(export_type):
@@ -423,14 +448,19 @@
     if export_type == 'full' and int(float(pulp_used)) > 50:
         msg = "Insufficient space in /var/lib/pulp for a full export. >50% free space is required."
         helpers.log_msg(msg, 'WARNING')
-        answer = helpers.query_yes_no("Continue with export?", "no")
-        if not answer:
+        if not args.unattended:
+            answer = helpers.query_yes_no("Continue with export?", "no")
+            if not answer:
+                msg = "Export Aborted"
+                helpers.log_msg(msg, 'ERROR')
+                sys.exit(3)
+            else:
+                msg = "Export continued by user"
+                helpers.log_msg(msg, 'INFO')
+        else:
             msg = "Export Aborted"
             helpers.log_msg(msg, 'ERROR')
-            sys.exit(1)
-        else:
-            msg = "Export continued by user"
-            helpers.log_msg(msg, 'INFO')
+            sys.exit(3)
 
 
 def locate(pattern, root=os.curdir):
@@ -473,6 +503,12 @@ def do_gpg_check(export_dir):
         helpers.log_msg(msg, 'ERROR')
         msg = "------ Export Aborted ------"
         helpers.log_msg(msg, 'INFO')
+        if helpers.MAILOUT:
+            helpers.tf.seek(0)
+            output = "{}".format(helpers.tf.read())
+            subject = "Satellite 6 export failure - GPG checksum failure"
+            message = "GPG check of exported RPMs failed. Check logs for details\n\n" + output
+            helpers.mailout(subject, message)
         sys.exit(1)
     else:
         msg = "GPG check completed successfully"
@@ -651,6 +687,8 @@ def main(args):
                         action="store_true")
     parser.add_argument('-n', '--nogpg', help='Skip GPG checking', required=False,
                         action="store_true")
+    parser.add_argument('-u', '--unattended', help='Answer any prompts safely, allowing automated usage',
+                        required=False, action="store_true")
     parser.add_argument('--notar', help='Skip TAR creation', required=False,
                         action="store_true")
     parser.add_argument('--forcexport', help='Force export on import-only satellite', required=False,
@@ -775,6 +813,7 @@ def main(args):
     msg = "------ " + ename + " Content export started by " + runuser + " ---------"
     helpers.log_msg(msg, 'INFO')
 
+
     # Get the current time - this will be the 'last export' time if the export is OK
     start_time = datetime.datetime.strftime(datetime.datetime.now(), '%Y-%m-%d %H:%M:%S')
     print "START: " + start_time + " (" + ename + " export)"
@@ -922,6 +961,11 @@ def main(args):
     if not os.path.exists(exportpath):
         msg = exportpath + " was not created.\nCheck permissions/SELinux on export dir"
         helpers.log_msg(msg, 'ERROR')
+        if helpers.MAILOUT:
+            helpers.tf.seek(0)
+            output = "{}".format(helpers.tf.read())
+            subject = "Satellite 6 export failure"
+            helpers.mailout(subject, output)
         sys.exit(1)
 
     os.chdir(exportpath)
@@ -1105,6 +1149,13 @@ def main(args):
     msg = "Export complete"
     helpers.log_msg(msg, 'INFO')
 
+    if helpers.MAILOUT:
+        helpers.tf.seek(0)
+        output = "{}".format(helpers.tf.read())
+        subject = "Satellite 6 export complete"
+        message = "Export of " + ename + " successfully completed\n\n" + output
+        helpers.mailout(subject, message)
+
     # Exit cleanly
     sys.exit(0)

diff --git a/sat_import.py b/sat_import.py
index fc40a1c..d62468e 100644
--- a/sat_import.py
+++ b/sat_import.py
@@ -26,6 +26,10 @@ def get_inputfiles(dataset):
     if not os.path.exists(helpers.IMPORTDIR + '/' + basename + '.sha256'):
         msg = "Cannot continue - missing sha256sum file " + helpers.IMPORTDIR + '/' + shafile
         helpers.log_msg(msg, 'ERROR')
+        if helpers.MAILOUT:
+            helpers.tf.seek(0)
+            output = "{}".format(helpers.tf.read())
+            helpers.mailout(helpers.MAILSUBJ_FI, output)
         sys.exit(1)
 
     # Verify the checksum of each part of the import
@@ -39,6 +43,10 @@ def get_inputfiles(dataset):
     if result != 0:
         msg = "Import Aborted - Tarfile checksum verification failed"
         helpers.log_msg(msg, 'ERROR')
+        if helpers.MAILOUT:
+            helpers.tf.seek(0)
+            output = "{}".format(helpers.tf.read())
+            helpers.mailout(helpers.MAILSUBJ_FI, output)
         sys.exit(1)
 
     # We're good
@@ -69,6 +77,7 @@ def sync_content(org_id, imported_repos):
     """
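+    # Check repos in the import against those enabled on this Satellite before syncing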
     repos_to_sync = []
     delete_override = False
+    newrepos = False
 
     # Get a listing of repositories in this Satellite
     enabled_repos = helpers.get_p_json(
@@ -107,6 +116,7 @@ def sync_content(org_id, imported_repos):
                 helpers.log_msg(msg, 'DEBUG')
             else:
                 msg = "Repo " + repo + " is not enabled in Satellite"
+                newrepos = True
                 # If the repo is not enabled, don't delete the input files.
                 # This gives the admin a chance to manually enable the repo and re-import
                 delete_override = True
@@ -119,7 +129,7 @@ def sync_content(org_id, imported_repos):
     if not repos_to_sync:
         msg = "No updates in imported content - skipping sync"
         helpers.log_msg(msg, 'WARNING')
-        return
+        return (delete_override, newrepos)
     else:
         msg = "Repo ids to sync: " + str(repos_to_sync)
         helpers.log_msg(msg, 'DEBUG')
@@ -158,7 +168,7 @@ def sync_content(org_id, imported_repos):
         msg = "Batch sync has errors"
         helpers.log_msg(msg, 'WARNING')
 
-    return delete_override
+    return (delete_override, newrepos)
 
 
 def count_packages(repo_id):
@@ -301,6 +311,10 @@ def main(args):
     if not helpers.DISCONNECTED:
         msg = "Import cannot be run on the connected Satellite (Sync) host"
         helpers.log_msg(msg, 'ERROR')
+        if helpers.MAILOUT:
+            helpers.tf.seek(0)
+            output = "{}".format(helpers.tf.read())
+            helpers.mailout(helpers.MAILSUBJ_FI, output)
         sys.exit(1)
 
     # Who is running this script?
@@ -335,6 +349,8 @@ def main(args):
                         required=False, action="store_true")
     parser.add_argument('-f', '--force', help='Force import of data if it has previously been done',
                         required=False, action="store_true")
+    parser.add_argument('-u', '--unattended', help='Answer any prompts safely, allowing automated usage',
+                        required=False, action="store_true")
     parser.add_argument('--fixhistory', help='Force import history to match export history',
                         required=False, action="store_true")
     args = parser.parse_args()
@@ -391,7 +407,7 @@ def main(args):
         if not args.force:
             msg = "Dataset " + dataset + " has already been imported. Use --force if you really want to do this."
             helpers.log_msg(msg, 'WARNING')
-            sys.exit(1)
+            sys.exit(2)
 
     # Figure out if we have the specified input fileset
     basename = get_inputfiles(dataset)
@@ -409,15 +425,25 @@ def main(args):
     # Check for and let the user decide if they want to continue with missing imports
     missing_imports = check_missing(imports, exports, dataset, fixhistory, vardir)
     if missing_imports:
-        print "Run sat_import with the --fixhistory flag to reset the import history to this export"
-        answer = helpers.query_yes_no("Continue with import?", "no")
-        if not answer:
+        msg = "Run sat_import with the --fixhistory flag to reset the import history to this export"
+        helpers.log_msg(msg, 'INFO')
+        if not args.unattended:
+            answer = helpers.query_yes_no("Continue with import?", "no")
+            if not answer:
+                msg = "Import Aborted"
+                helpers.log_msg(msg, 'ERROR')
+                sys.exit(3)
+            else:
+                msg = "Import continued by user"
+                helpers.log_msg(msg, 'INFO')
+        else:
             msg = "Import Aborted"
             helpers.log_msg(msg, 'ERROR')
-            sys.exit(1)
-        else:
-            msg = "Import continued by user"
-            helpers.log_msg(msg, 'INFO')
+            if helpers.MAILOUT:
+                helpers.tf.seek(0)
+                output = "{}".format(helpers.tf.read())
+                helpers.mailout(helpers.MAILSUBJ_FI, output)
+            sys.exit(3)
 
 
     # Trigger a sync of the content into the Library
     if args.nosync:
         #print helpers.GREEN + "Import complete.\n" + helpers.ENDC
@@ -434,7 +460,7 @@ def main(args):
         package_count = pickle.load(open('package_count.pkl', 'rb'))
 
         # Run a repo sync on each imported repo
-        (delete_override) = sync_content(org_id, imported_repos)
+        (delete_override, newrepos) = sync_content(org_id, imported_repos)
 
         print helpers.GREEN + "Import complete.\n" + helpers.ENDC
         print 'Please publish content views to make new content available.'
@@ -475,6 +501,26 @@ def main(args):
         imports.append(dataset)
         pickle.dump(imports, open(vardir + '/imports.pkl', "wb"))
 
+    # Run the mailout
+    if helpers.MAILOUT:
+        helpers.tf.seek(0)
+        output = "{}".format(helpers.tf.read())
+        if missing_imports:
+            message = "Import of dataset " + dataset + " completed successfully.\n\n \
+                Missing datasets were detected during the import - please check the logs\n\n" + output
+            subject = "Satellite 6 import completed: Missing datasets"
+
+        elif newrepos:
+            message = "Import of dataset " + dataset + " completed successfully.\n\n \
+                New repos found that need to be imported manually - please check the logs \n\n" + output
+            subject = "Satellite 6 import completed: New repos require manual intervention"
+
+        else:
+            message = "Import of dataset " + dataset + " completed successfully\n\n" + output
+            subject = "Satellite 6 import completed"
+
+        helpers.mailout(subject, message)
+
     # And exit.
     sys.exit(excode)

From 587cc0349b8913a40db9993c0f6e7ec2a8b70823 Mon Sep 17 00:00:00 2001
From: Geoff
Date: Fri, 8 Dec 2017 13:15:09 +1100
Subject: [PATCH 4/7] Added import_auto script, add email to publish/promote

---
 helpers.py               |  4 +-
 import_auto.py           | 83 +++++++++++++++++++++++++++++++++++++---
 promote_content_views.py | 20 ++++++++++
 publish_content_views.py | 16 ++++++++
 4 files changed, 114 insertions(+), 9 deletions(-)

diff --git a/helpers.py b/helpers.py
index 3106465..1ff8b8d 100644
--- a/helpers.py
+++ b/helpers.py
@@ -96,9 +96,7 @@
 
 # Mailout pre-canned subjects
 MAILSUBJ_FI = "Satellite 6 import failure"
-MAILSUBJ_SI = "Satellite 6 import successful"
-MAILSUBJ_FP = "Satellite 6 publish failure"
-MAILSUBJ_SP = "Satellite 6 publish successful"
+MAILSUBJ_FP = "Satellite 6 publish/promote failure"
 
 def who_is_running():
     """ Return the OS user that is running the script """

diff --git a/import_auto.py b/import_auto.py
index 7236865..8656605 100644
--- a/import_auto.py
+++ b/import_auto.py
@@ -3,9 +3,22 @@
 import sys, os, glob
 import subprocess
 import argparse
+import datetime
 import helpers
 
 
+def dates():
+    # What day is it? (weekday() numbering: 0=Mon -> 6=Sun)
+    dayofweek = datetime.datetime.today().weekday()
+
+    # Figure out which week of the month we are in
+    weekofmonth = (datetime.datetime.now().day-1)/7+1
+
+    print "Day %s of week %s" % (dayofweek, weekofmonth)
+
+    return (dayofweek, weekofmonth)
+
+
 def run_imports(dryrun):
     print "Processing Imports..."
 
@@ -16,6 +29,7 @@ def run_imports(dryrun):
     # Assumes naming standard sat6_export_YYYYMMDD-HHMM_NAME.sha256
     # 'sorted' function should result in imports being done in correct order by filename
     tslist = []
+    good_imports = False
     for f in sorted(infiles):
         dstime = f.split('_')[-2]
         dsname = (f.split('_')[-1]).split('.')[-2]
@@ -33,11 +47,53 @@ def run_imports(dryrun):
         if not dryrun:
             for dataset in tslist:
                 rc = subprocess.call(['/usr/local/bin/sat_import', '-u', '-r', '-d', dataset])
-                print rc
+
+                # If the import is successful
+                if rc == 0:
+                    good_imports = True
+
+            # Check the 'good import' state - this triggers publish if we are good here
+            print good_imports
+
         else:
             msg = "Dry run - not actually performing import"
             helpers.log_msg(msg, 'WARNING')
 
+    return good_imports
+
+
+def publish_cv(dryrun):
+    print "Running Content View Publish..."
+
+    if not dryrun:
+        rc = subprocess.call(['/usr/local/bin/publish_content_views', '-q', '-a'])
+    else:
+        msg = "Dry run - not actually performing publish"
+        helpers.log_msg(msg, 'WARNING')
+        rc = subprocess.call(['/usr/local/bin/publish_content_views', '-q', '-a', '-d'])
+
+
+def promote_cv(dryrun, lifecycle):
+    print "Running Content View Promotion..."
+
+    if not dryrun:
+        rc = subprocess.call(['/usr/local/bin/promote_content_views', '-q', '-e', lifecycle])
+    else:
+        msg = "Dry run - not actually performing promotion"
+        helpers.log_msg(msg, 'WARNING')
+        rc = subprocess.call(['/usr/local/bin/promote_content_views', '-q', '-d', '-e', lifecycle])
+
+
+def clean_cv(dryrun):
+    print "Running Content View Cleanup..."
+
+    if not dryrun:
+        rc = subprocess.call(['/usr/local/bin/clean_content_views', '-a', '-c'])
+    else:
+        msg = "Dry run - not actually performing cleanup"
+        helpers.log_msg(msg, 'WARNING')
+        rc = subprocess.call(['/usr/local/bin/clean_content_views', '-a', '-c', '-d'])
+
 
 def main(args):
 
@@ -51,26 +107,41 @@ def main(args):
 
     args = parser.parse_args()
 
+    # Set default flags and read in options given to us
     if args.dryrun:
         dryrun = True
     else:
         dryrun = False
 
+    run_publish = False
 
-    # Check if there are any imports in our input dir and import them
-    run_imports(dryrun)
+    # Determine the day of week and week of month for use in our scheduling
+    (dayofweek, weekofmonth) = dates()
 
-    # If all imports successful run publish
+    # Run promotion first - this ensures content consistency (QA->Prod, Library->QA)
+    if dayofweek == 1:
+        if weekofmonth == 4:
+            promote_cv(dryrun, 'Production')
 
-    ### Run promote on scheduled day
+        if weekofmonth == 2:
+            promote_cv(dryrun, 'Quality')
 
+    # Every day, check if there are any imports in our input dir and import them.
+    # run_publish will be returned as 'True' if any successful imports were performed.
+    # If no imports are performed, or they fail, publish will not be triggered.
+    run_publish = run_imports(dryrun)
 
+    if run_publish:
+        publish_cv(dryrun)
 
-    ### Run cleanup on scheduled day
+    ### Run cleanup on scheduled day
+    if dayofweek == 3:
+        if weekofmonth == 4:
+            clean_cv(dryrun)
 
-
-
-
-
 if __name__ == "__main__":
     try:
         main(sys.argv[1:])

diff --git a/promote_content_views.py b/promote_content_views.py
index c1aba1a..dd45dbf 100644
--- a/promote_content_views.py
+++ b/promote_content_views.py
@@ -58,6 +58,10 @@ def get_cv(org_id, target_env, env_list, prior_list, promote_list):
     if not target_env in env_list:
         msg = "Target environment '" + target_env + "' not found"
         helpers.log_msg(msg, 'ERROR')
+        if helpers.MAILOUT:
+            helpers.tf.seek(0)
+            output = "{}".format(helpers.tf.read())
+            helpers.mailout(helpers.MAILSUBJ_FP, output)
         sys.exit(1)
     else:
         target_env_id = env_list[target_env]
@@ -131,6 +135,10 @@ def promote(target_env, ver_list, ver_descr, ver_version, env_list, prior_list,
     if not ver_list:
         msg = "No content view versions found matching promotion criteria"
         helpers.log_msg(msg, 'WARNING')
+        if helpers.MAILOUT:
+            helpers.tf.seek(0)
+            output = "{}".format(helpers.tf.read())
+            helpers.mailout(helpers.MAILSUBJ_FP, output)
         sys.exit(1)
 
     # Break repos to promote into batches as configured in config.yml
@@ -259,6 +267,10 @@ def main(args):
     if not promote_list:
         msg = "Cannot find promotion configuration for '" + target_env + "'"
         helpers.log_msg(msg, 'ERROR')
+        if helpers.MAILOUT:
+            helpers.tf.seek(0)
+            output = "{}".format(helpers.tf.read())
+            helpers.mailout(helpers.MAILSUBJ_FP, output)
         sys.exit(1)
 
     msg = "Config found for CV's " + str(promote_list)
@@ -281,6 +293,14 @@ def main(args):
         phistory[target_env] = datetime.datetime.strftime(datetime.datetime.now(), '%Y-%m-%d')
         pickle.dump(phistory, open(vardir + '/promotions.pkl', 'wb'))
 
+    # Run the mailout
+    if helpers.MAILOUT:
+        helpers.tf.seek(0)
+        output = "{}".format(helpers.tf.read())
+        message = "Promotion completed successfully\n\n" + output
+        subject = "Satellite 6 promotion completed"
+        helpers.mailout(subject, message)
+
     # Exit cleanly
     sys.exit(0)

diff --git a/publish_content_views.py b/publish_content_views.py
index 766cdcd..38e01e1 100644
--- a/publish_content_views.py
+++ b/publish_content_views.py
@@ -67,6 +67,10 @@ def publish(ver_list, ver_descr, ver_version, dry_run, runuser, quiet):
     if not ver_list:
         msg = "No content view versions found matching publication criteria"
         helpers.log_msg(msg, 'ERROR')
+        if helpers.MAILOUT:
+            helpers.tf.seek(0)
+            output = "{}".format(helpers.tf.read())
+            helpers.mailout(helpers.MAILSUBJ_FP, output)
         sys.exit(1)
 
     # Break repos to publish into batches as configured in config.yml
@@ -189,6 +193,10 @@ def main(args):
     if not publish_list:
         msg = "Cannot find publish configuration"
         helpers.log_msg(msg, 'ERROR')
+        if helpers.MAILOUT:
+            helpers.tf.seek(0)
+            output = "{}".format(helpers.tf.read())
+            helpers.mailout(helpers.MAILSUBJ_FP, output)
         sys.exit(1)
 
     msg = "Config found for CV's " + str(publish_list)
@@ -207,6 +215,14 @@ def main(args):
         phistory['Library'] = datetime.datetime.strftime(datetime.datetime.now(), '%Y-%m-%d')
         pickle.dump(phistory, open(vardir + '/promotions.pkl', 'wb'))
 
+    # Run the mailout
+    if helpers.MAILOUT:
+        helpers.tf.seek(0)
+        output = "{}".format(helpers.tf.read())
+        message = "Publish completed successfully\n\n" + output
+        subject = "Satellite 6 publish completed"
+        helpers.mailout(subject, message)
+
     # Exit cleanly
     sys.exit(0)

From f81adcd7e39141aa2c913cc1686403843a968ca9 Mon Sep 17 00:00:00 2001
From: Geoff
Date: Fri, 8 Dec 2017 15:54:26 +1100
Subject: [PATCH 5/7] Update for auto_content scripts

---
 import_auto.py => auto_content.py | 32 ++++++++++++++++++++++---------
 bin/auto_content                  | 10 ++++++++++
 rel-eng/sat6_scripts.spec         |  4 ++++
 3 files changed, 37 insertions(+), 9 deletions(-)
 rename import_auto.py => auto_content.py (87%)
 create mode 100644 bin/auto_content

diff --git a/import_auto.py b/auto_content.py
similarity index 87%
rename from import_auto.py
rename to auto_content.py
index 8656605..9f85e32 100644
--- a/import_auto.py
+++ b/auto_content.py
@@ -52,9 +52,6 @@ def run_imports(dryrun):
                 if rc == 0:
                     good_imports = True
 
-            # Check the 'good import' state - this triggers publish if we are good here
-            print good_imports
-
         else:
             msg = "Dry run - not actually performing import"
             helpers.log_msg(msg, 'WARNING')
@@ -65,6 +62,9 @@ def run_imports(dryrun):
 def publish_cv(dryrun):
     print "Running Content View Publish..."
 
+    # Set the initial state
+    good_publish = False
+
     if not dryrun:
         rc = subprocess.call(['/usr/local/bin/publish_content_views', '-q', '-a'])
     else:
@@ -72,9 +72,17 @@ def publish_cv(dryrun):
         helpers.log_msg(msg, 'WARNING')
         rc = subprocess.call(['/usr/local/bin/publish_content_views', '-q', '-a', '-d'])
 
+    if rc == 0:
+        good_publish = True
+
+    return good_publish
+
 
 def promote_cv(dryrun, lifecycle):
-    print "Running Content View Promotion..."
+    print "Running Content View Promotion to " + lifecycle + "..."
+
+    # Set the initial state
+    good_promote = False
 
     if not dryrun:
         rc = subprocess.call(['/usr/local/bin/promote_content_views', '-q', '-e', lifecycle])
@@ -83,6 +91,11 @@ def promote_cv(dryrun, lifecycle):
         helpers.log_msg(msg, 'WARNING')
         rc = subprocess.call(['/usr/local/bin/promote_content_views', '-q', '-d', '-e', lifecycle])
 
+    if rc == 0:
+        good_promote = True
+
+    return good_promote
+
 
 def clean_cv(dryrun):
     print "Running Content View Cleanup..."
@@ -114,6 +127,7 @@ def main(args):
         dryrun = False
 
     run_publish = False
+    run_promote = True
 
     # Determine the day of week and week of month for use in our scheduling
     (dayofweek, weekofmonth) = dates()
@@ -122,10 +136,12 @@ def main(args):
     # Run promotion first - this ensures content consistency (QA->Prod, Library->QA)
     if dayofweek == 1:
         if weekofmonth == 4:
-            promote_cv(dryrun, 'Production')
+            run_promote = promote_cv(dryrun, 'Production')
 
-        if weekofmonth == 2:
-            promote_cv(dryrun, 'Quality')
+        # Run QA promotion in the 2nd and 4th weeks of the month. Conditional on Prod promotion success
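+        # (run_promote starts True, so in week 2 QA promotion still runs even though no Prod promotion has occurred)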
+        if weekofmonth == 2 or weekofmonth == 4:
+            if run_promote:
+                run_promote = promote_cv(dryrun, 'Quality')
 
 
     # Every day, check if there are any imports in our input dir and import them.
@@ -137,14 +153,12 @@ def main(args):
     run_publish = run_imports(dryrun)
 
     if run_publish:
         publish_cv(dryrun)
 
 
     ### Run cleanup on scheduled day
     if dayofweek == 3:
         if weekofmonth == 4:
             clean_cv(dryrun)
 
-
-
 if __name__ == "__main__":
     try:
         main(sys.argv[1:])

diff --git a/bin/auto_content b/bin/auto_content
new file mode 100644
index 0000000..4dbdffd
--- /dev/null
+++ b/bin/auto_content
@@ -0,0 +1,10 @@
+#!/usr/bin/python
+import sys
+
+sys.path.insert(0, '/usr/share/sat6_scripts')
+try:
+    import auto_content
+    auto_content.main(sys.argv[1:])
+except KeyboardInterrupt, e:
+    print >> sys.stderr, "\n\nExiting on user cancel."
+    sys.exit(1)

diff --git a/rel-eng/sat6_scripts.spec b/rel-eng/sat6_scripts.spec
index 713e5ca..b046166 100755
--- a/rel-eng/sat6_scripts.spec
+++ b/rel-eng/sat6_scripts.spec
@@ -37,12 +37,14 @@ install -m 0644 config/exports.yml.example %{buildroot}/usr/share/sat6_scripts/c
 install -m 0755 bin/check_sync %{buildroot}/usr/local/bin/check_sync
 install -m 0755 bin/sat_export %{buildroot}/usr/local/bin/sat_export
 install -m 0755 bin/sat_import %{buildroot}/usr/local/bin/sat_import
+install -m 0755 bin/auto_content %{buildroot}/usr/local/bin/auto_content
 install -m 0755 bin/clean_content_views %{buildroot}/usr/local/bin/clean_content_views
 install -m 0755 bin/publish_content_views %{buildroot}/usr/local/bin/publish_content_views
 install -m 0755 bin/promote_content_views %{buildroot}/usr/local/bin/promote_content_views
 install -m 0755 bin/download_manifest %{buildroot}/usr/local/bin/download_manifest
 install -m 0755 bin/push_puppetforge %{buildroot}/usr/local/bin/push_puppetforge
 install -m 0644 helpers.py %{buildroot}/usr/share/sat6_scripts/helpers.py
+install -m 0644 auto_content.py %{buildroot}/usr/share/sat6_scripts/auto_content.py
 install -m 0644 check_sync.py %{buildroot}/usr/share/sat6_scripts/check_sync.py
 install -m 0644 sat_export.py %{buildroot}/usr/share/sat6_scripts/sat_export.py
 install -m 0644 sat_import.py %{buildroot}/usr/share/sat6_scripts/sat_import.py
@@ -84,6 +86,7 @@ gzip -9c man/sat_import.8 > %{buildroot}/usr/local/share/man/man8/sat_import.8.g
 /usr/local/share/man/man8/sat_import.8.gz
 
 /usr/share/sat6_scripts/helpers.py
+/usr/share/sat6_scripts/auto_content.py
 /usr/share/sat6_scripts/check_sync.py
 /usr/share/sat6_scripts/sat_export.py
 /usr/share/sat6_scripts/sat_import.py
@@ -93,6 +96,7 @@ gzip -9c man/sat_import.8 > %{buildroot}/usr/local/share/man/man8/sat_import.8.g
 /usr/share/sat6_scripts/download_manifest.py
 /usr/share/sat6_scripts/push_puppetforge.py
 
+/usr/local/bin/auto_content
 /usr/local/bin/check_sync
 /usr/local/bin/sat_export
 /usr/local/bin/sat_import

From 75e26575676658bcf26a7b1aad11603413c04a03 Mon Sep 17 00:00:00 2001
From: Geoff
Date: Fri, 8 Dec 2017 16:59:03 +1100
Subject: [PATCH 6/7] Tweaks after further testing

---
 CHANGELOG.md              |  9 +++++++--
 README.md                 | 11 +++++++++++
 auto_content.py           | 34 ++++++++++++++++++++++++++++++----
 man/sat6_scripts.8        | 18 ++++++++++++++++++
 rel-eng/sat6_scripts.spec |  3 +++
 sat_export.py             |  2 +-
 6 files changed, 70 insertions(+), 7 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 904a0c3..7759d5e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,14 +5,19 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.
 ## [Unreleased]
 ### Added
+- push_puppetforge now supports JFrog Artifactory repository via HTTP POST
 - sat_import now checks for exports that have not been imported (missed/skipped)
-- --fixhistory option in sat_import to align import/export histories
-- Email output capability for notifications when automating scripts
+- sat_import --fixhistory option to force align import/export histories
+- Email notification capability for use when automating content scripts
 - Add unattended option to allow scripts to be automated
+- auto_content scripts to allow unattended import/publish/promote/clean activity
 
 ### Changed
 - --notar export saved in /cdn_export dir rather than /export to prevent it being deleted
 
+### Removed
+- Skip GPG short option (-n)
+
 ## [1.1.1] - 2017-10-25
 
 ### Added
 
diff --git a/README.md b/README.md
index e1bc28e..482bd99 100644
--- a/README.md
+++ b/README.md
@@ -495,3 +495,14 @@ optional arguments:
   ./promote_content_view.py -e Production -a    # Promote all views to Production
   ./promote_content_view.py -e Quality -d       # See what would be done for Quality
 ```
+
+
+# auto_content
+Sample script allowing unattended automation of content management. It finds
+any import datasets present and imports them in order. A successful import
+then triggers a publish. On nominated days/weeks, content is promoted through
+the lifecycle stages, and content view cleanup is also performed. Like the
+other scripts it calls, it supports a dry run (-d) option to show what would
+be performed without actually doing it.
+
+This script can be copied and extended to support custom automation requirements.

diff --git a/auto_content.py b/auto_content.py
index 9f85e32..6464a7c 100644
--- a/auto_content.py
+++ b/auto_content.py
@@ -97,6 +97,27 @@ def promote_cv(dryrun, lifecycle):
     return good_promote
 
 
+def push_puppet(dryrun):
+    print "Pushing puppet modules to puppet-forge server..."
+
+    # Set the initial state
+    good_puppetpush = False
+
+    if not dryrun:
+        # Push any new modules to the puppet-forge server
+        rc = subprocess.call(['/usr/local/bin/push_puppetforge', '-r', 'puppet-forge'])
+
+        # If the push is successful
+        if rc == 0:
+            good_puppetpush = True
+
+    else:
+        msg = "Dry run - not actually performing module push"
+        helpers.log_msg(msg, 'WARNING')
+
+    return good_puppetpush
+
+
 def clean_cv(dryrun):
     print "Running Content View Cleanup..."
@@ -117,6 +138,8 @@ def main(args):
         description='Imports, Publishes and Promotes content views.')
     parser.add_argument('-d', '--dryrun', help='Dry Run - Only show what will be done',
                         required=False, action="store_true")
+    parser.add_argument('-p', '--puppet', help='Include puppet-forge module push',
+                        required=False, action="store_true")
 
     args = parser.parse_args()
 
@@ -146,15 +169,18 @@ def main(args):
     # Every day, check if there are any imports in our input dir and import them.
     # run_publish will be returned as 'True' if any successful imports were performed.
-    # If no imports are performed, or they fail, publish will not be triggered.
+    # If no imports are performed, or they fail, publish can't be triggered.
     run_publish = run_imports(dryrun)
 
+    # If the imports succeeded, we can go ahead and publish the new content to Library
     if run_publish:
         publish_cv(dryrun)
 
+    # Push any new puppet-forge modules if we have requested that
+    if args.puppet:
+        push_puppet(dryrun)
 
-
-    ### Run cleanup on scheduled day
-    if dayofweek == 3:
+    # Run content view cleanup once a month, after we have done all promotions for the month.
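+    # (dates() numbers days 0=Mon -> 6=Sun, so dayofweek == 4 is a Friday run)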
+    if dayofweek == 4:
         if weekofmonth == 4:
             clean_cv(dryrun)
 
diff --git a/man/sat6_scripts.8 b/man/sat6_scripts.8
index f56cd5c..68bd581 100644
--- a/man/sat6_scripts.8
+++ b/man/sat6_scripts.8
@@ -89,6 +89,24 @@ Debugging enabled.
 .RE
 
 
+.B email:
+.br
+.B "  mailout: False"
+.RS
+Enable/Disable mailout functionality.
+.RE
+
+.B "  mailfrom: Satellite 6"
+.RS
+Sender email address envelope.
+.RE
+
+.B "  mailto: satadmin@example.org"
+.RS
+Recipient email address.
+.RE
+
+
 .B export:
 .br
 .B "  dir: /var/sat-export"

diff --git a/rel-eng/sat6_scripts.spec b/rel-eng/sat6_scripts.spec
index b046166..f707978 100755
--- a/rel-eng/sat6_scripts.spec
+++ b/rel-eng/sat6_scripts.spec
@@ -130,6 +130,9 @@ mandb -q
 
 %changelog
+* Fri Dec 8 2017 Geoff Gatward 1.2.0
+- Refer https://github.com/ggatward/sat6_scripts/blob/1.2.0/CHANGELOG.md
+
 * Thu Oct 25 2017 Geoff Gatward 1.1.1
 - Refer https://github.com/ggatward/sat6_scripts/blob/1.1.1/CHANGELOG.md
 
diff --git a/sat_export.py b/sat_export.py
index 1b7d1a1..e746d9d 100644
--- a/sat_export.py
+++ b/sat_export.py
@@ -685,7 +685,7 @@ def main(args):
                         action="store_true")
     parser.add_argument('-L', '--list', help='Display export history', required=False,
                         action="store_true")
-    parser.add_argument('-n', '--nogpg', help='Skip GPG checking', required=False,
+    parser.add_argument('--nogpg', help='Skip GPG checking', required=False,
                         action="store_true")
     parser.add_argument('-u', '--unattended', help='Answer any prompts safely, allowing automated usage',
                         required=False, action="store_true")

From 64491b34f3f029674b9b2fb115adb8f9321665b5 Mon Sep 17 00:00:00 2001
From: Geoff
Date: Sun, 10 Dec 2017 20:05:04 +1100
Subject: [PATCH 7/7] remove tempfile dependency

---
 sat_export.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sat_export.py b/sat_export.py
index e746d9d..f61d90e 100644
--- a/sat_export.py
+++ b/sat_export.py
@@ -11,7 +11,7 @@
 """
 
 import sys, argparse, datetime, os, shutil, pickle, re
-import fnmatch, subprocess, tarfile, tempfile
+import fnmatch, subprocess, tarfile
 import simplejson as json
 from glob import glob
 from distutils.dir_util import copy_tree
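Example cron schedule for the auto_content wrapper added in this series (illustrative
only - it assumes the /usr/local/bin install path from sat6_scripts.spec and the
logging directory from config.yml; adjust the run time to suit your own export cycle):

  # /etc/cron.d/sat6_scripts - process staged imports, then publish/promote/clean as scheduled
  30 01 * * * root /usr/local/bin/auto_content >> /var/log/satellite/auto_content.log 2>&1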