From a9b8b80c0b9c360752c4f9d0b5b4761c78ea4fef Mon Sep 17 00:00:00 2001 From: Geoff Gatward Date: Mon, 12 Mar 2018 14:43:00 +1100 Subject: [PATCH 1/3] Support Satellite 6.3 (#39) * Sat63 fixes (#38) * New methods for Sat 6.3 yum export * Fix to puppet exporter to handle backend_id * Fix for 6.3 file exports * Fix DoV export for 6.3 * Count DRPMs as well as RPMs * Update README * Version bump to 1.2.3 --- CHANGELOG.md | 11 +++- README.md | 11 +++- rel-eng/sat6_scripts.spec | 12 +++- sat_export.py | 125 +++++++++++++++++++++++++++++--------- 4 files changed, 123 insertions(+), 36 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7ef3e4e..e241f4c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,14 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. ## [Unreleased] +## [1.2.3] - 2018-03-12 +### Changed +- Export package count now counts DRPM packages exported by Sat 6.3 + +### Fixed +- sat_export did not handle new backend_identifier value generated by Sat 6.3 + + ## [1.2.2] - 2018-02-25 ### Added - Option to tag a published content view with a custom comment @@ -77,7 +85,8 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. ## 0.6 - 2017-02-27 - Last of a series of pre-release betas -[Unreleased]: https://github.com/RedHatSatellite/sat6_scripts/compare/1.2.2...HEAD +[Unreleased]: https://github.com/RedHatSatellite/sat6_scripts/compare/1.2.3...HEAD +[1.2.3]: https://github.com/RedHatSatellite/sat6_scripts/compare/1.2.2...1.2.3 [1.2.2]: https://github.com/RedHatSatellite/sat6_scripts/compare/1.2.1...1.2.2 [1.2.1]: https://github.com/RedHatSatellite/sat6_scripts/compare/1.2.0...1.2.1 [1.2.0]: https://github.com/RedHatSatellite/sat6_scripts/compare/1.1.1...1.2.0 diff --git a/README.md b/README.md index 330b1e2..dadadc5 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,12 @@ Importing content in a disconnected environment can be a challenge. These scripts make use of the Inter-Satellite Sync capability in Satellite 6.2 to allow for full and incremental export/import of content between environments. -These scripts have been written and tested using Satellite 6.2 on RHEL7 +These scripts have been written and tested using Satellite 6.x on RHEL7. (RHEL6 not supported) +Export/Import testing has been performed on the following version combinations: +* 6.2 -> 6.2 +* 6.2 -> 6.3 +* 6.3 -> 6.2 +* 6.3 -> 6.3 ## Definitions Throughout these scripts the following references are used: @@ -14,8 +19,8 @@ Throughout these scripts the following references are used: # Requirements -* Satellite >= 6.2.x -* Python >= 2.7 +* Satellite >= 6.2.9 +* Python = 2.7 * PyYAML The Export and Import scripts are intended to be run on the Satellite servers directly. 
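The CHANGELOG entry above notes that sat_export previously mishandled the new `backend_identifier` value returned by the Satellite 6.3 repository API. As a rough illustration of that idea only (not the patch's exact code, which appears later in the sat_export.py hunks), a path-selection helper could look like the sketch below; `EXPORTDIR` and the shape of the repository API result are assumptions:

```python
# Illustration only: choose the export base path for a repository API result.
# Satellite 6.3 exposes a Pulp 'backend_identifier'; 6.2 does not, so the path
# is assembled from org, product and repo label instead.
import os

EXPORTDIR = "/var/sat-export"   # assumed export root, normally read from config


def repo_basepath(org_name, product, repo):
    if 'backend_identifier' in repo:
        # Satellite 6.3: the Pulp repo id maps directly to the on-disk export dir
        return os.path.join(EXPORTDIR, repo['backend_identifier'])
    # Satellite 6.2: build the directory name from org, product and repo label
    return os.path.join(EXPORTDIR, org_name + "-" + product + "-" + repo['label'])
```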
diff --git a/rel-eng/sat6_scripts.spec b/rel-eng/sat6_scripts.spec index cca6e35..b6317f9 100755 --- a/rel-eng/sat6_scripts.spec +++ b/rel-eng/sat6_scripts.spec @@ -1,11 +1,11 @@ Name: sat6_scripts -Version: 1.2.1 -Release: 2%{?dist} +Version: 1.2.3 +Release: 1%{?dist} Summary: Scripts to automate Satellite 6 tasks License: GPL URL: https://github.com/ggatward/sat6_scripts -Source0: sat6_scripts-1.2.1.tar.gz +Source0: sat6_scripts-1.2.3.tar.gz Requires: python >= 2.7, PyYAML @@ -130,6 +130,12 @@ mandb -q %changelog +* Mon Mar 12 2018 Geoff Gatward 1.2.3 +- Refer https://github.com/ggatward/sat6_scripts/blob/1.2.3/CHANGELOG.md + +* Sun Feb 25 2018 Geoff Gatward 1.2.2 +- Refer https://github.com/ggatward/sat6_scripts/blob/1.2.2/CHANGELOG.md + * Mon Dec 11 2017 Geoff Gatward 1.2.1 - Refer https://github.com/ggatward/sat6_scripts/blob/1.2.1/CHANGELOG.md diff --git a/sat_export.py b/sat_export.py index d344217..c3fcec4 100644 --- a/sat_export.py +++ b/sat_export.py @@ -48,8 +48,8 @@ def get_cv(org_id): msg = " Version ID: " + str(ver['id']) helpers.log_msg(msg, 'DEBUG') - # There will only ever be one DOV - return cv_result['id'] + # Return the ID (should be '1') and the label (forms part of the export path name) + return cv_result['id'], cv_result['label'] # Promote a content view version def export_cv(dov_ver, last_export, export_type): @@ -160,7 +160,7 @@ def export_repo(repo_id, last_export, export_type): return str(task_id) -def export_iso(repo_id, repo_label, repo_relative, last_export, export_type): +def export_iso(repo_id, repo_path, repo_label, repo_relative, last_export, export_type, satver): """ Export iso repository Takes the repository id and a start time (find newer than value) @@ -192,25 +192,45 @@ def export_iso(repo_id, repo_label, repo_relative, last_export, export_type): sys.stdout.flush() if export_type == 'full': - os.system('find -L /var/lib/pulp/published/http/isos/*' + repo_label \ + os.system('find -L /var/lib/pulp/published/http/isos/*' + repo_path \ + ' -type f -exec cp --parents -Lrp {} ' + ISOEXPORTDIR + " \;") else: - os.system('find -L /var/lib/pulp/published/http/isos/*' + repo_label \ + os.system('find -L /var/lib/pulp/published/http/isos/*' + repo_path \ + ' -type f -newerct $(date +%Y-%m-%d -d "' + last_export + '") -exec cp --parents -Lrp {} ' \ + ISOEXPORTDIR + ' \;') # We need to copy the manifest anyway, otherwise we'll cause import issues if we have an empty repo - os.system('find -L /var/lib/pulp/published/http/isos/*' + repo_label \ + os.system('find -L /var/lib/pulp/published/http/isos/*' + repo_path \ + ' -name PULP_MANIFEST -exec cp --parents -Lrp {} ' + ISOEXPORTDIR + ' \;') # At this point the iso/ export dir will contain individual repos - we need to 'normalise' them - for dirpath, subdirs, files in os.walk(ISOEXPORTDIR): - for tdir in subdirs: - if repo_label in tdir: - # This is where the exported ISOs for our repo are located - INDIR = os.path.join(dirpath, tdir) - # And this is where we want them to be moved to so we can export them in Satellite format - # We need to knock off '/Library/' from beginning of repo_relative and replace with export/ + if satver == '6.2': + for dirpath, subdirs, files in os.walk(ISOEXPORTDIR): + for tdir in subdirs: + if repo_label in tdir: + # This is where the exported ISOs for our repo are located + INDIR = os.path.join(dirpath, tdir) + # And this is where we want them to be moved to so we can export them in Satellite format + # We need to knock off '/Library/' from beginning of repo_relative and replace with 
export/ + exportpath = "/".join(repo_relative.strip("/").split('/')[2:]) + OUTDIR = helpers.EXPORTDIR + '/export/' + exportpath + + # Move the files into the final export tree + if not os.path.exists(OUTDIR): + shutil.move(INDIR, OUTDIR) + + os.chdir(OUTDIR) + numfiles = len([f for f in os.walk(".").next()[2] if f[ -8: ] != "MANIFEST"]) + + msg = "File Export OK (" + str(numfiles) + " new files)" + helpers.log_msg(msg, 'INFO') + print helpers.GREEN + msg + helpers.ENDC + + else: + # Satellite 6.3 changed the published file structure + for dirpath, subdirs, files in os.walk(ISOEXPORTDIR): + if repo_relative in dirpath: + INDIR = dirpath exportpath = "/".join(repo_relative.strip("/").split('/')[2:]) OUTDIR = helpers.EXPORTDIR + '/export/' + exportpath @@ -289,7 +309,7 @@ def export_puppet(repo_id, repo_label, repo_relative, last_export, export_type, # Subtract the manifest from the number of files: numfiles = numfiles - 1 - msg = "Puppet Export OK (" + str(numfiles) + " new files)" + msg = "Puppet Export OK (" + str(numfiles) + " new modules)" helpers.log_msg(msg, 'INFO') print helpers.GREEN + msg + helpers.ENDC @@ -571,7 +591,7 @@ def create_tar(export_dir, name, export_history): os.system('sha256sum ' + short_tarfile + '_* > ' + short_tarfile + '.sha256') -def prep_export_tree(org_name): +def prep_export_tree(org_name, basepaths): """ Function to combine individual export directories into single export tree Export top level contains /content and /custom directories with 'listing' @@ -583,11 +603,16 @@ def prep_export_tree(org_name): devnull = open(os.devnull, 'wb') if not os.path.exists(helpers.EXPORTDIR + "/export"): os.makedirs(helpers.EXPORTDIR + "/export") - # Haven't found a nice python way to do this - yet... - subprocess.call("cp -rp " + helpers.EXPORTDIR + "/" + org_name + "*/" + org_name + \ - "/Library/* " + helpers.EXPORTDIR + "/export", shell=True, stdout=devnull, stderr=devnull) - # Remove original directores - os.system("rm -rf " + helpers.EXPORTDIR + "/" + org_name + "*/") + + # Copy the content from each exported repo into a common /export structure + for basepath in basepaths: + msg = "Processing " + basepath + helpers.log_msg(msg, 'DEBUG') + subprocess.call("cp -rp " + basepath + "*/" + org_name + \ + "/Library/* " + helpers.EXPORTDIR + "/export", shell=True, stdout=devnull, stderr=devnull) + + # Remove original directores + os.system("rm -rf " + basepath + "*/") # We need to re-generate the 'listing' files as we will have overwritten some during the merge msg = "Rebuilding listing files..." 
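The reworked prep_export_tree above still shells out to `cp -rp` to merge each exported basepath into the common `/export` tree. A pure-Python equivalent (a sketch only, not what the patch ships; the directory layout is assumed from the surrounding code) could use `os.walk` and `shutil.copy2`:

```python
# Sketch: merge one exported Library tree into the common export tree without
# shelling out to 'cp -rp'. Existing files are overwritten, matching cp's behaviour.
import os
import shutil


def merge_tree(src, dst):
    for root, dirs, files in os.walk(src):
        rel = os.path.relpath(root, src)
        target = dst if rel == '.' else os.path.join(dst, rel)
        if not os.path.isdir(target):
            os.makedirs(target)
        for fname in files:
            shutil.copy2(os.path.join(root, fname), os.path.join(target, fname))

# Usage (paths assumed from prep_export_tree):
#   merge_tree(basepath + "/" + org_name + "/Library", helpers.EXPORTDIR + "/export")
```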
@@ -725,6 +750,7 @@ def main(args): org_id = helpers.get_org_id(org_name) exported_repos = [] export_history = [] + basepaths = [] package_count = {} # If a specific environment is requested, find and read that config file repocfg = os.path.join(dir, confdir + '/exports.yml') @@ -864,7 +890,13 @@ def main(args): check_running_tasks(label, ename) # Get the version of the CV (Default Org View) to export - dov_ver = get_cv(org_id) + dov_ver, dov_label = get_cv(org_id) + + # Set the basepath of the export (needed due to Satellite 6.3 changes in other exports) + # 6.3 provides a 'latest_version' in the API that gives us '1.0' however this is not available + # in 6.2 so we must build the string manually for compatibility + basepath = helpers.EXPORTDIR + "/" + org_name + "-" + dov_label + "-v" + str(dov_ver) + ".0" + basepaths.append(basepath) # Now we have a CV ID and a starting date, and no conflicting tasks, we can export export_id = export_cv(dov_ver, last_export, export_type) @@ -951,7 +983,17 @@ def main(args): # First resolve the product label - this forms part of the export path product = get_product(org_id, repo_result['product']['cp_id']) # Now we can build the export path itself - basepath = helpers.EXPORTDIR + "/" + org_name + "-" + product + "-" + repo_result['label'] + + # Satellite 6.3 uses a new backend_identifier key in the API result + if 'backend_identifier' in repo_result: + basepath = helpers.EXPORTDIR + "/" + repo_result['backend_identifier'] + else: + basepath = helpers.EXPORTDIR + "/" + org_name + "-" + product + "-" + repo_result['label'] + + # Add to the basepath list so we can use specific paths later + # (Introduced due to path name changes in Sat6.3) + basepaths.append(basepath) + if export_type == 'incr': basepath = basepath + "-incremental" exportpath = basepath + "/" + repo_result['relative_path'] @@ -968,10 +1010,21 @@ def main(args): helpers.mailout(subject, output) sys.exit(1) - os.chdir(exportpath) - numrpms = len([f for f in os.walk(".").next()[2] if f[ -4: ] == ".rpm"]) - - msg = "Repository Export OK (" + str(numrpms) + " new packages)" + # Count the number of .rpm files in the exported repo (recursively) + numrpms = 0 + numdrpms = 0 + for dirpath, dirs, files in os.walk(exportpath): + for filename in files: + fname = os.path.join(dirpath,filename) + if fname.endswith('.rpm'): + numrpms = numrpms + 1 + if fname.endswith('.drpm'): + numdrpms = numdrpms + 1 + + if numdrpms == 0: + msg = "Repository Export OK (" + str(numrpms) + " new rpms)" + else: + msg = "Repository Export OK (" + str(numrpms) + " new rpms + " + str(numdrpms) + " drpms)" helpers.log_msg(msg, 'INFO') print helpers.GREEN + msg + helpers.ENDC @@ -1026,8 +1079,16 @@ def main(args): ok_to_export = check_running_tasks(repo_result['label'], ename) if ok_to_export: + # Satellite 6.3 uses a different path for published file content + if 'backend_identifier' in repo_result: + repo_path = repo_result['relative_path'] + satver = '6.3' + else: + repo_path = repo_result['label'] + satver = '6.2' + # Trigger export on the repo - numfiles = export_iso(repo_result['id'], repo_result['label'], repo_result['relative_path'], last_export, export_type) + numfiles = export_iso(repo_result['id'], repo_path, repo_result['label'], repo_result['relative_path'], last_export, export_type, satver) # Reset the export type to the user specified, in case we overrode it. 
export_type = orig_export_type @@ -1074,9 +1135,15 @@ def main(args): # Check if there are any currently running tasks that will conflict ok_to_export = check_running_tasks(repo_result['label'], ename) + # Satellite 6.3 uses a new backend_identifier key in the API result + if 'backend_identifier' in repo_result: + backend_id = repo_result['backend_identifier'] + else: + backend_id = repo_result['label'] + if ok_to_export: # Trigger export on the repo - numfiles = export_puppet(repo_result['id'], repo_result['label'], repo_result['relative_path'], last_export, export_type, pforge) + numfiles = export_puppet(repo_result['id'], backend_id, repo_result['relative_path'], last_export, export_type, pforge) # Reset the export type to the user specified, in case we overrode it. export_type = orig_export_type @@ -1099,7 +1166,7 @@ def main(args): # Combine resulting directory structures into a single repo format (top level = /content) - prep_export_tree(org_name) + prep_export_tree(org_name, basepaths) # Now we need to process the on-disk export data. # Define the location of our exported data. From 1de89ad8ff6e2b1b690baa491ea6df83339945c6 Mon Sep 17 00:00:00 2001 From: Andreas Nowak <3660291+4nn0@users.noreply.github.com> Date: Tue, 13 Mar 2018 10:51:05 +0100 Subject: [PATCH 2/3] skip version which are part of composite content views (#40) --- clean_content_views.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/clean_content_views.py b/clean_content_views.py index 3e8a7ca..8a595fe 100755 --- a/clean_content_views.py +++ b/clean_content_views.py @@ -62,6 +62,11 @@ def get_cv(org_id, cleanup_list, keep): return ver_list, ver_descr, ver_keep +def get_content_view_version(cvid): + cvv = helpers.get_json( + helpers.KATELLO_API + "content_view_versions/" + str(cvid)) + + return cvv def get_content_view_info(cvid): """ @@ -135,8 +140,10 @@ def cleanup(ver_list, ver_descr, dry_run, runuser, ver_keep, cleanall, ignorefir helpers.log_msg(msg, 'DEBUG') for version in cvinfo['versions']: - # Find versions that are not in any environment - if not version['environment_ids']: + # Get composite content views for version + cvv = get_content_view_version(version['id']) + # Find versions that are not in any environment and not in any composite content view + if not version['environment_ids'] and not cvv['composite_content_view_ids']: if not locked: msg = "Orphan view version " + str(version['version']) + " found in '" +\ str(ver_descr[cvid]) + "'" From 54e469e31a1f312035fc60f99b34c633eb428613 Mon Sep 17 00:00:00 2001 From: Geoff Gatward Date: Mon, 15 Oct 2018 08:57:06 +1100 Subject: [PATCH 3/3] Multiple issues in 6.3 resolved (#45) * Sat63 fixes (#38) * New methods for Sat 6.3 yum export * Fix to puppet exporter to handle backend_id * Fix for 6.3 file exports * Fix DoV export for 6.3 * Count DRPMs as well as RPMs * Update README * Version bump to 1.2.3 * Update CHANGELOG.md * Patch applied for Issues 42 and 43 * Add split size option * Remove diff file --- CHANGELOG.md | 15 +++++++++++++++ README.md | 6 +++++- helpers.py | 19 +++++++++++++++++++ man/sat_export.8 | 11 +++++++++-- sat_export.py | 35 ++++++++++++++++++++++------------- 5 files changed, 70 insertions(+), 16 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e241f4c..217d931 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,21 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). 
## [Unreleased] +### Fixed +- clean_content_views raised an exception if a CV version was included in a composite view. +- Default org view was assumed to be version 1.0. Correct version is now extracted (Issue #43) +- Org name and label do not always match. Issue with mixed case and spaces in org name (Issue #42) + +### Added +- Option to define the tar split size (Issue #44) + + +## [1.2.3] - 2018-03-12 +### Changed +- Export package count now counts DRPM packages exported by Sat 6.3 + +### Fixed +- sat_export did not handle new backend_identifier value generated by Sat 6.3 ## [1.2.3] - 2018-03-12 diff --git a/README.md b/README.md index dadadc5..4736f94 100644 --- a/README.md +++ b/README.md @@ -146,6 +146,9 @@ for import sync, however this behaviour can be overridden with the (-r) flag. Th will be useful to periodically ensure that the disconnected satellite repos are consistent - the repodata will indicate mismatches with synced content. +The exported content will be archived in TAR format, with a chunk size specified +by the (-S) option. The default is 4200Mb. + To export a selected repository set, the exports.yml config file must exist in the config directory. The format of this file is shown below, and contains one or more 'env' stanzas, containing a list of repositories to export. The repository name is @@ -195,7 +198,7 @@ directory being written to the import directory during the sat_import process. ### Help Output ``` -usage: sat_export.py [-h] [-o ORG] [-e ENV] [-a | -i | -s SINCE] [-l] [-n] +usage: sat_export.py [-h] [-o ORG] [-e ENV] [-a | -i | -s SINCE] [-l] [-n] [-S SIZE] Performs Export of Default Content View. @@ -212,6 +215,7 @@ optional arguments: --nogpg Skip GPG checking -u, --unattended Answer any prompts safely, allowing automated usage -r, --repodata Include repodata for repos with no incremental content + -S, --splitsize Size of split files in Megabytes, defaults to 4200 -p, --puppetforge Include puppet-forge-server format Puppet Forge repo --notar Do not archive the extracted content --forcexport Force export from an import-only (Disconnected) Satellite diff --git a/helpers.py b/helpers.py index f48a286..48365f7 100644 --- a/helpers.py +++ b/helpers.py @@ -219,6 +219,25 @@ def get_org_id(org_name): return org_id +def get_org_label(org_name): + """ + Return the Organisation label for a given Org Name + """ + # Check if our organization exists, and extract its label + org = get_json(SAT_API + "organizations/" + org_name) + # If the requested organization is not found, exit + if org.get('error', None): + msg = "Organization '%s' does not exist." % org_name + log_msg(msg, 'ERROR') + sys.exit(1) + else: + # Our organization exists, so let's grab the label and write some debug + org_label = org['label'] + msg = "Organisation '" + org_name + "' found with label " + org['label'] + log_msg(msg, 'DEBUG') + + return org_label + class ProgressBar: def __init__(self, duration): diff --git a/man/sat_export.8 b/man/sat_export.8 index 445f3ba..5d1dc81 100644 --- a/man/sat_export.8 +++ b/man/sat_export.8 @@ -49,9 +49,9 @@ can import directly from it. .BR download_manifest (8). .RE .RS 3 -- All content is archived into a chunked tar file, with each part being 4Gb to allow +- All content is archived into a chunked tar file, with each part being a default of 4Gb to allow .RS 2 -transfer via DVD if required. +transfer via DVD if required. This size can be changed if required. .RE .RE .RS 3 @@ -148,6 +148,13 @@ will only synchronise repositories that contain new packages. 
This option forces to synchronise ALL repositories even if no updates are present. .RE .PP +.BR "-S", " --splitsize" +.RS 3 +Define the size of the tar chunks generated during export. By default the size will be +4200Mb (4.2Gb) to allow for transfer of segments via DVD. In some cases data diodes +require smaller chunk sizes for reliable transfer. +.RE +.PP .BR "-p", " --puppetforge" .RS 3 If exporting puppetforge modules from Satellite, also export them in a format compatible diff --git a/sat_export.py b/sat_export.py index c3fcec4..2b7b55a 100644 --- a/sat_export.py +++ b/sat_export.py @@ -45,11 +45,13 @@ def get_cv(org_id): helpers.log_msg(msg, 'DEBUG') msg = " Version: " + str(ver['version']) helpers.log_msg(msg, 'DEBUG') + cv_ver = str(ver['version']) msg = " Version ID: " + str(ver['id']) helpers.log_msg(msg, 'DEBUG') # Return the ID (should be '1') and the label (forms part of the export path name) - return cv_result['id'], cv_result['label'] + return cv_result['id'], cv_ver, cv_result['label'] + # Promote a content view version def export_cv(dov_ver, last_export, export_type): @@ -536,7 +538,7 @@ def do_gpg_check(export_dir): print helpers.GREEN + "GPG Check - Pass" + helpers.ENDC -def create_tar(export_dir, name, export_history): +def create_tar(export_dir, name, export_history, splitsize): """ Create a TAR of the content we have exported Creates a single tar, then splits into DVD size chunks and calculates @@ -554,6 +556,7 @@ def create_tar(export_dir, name, export_history): pickle.dump(export_history, open(export_dir + '/exporthistory_' + name + '.pkl', 'wb')) os.chdir(export_dir) + print "export_dir is " + export_dir full_tarfile = helpers.EXPORTDIR + '/sat6_export_' + today + '_' + name short_tarfile = 'sat6_export_' + today + '_' + name with tarfile.open(full_tarfile, 'w') as archive: @@ -581,7 +584,7 @@ def create_tar(export_dir, name, export_history): msg = "Splitting TAR file..." 
helpers.log_msg(msg, 'INFO') print msg - os.system("split -d -b 4200M " + full_tarfile + " " + full_tarfile + "_") + os.system("split -d -b " + str(splitsize) + "M " + full_tarfile + " " + full_tarfile + "_") os.remove(full_tarfile) # Temporary until pythonic method is done @@ -591,7 +594,7 @@ def create_tar(export_dir, name, export_history): os.system('sha256sum ' + short_tarfile + '_* > ' + short_tarfile + '.sha256') -def prep_export_tree(org_name, basepaths): +def prep_export_tree(org_label, basepaths): """ Function to combine individual export directories into single export tree Export top level contains /content and /custom directories with 'listing' @@ -608,10 +611,10 @@ def prep_export_tree(org_name, basepaths): for basepath in basepaths: msg = "Processing " + basepath helpers.log_msg(msg, 'DEBUG') - subprocess.call("cp -rp " + basepath + "*/" + org_name + \ + subprocess.call("cp -rp " + basepath + "*/" + org_label + \ "/Library/* " + helpers.EXPORTDIR + "/export", shell=True, stdout=devnull, stderr=devnull) - # Remove original directores + # Remove original directories os.system("rm -rf " + basepath + "*/") # We need to re-generate the 'listing' files as we will have overwritten some during the merge @@ -722,6 +725,8 @@ def main(args): required=False, action="store_true") parser.add_argument('-p', '--puppetforge', help='Include puppet-forge-server format Puppet Forge repo', required=False, action="store_true") + parser.add_argument('-S', '--splitsize', help='Size of split files in Megabytes, defaults to 4200', + required=False, type=int, default=4200) args = parser.parse_args() # If we are set as the 'DISCONNECTED' satellite, we will generally be IMPORTING content. @@ -748,6 +753,7 @@ def main(args): # Get the org_id (Validates our connection to the API) org_id = helpers.get_org_id(org_name) + org_label = helpers.get_org_label(org_name) exported_repos = [] export_history = [] basepaths = [] @@ -890,16 +896,16 @@ def main(args): check_running_tasks(label, ename) # Get the version of the CV (Default Org View) to export - dov_ver, dov_label = get_cv(org_id) + dov_id, dov_ver, dov_label = get_cv(org_id) # Set the basepath of the export (needed due to Satellite 6.3 changes in other exports) # 6.3 provides a 'latest_version' in the API that gives us '1.0' however this is not available # in 6.2 so we must build the string manually for compatibility - basepath = helpers.EXPORTDIR + "/" + org_name + "-" + dov_label + "-v" + str(dov_ver) + ".0" + basepath = helpers.EXPORTDIR + "/" + org_label + "-" + dov_label + "-v" + str(dov_ver) basepaths.append(basepath) # Now we have a CV ID and a starting date, and no conflicting tasks, we can export - export_id = export_cv(dov_ver, last_export, export_type) + export_id = export_cv(dov_id, last_export, export_type) # Now we need to wait for the export to complete helpers.wait_for_task(export_id, 'export') @@ -964,7 +970,6 @@ def main(args): # Check if there are any currently running tasks that will conflict ok_to_export = check_running_tasks(repo_result['label'], ename) - if ok_to_export: # Count the number of packages numpkg = count_packages(repo_result['id']) @@ -1077,7 +1082,6 @@ def main(args): # Check if there are any currently running tasks that will conflict ok_to_export = check_running_tasks(repo_result['label'], ename) - if ok_to_export: # Satellite 6.3 uses a different path for published file content if 'backend_identifier' in repo_result: @@ -1134,6 +1138,11 @@ def main(args): # Check if there are any currently running tasks that will 
conflict ok_to_export = check_running_tasks(repo_result['label'], ename) + # Satellite 6.3 uses a new backend_identifier key in the API result + if 'backend_identifier' in repo_result: + backend_id = repo_result['backend_identifier'] + else: + backend_id = repo_result['label'] # Satellite 6.3 uses a new backend_identifier key in the API result if 'backend_identifier' in repo_result: @@ -1166,7 +1175,7 @@ def main(args): # Combine resulting directory structures into a single repo format (top level = /content) - prep_export_tree(org_name, basepaths) + prep_export_tree(org_label, basepaths) # Now we need to process the on-disk export data. # Define the location of our exported data. @@ -1186,7 +1195,7 @@ def main(args): # Add our exported data to a tarfile if not args.notar: - create_tar(export_dir, ename, export_history) + create_tar(export_dir, ename, export_history, args.splitsize) else: # We need to manually clean up a couple of working files from the export if os.path.exists(helpers.EXPORTDIR + "/iso"):
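The new `-S/--splitsize` option is passed straight through to `split -d -b <size>M`, and create_tar still carries the "Temporary until pythonic method is done" note. A minimal pure-Python split is sketched below under those assumptions; it produces the same `_00`, `_01`, ... chunk suffixes that the later `sha256sum` step expects, but it is an illustration rather than part of the patch:

```python
# Sketch only: split a tar archive into fixed-size chunks in Python instead of
# calling "split -d -b <splitsize>M". Chunk names use the same "_NN" suffixes.
def split_file(path, splitsize_mb=4200, bufsize=4 * 1024 * 1024):
    chunk_bytes = splitsize_mb * 1024 * 1024
    part = 0
    with open(path, 'rb') as src:
        buf = src.read(min(bufsize, chunk_bytes))
        while buf:
            written = 0
            with open("%s_%02d" % (path, part), 'wb') as out:
                while buf:
                    out.write(buf)
                    written += len(buf)
                    if written >= chunk_bytes:
                        break
                    # Never read past the current chunk boundary
                    buf = src.read(min(bufsize, chunk_bytes - written))
            part += 1
            buf = src.read(min(bufsize, chunk_bytes))
```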