From 0391d4ba04d42bbc226a409f5d74b30b9b098945 Mon Sep 17 00:00:00 2001 From: Geoff Gatward Date: Mon, 19 Dec 2016 13:43:30 +1100 Subject: [PATCH] Cleanup prior to RPM packaging --- bin/check_sync | 2 +- bin/clean_content_views | 2 +- ...ote_content_view => promote_content_views} | 6 +-- ...ish_content_view => publish_content_views} | 6 +-- bin/sat_export | 2 +- bin/sat_import | 2 +- ...ontent_view.py => promote_content_views.py | 0 ...ontent_view.py => publish_content_views.py | 0 sat_export.py | 23 ++++++++- sat_import.py | 51 +++++++++++++++++++ 10 files changed, 82 insertions(+), 12 deletions(-) rename bin/{promote_content_view => promote_content_views} (51%) rename bin/{publish_content_view => publish_content_views} (51%) rename promote_content_view.py => promote_content_views.py (100%) rename publish_content_view.py => publish_content_views.py (100%) diff --git a/bin/check_sync b/bin/check_sync index 587c0a3..6dd0762 100755 --- a/bin/check_sync +++ b/bin/check_sync @@ -1,7 +1,7 @@ #!/usr/bin/python import sys -sys.path.insert(0, '/usr/local/bin/sat6_scripts') +sys.path.insert(0, '/usr/share/sat6_scripts') try: import check_sync check_sync.main(sys.argv[1:]) diff --git a/bin/clean_content_views b/bin/clean_content_views index ed8fa15..ad598f4 100755 --- a/bin/clean_content_views +++ b/bin/clean_content_views @@ -1,7 +1,7 @@ #!/usr/bin/python import sys -sys.path.insert(0, '/usr/local/bin/sat6_scripts') +sys.path.insert(0, '/usr/share/sat6_scripts') try: import clean_content_views clean_content_views.main(sys.argv[1:]) diff --git a/bin/promote_content_view b/bin/promote_content_views similarity index 51% rename from bin/promote_content_view rename to bin/promote_content_views index 894e801..b208f8f 100755 --- a/bin/promote_content_view +++ b/bin/promote_content_views @@ -1,10 +1,10 @@ #!/usr/bin/python import sys -sys.path.insert(0, '/usr/local/bin/sat6_scripts') +sys.path.insert(0, '/usr/share/sat6_scripts') try: - import promote_content_view - 
promote_content_view.main(sys.argv[1:]) + import promote_content_views + promote_content_views.main(sys.argv[1:]) except KeyboardInterrupt, e: print >> sys.stderr, "\n\nExiting on user cancel." sys.exit(1) diff --git a/bin/publish_content_view b/bin/publish_content_views similarity index 51% rename from bin/publish_content_view rename to bin/publish_content_views index da03022..7a12c76 100755 --- a/bin/publish_content_view +++ b/bin/publish_content_views @@ -1,10 +1,10 @@ #!/usr/bin/python import sys -sys.path.insert(0, '/usr/local/bin/sat6_scripts') +sys.path.insert(0, '/usr/share/sat6_scripts') try: - import publish_content_view - publish_content_view.main(sys.argv[1:]) + import publish_content_views + publish_content_views.main(sys.argv[1:]) except KeyboardInterrupt, e: print >> sys.stderr, "\n\nExiting on user cancel." sys.exit(1) diff --git a/bin/sat_export b/bin/sat_export index 22edcfd..2a4f61d 100755 --- a/bin/sat_export +++ b/bin/sat_export @@ -1,7 +1,7 @@ #!/usr/bin/python import sys -sys.path.insert(0, '/usr/local/bin/sat6_scripts') +sys.path.insert(0, '/usr/share/sat6_scripts') try: import sat_export sat_export.main(sys.argv[1:]) diff --git a/bin/sat_import b/bin/sat_import index e0b67ad..f6c72e6 100755 --- a/bin/sat_import +++ b/bin/sat_import @@ -1,7 +1,7 @@ #!/usr/bin/python import sys -sys.path.insert(0, '/usr/local/bin/sat6_scripts') +sys.path.insert(0, '/usr/share/sat6_scripts') try: import sat_import sat_import.main(sys.argv[1:]) diff --git a/promote_content_view.py b/promote_content_views.py similarity index 100% rename from promote_content_view.py rename to promote_content_views.py diff --git a/publish_content_view.py b/publish_content_views.py similarity index 100% rename from publish_content_view.py rename to publish_content_views.py diff --git a/sat_export.py b/sat_export.py index bfd0e53..947c3c5 100755 --- a/sat_export.py +++ b/sat_export.py @@ -285,6 +285,19 @@ def export_puppet(repo_id, repo_label, repo_relative, last_export, 
export_type, return numfiles +def count_packages(repo_id): + """ + Return the number of packages/erratum in a respository + """ + result = helpers.get_json( + helpers.KATELLO_API + "repositories/" + str(repo_id) + ) + + numpkg = result['content_counts']['rpm'] + numerrata = result['content_counts']['erratum'] + + return str(numpkg) + ':' + str(numerrata) + def check_running_tasks(label, name): """ @@ -639,6 +652,7 @@ def main(args): # Get the org_id (Validates our connection to the API) org_id = helpers.get_org_id(org_name) exported_repos = [] + package_count = {} # If a specific environment is requested, find and read that config file repocfg = os.path.join(dir, confdir + '/exports.yml') if args.env: @@ -822,6 +836,10 @@ def main(args): ok_to_export = check_running_tasks(repo_result['label'], ename) if ok_to_export: + # Count the number of packages + numpkg = count_packages(repo_result['id']) + package_count[repo_result['label']] = numpkg + # Trigger export on the repo export_id = export_repo(repo_result['id'], last_export, export_type) @@ -973,9 +991,10 @@ def main(args): # Define the location of our exported data. export_dir = helpers.EXPORTDIR + "/export" - # Write out the list of exported repos. This will be transferred to the disconnected system - # and used to perform the repo sync tasks during the import. + # Write out the list of exported repos and the package counts. These will be transferred to the + # disconnected system and used to perform the repo sync tasks during the import. 
pickle.dump(exported_repos, open(export_dir + '/exported_repos.pkl', 'wb')) + pickle.dump(package_count, open(export_dir + '/package_count.pkl', 'wb')) # Run GPG Checks on the exported RPMs if not args.nogpg: diff --git a/sat_import.py b/sat_import.py index ee50021..f827ec3 100755 --- a/sat_import.py +++ b/sat_import.py @@ -159,6 +159,53 @@ def sync_content(org_id, imported_repos): return delete_override +def count_packages(repo_id): + """ + Return the number of packages/erratum in a repository + """ + result = helpers.get_json( + helpers.KATELLO_API + "repositories/" + str(repo_id) + ) + + numpkg = result['content_counts']['rpm'] + numerrata = result['content_counts']['erratum'] + + return numpkg, numerrata + + +def check_counts(package_count): + """ + Verify the number of pkgs/erratum in each repo matches the sync host. + Input is a dictionary loaded from a pickle that was created on the sync + host in format {Repo_Label: 'pkgs:erratum'} + """ + + # Get a listing of repositories in this Satellite + enabled_repos = helpers.get_p_json( + helpers.KATELLO_API + "/repositories/", \ + json.dumps( + { + "organization_id": org_id, + "per_page": '1000', + } + )) + + # First loop through the repos in the import dict and find the local ID + for repo, counts in package_count.iteritems(): + print repo, counts + # Split the count data into packages and erratum + sync_pkgs = counts.split(':')[0] + sync_erratum = counts.split(':')[1] + + for repo_result in enabled_repos['results']: + if repo in repo_result['label']: + print repo_result['label'], repo_result['id'] + local_pkgs, local_erratum = count_packages(repo_result['id']) + + print "Packages: " + str(sync_pkgs), str(local_pkgs) + print "Erratum: " + str(sync_erratum), str(local_erratum) + + def main(args): """ Main Routine """ @@ -249,10 +296,14 @@ def main(args): # We need to figure out which repos to sync.
This comes to us via a pickle containing # a list of repositories that were exported imported_repos = pickle.load(open('exported_repos.pkl', 'rb')) + package_count = pickle.load(open('package_count.pkl', 'rb')) # Run a repo sync on each imported repo (delete_override) = sync_content(org_id, imported_repos) + # Verify the repository package/erratum counts match the sync host + check_counts(package_count) + print helpers.GREEN + "Import complete.\n" + helpers.ENDC print 'Please publish content views to make new content available.'