diff options
author | Paul Eggleton <paul.eggleton@linux.intel.com> | 2018-02-27 16:14:43 +1300 |
---|---|---|
committer | Paul Eggleton <paul.eggleton@linux.intel.com> | 2018-05-04 23:57:52 +1200 |
commit | fd786875c3ad5ef0c448035bea6110ac8e323540 (patch) | |
tree | 910bac4b358f389856cb881b412d3554841f9d56 | |
parent | 6ca3d6649e2a223417c439fd2806984e93fb70aa (diff) | |
download | openembedded-core-contrib-fd786875c3ad5ef0c448035bea6110ac8e323540.tar.gz |
rrs/tools: add dry-run option to each script
Add the ability to run the scripts without writing changes back to the
database, for debugging purposes.
Signed-off-by: Paul Eggleton <paul.eggleton@linux.intel.com>
-rw-r--r-- | rrs/tools/common.py | 4 | ||||
-rwxr-xr-x | rrs/tools/rrs_distros.py | 79 | ||||
-rwxr-xr-x | rrs/tools/rrs_maintainer_history.py | 127 | ||||
-rwxr-xr-x | rrs/tools/rrs_upgrade_history.py | 45 | ||||
-rwxr-xr-x | rrs/tools/rrs_upstream_history.py | 97 |
5 files changed, 199 insertions(+), 153 deletions(-)
diff --git a/rrs/tools/common.py b/rrs/tools/common.py index 3891dfb205..b69b7016f3 100644 --- a/rrs/tools/common.py +++ b/rrs/tools/common.py @@ -7,6 +7,10 @@ import logging +class DryRunRollbackException(Exception): + pass + + def common_setup(): import sys, os sys.path.insert(0, os.path.realpath(os.path.join(os.path.dirname(__file__), '../../'))) diff --git a/rrs/tools/rrs_distros.py b/rrs/tools/rrs_distros.py index ead79d0bd1..0c529b743b 100755 --- a/rrs/tools/rrs_distros.py +++ b/rrs/tools/rrs_distros.py @@ -15,7 +15,7 @@ from datetime import datetime sys.path.insert(0, os.path.realpath(os.path.join(os.path.dirname(__file__)))) from common import common_setup, update_repo, load_recipes, \ - get_pv_type, get_logger + get_pv_type, get_logger, DryRunRollbackException common_setup() from layerindex import utils @@ -94,45 +94,54 @@ if __name__=="__main__": help = "Enable debug output", action="store_const", const=logging.DEBUG, dest="loglevel", default=logging.INFO) + parser.add_option("--dry-run", + help = "Do not write any data back to the database", + action="store_true", dest="dry_run", default=False) + options, args = parser.parse_args(sys.argv) logger.setLevel(options.loglevel) logger.debug("Starting recipe distros update ...") - with transaction.atomic(): - for layerbranch in LayerBranch.objects.all(): - (tinfoil, d, recipes) = load_recipes(layerbranch, bitbakepath, - fetchdir, settings, logger) - - if not recipes: - tinfoil.shutdown() - continue - - from oe import distro_check - logger.debug("Downloading distro's package information ...") - distro_check.create_distro_packages_list(fetchdir, d) - pkglst_dir = os.path.join(fetchdir, "package_lists") + try: + with transaction.atomic(): + for layerbranch in LayerBranch.objects.all(): + (tinfoil, d, recipes) = load_recipes(layerbranch, bitbakepath, + fetchdir, settings, logger) - RecipeDistro.objects.filter(recipe__layerbranch = layerbranch).delete() - - for recipe_data in recipes: - pn = 
recipe_data.getVar('PN', True) - - try: - recipe = Recipe.objects.get(pn = pn, layerbranch = layerbranch) - except: - logger.warn('%s: layer branch %s, NOT found' % (pn, - str(layerbranch))) + if not recipes: + tinfoil.shutdown() continue - distro_info = search_package_in_distros(pkglst_dir, recipe, recipe_data) - for distro, alias in distro_info.items(): - recipedistro = RecipeDistro() - recipedistro.recipe = recipe - recipedistro.distro = distro - recipedistro.alias = alias - recipedistro.save() - logger.debug('%s: layer branch %s, add distro %s alias %s' % (pn, - str(layerbranch), distro, alias)) - - tinfoil.shutdown() + from oe import distro_check + logger.debug("Downloading distro's package information ...") + distro_check.create_distro_packages_list(fetchdir, d) + pkglst_dir = os.path.join(fetchdir, "package_lists") + + RecipeDistro.objects.filter(recipe__layerbranch = layerbranch).delete() + + for recipe_data in recipes: + pn = recipe_data.getVar('PN', True) + + try: + recipe = Recipe.objects.get(pn = pn, layerbranch = layerbranch) + except: + logger.warn('%s: layer branch %s, NOT found' % (pn, + str(layerbranch))) + continue + + distro_info = search_package_in_distros(pkglst_dir, recipe, recipe_data) + for distro, alias in distro_info.items(): + recipedistro = RecipeDistro() + recipedistro.recipe = recipe + recipedistro.distro = distro + recipedistro.alias = alias + recipedistro.save() + logger.debug('%s: layer branch %s, add distro %s alias %s' % (pn, + str(layerbranch), distro, alias)) + + tinfoil.shutdown() + if options.dry_run: + raise DryRunRollbackException + except DryRunRollbackException: + pass diff --git a/rrs/tools/rrs_maintainer_history.py b/rrs/tools/rrs_maintainer_history.py index 0d0073cd00..aef24315df 100755 --- a/rrs/tools/rrs_maintainer_history.py +++ b/rrs/tools/rrs_maintainer_history.py @@ -13,7 +13,7 @@ import optparse import logging sys.path.insert(0, os.path.realpath(os.path.join(os.path.dirname(__file__)))) -from common import 
common_setup, update_repo, get_logger +from common import common_setup, update_repo, get_logger, DryRunRollbackException common_setup() from layerindex import utils, recipeparse @@ -67,7 +67,7 @@ def get_commit_info(info, logger): """ Recreate Maintainership history from the beign of Yocto Project """ -def maintainer_history(logger): +def maintainer_history(options, logger): layername = settings.CORE_LAYER_NAME branchname = "master" @@ -90,50 +90,67 @@ def maintainer_history(logger): commits = utils.runcmd("git log --format='%H' --reverse --date=rfc " + MAINTAINERS_INCLUDE_PATH, pokypath, logger=logger) - with transaction.atomic(): - for commit in commits.strip().split("\n"): - if RecipeMaintainerHistory.objects.filter(sha1=commit): - continue - - logger.debug("Analysing commit %s ..." % (commit)) - - (author_name, author_email, date, title) = \ - get_commit_info(utils.runcmd("git show " + commit, pokypath, - logger=logger), logger) - - author = Maintainer.create_or_update(author_name, author_email) - rms = RecipeMaintainerHistory(title=title, date=date, author=author, - sha1=commit) - rms.save() - - branchname = 'maintainer' + commit - utils.runcmd("git checkout %s -b %s -f" % (commit, branchname), - pokypath, logger=logger) - - lines = [line.strip() for line in open(maintainers_full_path)] - for line in lines: - res = get_recipe_maintainer(line, logger) - if res: - (pn, name, email) = res - qry = Recipe.objects.filter(pn = pn, layerbranch = layerbranch) - - if qry: - m = Maintainer.create_or_update(name, email) - + try: + with transaction.atomic(): + for commit in commits.strip().split("\n"): + if RecipeMaintainerHistory.objects.filter(sha1=commit): + continue + + logger.debug("Analysing commit %s ..." 
% (commit)) + + (author_name, author_email, date, title) = \ + get_commit_info(utils.runcmd("git show " + commit, pokypath, + logger=logger), logger) + + author = Maintainer.create_or_update(author_name, author_email) + rms = RecipeMaintainerHistory(title=title, date=date, author=author, + sha1=commit) + rms.save() + + branchname = 'maintainer' + commit + utils.runcmd("git checkout %s -b %s -f" % (commit, branchname), + pokypath, logger=logger) + + lines = [line.strip() for line in open(maintainers_full_path)] + for line in lines: + res = get_recipe_maintainer(line, logger) + if res: + (pn, name, email) = res + qry = Recipe.objects.filter(pn = pn, layerbranch = layerbranch) + + if qry: + m = Maintainer.create_or_update(name, email) + + rm = RecipeMaintainer() + rm.recipe = qry[0] + rm.maintainer = m + rm.history = rms + rm.save() + + logger.debug("%s: Change maintainer to %s in commit %s." % \ + (pn, m.name, commit)) + else: + logger.debug("%s: Not found in layer %s." % \ + (pn, layername)) + + # set missing recipes to no maintainer + m = Maintainer.objects.get(id = 0) # No Maintainer + for recipe in Recipe.objects.all(): + if not RecipeMaintainer.objects.filter(recipe = recipe, history = rms): rm = RecipeMaintainer() - rm.recipe = qry[0] + rm.recipe = recipe rm.maintainer = m rm.history = rms rm.save() + logger.debug("%s: Not found maintainer in commit %s set to 'No maintainer'." % \ + (recipe.pn, rms.sha1)) - logger.debug("%s: Change maintainer to %s in commit %s." % \ - (pn, m.name, commit)) - else: - logger.debug("%s: Not found in layer %s." 
% \ - (pn, layername)) + utils.runcmd("git checkout master -f", pokypath, logger=logger) + utils.runcmd("git branch -D %s" % (branchname), pokypath, logger=logger) - # set missing recipes to no maintainer + # set new recipes to no maintainer if don't have one m = Maintainer.objects.get(id = 0) # No Maintainer + rms = RecipeMaintainerHistory.get_last() for recipe in Recipe.objects.all(): if not RecipeMaintainer.objects.filter(recipe = recipe, history = rms): rm = RecipeMaintainer() @@ -141,24 +158,12 @@ def maintainer_history(logger): rm.maintainer = m rm.history = rms rm.save() - logger.debug("%s: Not found maintainer in commit %s set to 'No maintainer'." % \ - (recipe.pn, rms.sha1)) - - utils.runcmd("git checkout master -f", pokypath, logger=logger) - utils.runcmd("git branch -D %s" % (branchname), pokypath, logger=logger) - - # set new recipes to no maintainer if don't have one - m = Maintainer.objects.get(id = 0) # No Maintainer - rms = RecipeMaintainerHistory.get_last() - for recipe in Recipe.objects.all(): - if not RecipeMaintainer.objects.filter(recipe = recipe, history = rms): - rm = RecipeMaintainer() - rm.recipe = recipe - rm.maintainer = m - rm.history = rms - rm.save() - logger.debug("%s: New recipe not found maintainer set to 'No maintainer'." % \ - (recipe.pn)) + logger.debug("%s: New recipe not found maintainer set to 'No maintainer'." 
% \ + (recipe.pn)) + if options.dry_run: + raise DryRunRollbackException + except DryRunRollbackException: + pass if __name__=="__main__": parser = optparse.OptionParser(usage = """%prog [options]""") @@ -167,9 +172,13 @@ if __name__=="__main__": help = "Enable debug output", action="store_const", const=logging.DEBUG, dest="loglevel", default=logging.INFO) - + + parser.add_option("--dry-run", + help = "Do not write any data back to the database", + action="store_true", dest="dry_run", default=False) + logger = get_logger("MaintainerUpdate", settings) options, args = parser.parse_args(sys.argv) logger.setLevel(options.loglevel) - maintainer_history(logger) + maintainer_history(options, logger) diff --git a/rrs/tools/rrs_upgrade_history.py b/rrs/tools/rrs_upgrade_history.py index 64b4743953..8c97dfb695 100755 --- a/rrs/tools/rrs_upgrade_history.py +++ b/rrs/tools/rrs_upgrade_history.py @@ -18,8 +18,9 @@ import optparse import logging sys.path.insert(0, os.path.realpath(os.path.join(os.path.dirname(__file__)))) -from common import common_setup, update_repo, get_pv_type, load_recipes, \ - get_logger +from common import common_setup, get_pv_type, load_recipes, \ + get_logger, DryRunRollbackException + common_setup() from layerindex import utils, recipeparse from layerindex.update_layer import split_recipe_fn @@ -144,7 +145,7 @@ def _get_recipes_filenames(ct, repodir, layerdir, logger): return ct_files -def do_initial(layerbranch, ct, logger): +def do_initial(layerbranch, ct, logger, dry_run): layer = layerbranch.layer urldir = str(layer.get_fetch_dir()) repodir = os.path.join(fetchdir, urldir) @@ -160,16 +161,21 @@ def do_initial(layerbranch, ct, logger): (tinfoil, d, recipes) = load_recipes(layerbranch, bitbakepath, fetchdir, settings, logger, nocheckout=True) - with transaction.atomic(): - for recipe_data in recipes: - _create_upgrade(recipe_data, layerbranch, '', title, - info, logger, initial=True) + try: + with transaction.atomic(): + for recipe_data in recipes: + 
_create_upgrade(recipe_data, layerbranch, '', title, + info, logger, initial=True) + if dry_run: + raise DryRunRollbackException + except DryRunRollbackException: + pass utils.runcmd("git checkout master -f", repodir, logger=logger) utils.runcmd("git branch -D %s" % (branch_name_tmp), repodir, logger=logger) tinfoil.shutdown() -def do_loop(layerbranch, ct, logger): +def do_loop(layerbranch, ct, logger, dry_run): layer = layerbranch.layer urldir = str(layer.get_fetch_dir()) repodir = os.path.join(fetchdir, urldir) @@ -193,10 +199,15 @@ def do_loop(layerbranch, ct, logger): repodir, logger=logger) info = utils.runcmd("git log --format='%an;%ae;%ad;%cd' --date=rfc -n 1 " \ + ct, destdir=repodir, logger=logger) - with transaction.atomic(): - for recipe_data in recipes: - _create_upgrade(recipe_data, layerbranch, ct, title, - info, logger) + try: + with transaction.atomic(): + for recipe_data in recipes: + _create_upgrade(recipe_data, layerbranch, ct, title, + info, logger) + if dry_run: + raise DryRunRollbackException + except DryRunRollbackException: + pass utils.runcmd("git checkout master -f", repodir, logger=logger) utils.runcmd("git branch -D %s" % (branch_name_tmp), repodir, logger=logger) @@ -243,13 +254,13 @@ def upgrade_history(options, logger): logger.debug("Adding initial upgrade history ....") ct = commit_list.pop(0) - do_initial(layerbranch, ct, logger) + do_initial(layerbranch, ct, logger, options.dry_run) logger.debug("Adding upgrade history from %s to %s ..." % (since, today)) for ct in commit_list: if ct: logger.debug("Analysing commit %s ..." 
% ct) - do_loop(layerbranch, ct, logger) + do_loop(layerbranch, ct, logger, options.dry_run) if __name__=="__main__": parser = optparse.OptionParser(usage = """%prog [options]""") @@ -261,7 +272,11 @@ if __name__=="__main__": parser.add_option("-d", "--debug", help = "Enable debug output", action="store_const", const=logging.DEBUG, dest="loglevel", default=logging.INFO) - + + parser.add_option("--dry-run", + help = "Do not write any data back to the database", + action="store_true", dest="dry_run", default=False) + options, args = parser.parse_args(sys.argv) logger.setLevel(options.loglevel) diff --git a/rrs/tools/rrs_upstream_history.py b/rrs/tools/rrs_upstream_history.py index df71fd38e3..84355c43e7 100755 --- a/rrs/tools/rrs_upstream_history.py +++ b/rrs/tools/rrs_upstream_history.py @@ -15,7 +15,7 @@ from datetime import datetime sys.path.insert(0, os.path.realpath(os.path.join(os.path.dirname(__file__)))) from common import common_setup, update_repo, load_recipes, \ - get_pv_type, get_logger + get_pv_type, get_logger, DryRunRollbackException common_setup() from layerindex import utils @@ -145,62 +145,71 @@ if __name__=="__main__": help = "Enable debug output", action="store_const", const=logging.DEBUG, dest="loglevel", default=logging.INFO) + parser.add_option("--dry-run", + help = "Do not write any data back to the database", + action="store_true", dest="dry_run", default=False) + options, args = parser.parse_args(sys.argv) logger.setLevel(options.loglevel) logger.debug("Starting upstream history...") - with transaction.atomic(): - for layerbranch in LayerBranch.objects.all(): - layer = layerbranch.layer - urldir = layer.get_fetch_dir() - repodir = os.path.join(fetchdir, urldir) - layerdir = os.path.join(repodir, layerbranch.vcs_subdir) - - recipe_files = [] - for recipe in Recipe.objects.filter(layerbranch = layerbranch): - file = str(os.path.join(layerdir, recipe.full_path())) - recipe_files.append(file) - - (tinfoil, d, recipes) = load_recipes(layerbranch, 
bitbakepath, - fetchdir, settings, logger, recipe_files=recipe_files) - - if not recipes: - tinfoil.shutdown() - continue + try: + with transaction.atomic(): + for layerbranch in LayerBranch.objects.all(): + layer = layerbranch.layer + urldir = layer.get_fetch_dir() + repodir = os.path.join(fetchdir, urldir) + layerdir = os.path.join(repodir, layerbranch.vcs_subdir) + + recipe_files = [] + for recipe in Recipe.objects.filter(layerbranch = layerbranch): + file = str(os.path.join(layerdir, recipe.full_path())) + recipe_files.append(file) + + (tinfoil, d, recipes) = load_recipes(layerbranch, bitbakepath, + fetchdir, settings, logger, recipe_files=recipe_files) + + if not recipes: + tinfoil.shutdown() + continue - for recipe_data in recipes: - set_regexes(recipe_data) + for recipe_data in recipes: + set_regexes(recipe_data) - history = RecipeUpstreamHistory(start_date = datetime.now()) + history = RecipeUpstreamHistory(start_date = datetime.now()) - from oe.utils import ThreadedPool - import multiprocessing + from oe.utils import ThreadedPool + import multiprocessing - #nproc = min(multiprocessing.cpu_count(), len(recipes)) - # XXX: The new tinfoil API don't support pythreads so - # set to 1 while tinfoil have support. - nproc = 1 - pool = ThreadedPool(nproc, len(recipes)) + #nproc = min(multiprocessing.cpu_count(), len(recipes)) + # XXX: The new tinfoil API don't support pythreads so + # set to 1 while tinfoil have support. 
+ nproc = 1 + pool = ThreadedPool(nproc, len(recipes)) - result = [] - for recipe_data in recipes: - pool.add_task(get_upstream_info, (layerbranch, - recipe_data, result)) + result = [] + for recipe_data in recipes: + pool.add_task(get_upstream_info, (layerbranch, + recipe_data, result)) - pool.start() - pool.wait_completion() + pool.start() + pool.wait_completion() - history.end_date = datetime.now() - history.save() + history.end_date = datetime.now() + history.save() - for res in result: - (recipe, ru) = res + for res in result: + (recipe, ru) = res - ru.history = history - ru.save() + ru.history = history + ru.save() - logger.debug('%s: layer branch %s, pv %s, upstream (%s)' % (recipe.pn, - str(layerbranch), recipe.pv, str(ru))) + logger.debug('%s: layer branch %s, pv %s, upstream (%s)' % (recipe.pn, + str(layerbranch), recipe.pv, str(ru))) - tinfoil.shutdown() + tinfoil.shutdown() + if options.dry_run: + raise DryRunRollbackException + except DryRunRollbackException: + pass |