rrs/tools: add dry-run option to each script

Add the ability to run the scripts without writing changes back to the
database, for debugging purposes.

Signed-off-by: Paul Eggleton <paul.eggleton@linux.intel.com>
This commit is contained in:
Paul Eggleton 2018-02-27 16:14:43 +13:00
parent 6ca3d6649e
commit fd786875c3
5 changed files with 198 additions and 152 deletions

View File

@ -7,6 +7,10 @@
import logging import logging
class DryRunRollbackException(Exception):
pass
def common_setup(): def common_setup():
import sys, os import sys, os
sys.path.insert(0, os.path.realpath(os.path.join(os.path.dirname(__file__), '../../'))) sys.path.insert(0, os.path.realpath(os.path.join(os.path.dirname(__file__), '../../')))

View File

@ -15,7 +15,7 @@ from datetime import datetime
sys.path.insert(0, os.path.realpath(os.path.join(os.path.dirname(__file__)))) sys.path.insert(0, os.path.realpath(os.path.join(os.path.dirname(__file__))))
from common import common_setup, update_repo, load_recipes, \ from common import common_setup, update_repo, load_recipes, \
get_pv_type, get_logger get_pv_type, get_logger, DryRunRollbackException
common_setup() common_setup()
from layerindex import utils from layerindex import utils
@ -94,45 +94,54 @@ if __name__=="__main__":
help = "Enable debug output", help = "Enable debug output",
action="store_const", const=logging.DEBUG, dest="loglevel", default=logging.INFO) action="store_const", const=logging.DEBUG, dest="loglevel", default=logging.INFO)
parser.add_option("--dry-run",
help = "Do not write any data back to the database",
action="store_true", dest="dry_run", default=False)
options, args = parser.parse_args(sys.argv) options, args = parser.parse_args(sys.argv)
logger.setLevel(options.loglevel) logger.setLevel(options.loglevel)
logger.debug("Starting recipe distros update ...") logger.debug("Starting recipe distros update ...")
with transaction.atomic(): try:
for layerbranch in LayerBranch.objects.all(): with transaction.atomic():
(tinfoil, d, recipes) = load_recipes(layerbranch, bitbakepath, for layerbranch in LayerBranch.objects.all():
fetchdir, settings, logger) (tinfoil, d, recipes) = load_recipes(layerbranch, bitbakepath,
fetchdir, settings, logger)
if not recipes: if not recipes:
tinfoil.shutdown() tinfoil.shutdown()
continue
from oe import distro_check
logger.debug("Downloading distro's package information ...")
distro_check.create_distro_packages_list(fetchdir, d)
pkglst_dir = os.path.join(fetchdir, "package_lists")
RecipeDistro.objects.filter(recipe__layerbranch = layerbranch).delete()
for recipe_data in recipes:
pn = recipe_data.getVar('PN', True)
try:
recipe = Recipe.objects.get(pn = pn, layerbranch = layerbranch)
except:
logger.warn('%s: layer branch %s, NOT found' % (pn,
str(layerbranch)))
continue continue
distro_info = search_package_in_distros(pkglst_dir, recipe, recipe_data) from oe import distro_check
for distro, alias in distro_info.items(): logger.debug("Downloading distro's package information ...")
recipedistro = RecipeDistro() distro_check.create_distro_packages_list(fetchdir, d)
recipedistro.recipe = recipe pkglst_dir = os.path.join(fetchdir, "package_lists")
recipedistro.distro = distro
recipedistro.alias = alias
recipedistro.save()
logger.debug('%s: layer branch %s, add distro %s alias %s' % (pn,
str(layerbranch), distro, alias))
tinfoil.shutdown() RecipeDistro.objects.filter(recipe__layerbranch = layerbranch).delete()
for recipe_data in recipes:
pn = recipe_data.getVar('PN', True)
try:
recipe = Recipe.objects.get(pn = pn, layerbranch = layerbranch)
except:
logger.warn('%s: layer branch %s, NOT found' % (pn,
str(layerbranch)))
continue
distro_info = search_package_in_distros(pkglst_dir, recipe, recipe_data)
for distro, alias in distro_info.items():
recipedistro = RecipeDistro()
recipedistro.recipe = recipe
recipedistro.distro = distro
recipedistro.alias = alias
recipedistro.save()
logger.debug('%s: layer branch %s, add distro %s alias %s' % (pn,
str(layerbranch), distro, alias))
tinfoil.shutdown()
if options.dry_run:
raise DryRunRollbackException
except DryRunRollbackException:
pass

View File

@ -13,7 +13,7 @@ import optparse
import logging import logging
sys.path.insert(0, os.path.realpath(os.path.join(os.path.dirname(__file__)))) sys.path.insert(0, os.path.realpath(os.path.join(os.path.dirname(__file__))))
from common import common_setup, update_repo, get_logger from common import common_setup, update_repo, get_logger, DryRunRollbackException
common_setup() common_setup()
from layerindex import utils, recipeparse from layerindex import utils, recipeparse
@ -67,7 +67,7 @@ def get_commit_info(info, logger):
""" """
Recreate Maintainership history from the beginning of the Yocto Project Recreate Maintainership history from the beginning of the Yocto Project
""" """
def maintainer_history(logger): def maintainer_history(options, logger):
layername = settings.CORE_LAYER_NAME layername = settings.CORE_LAYER_NAME
branchname = "master" branchname = "master"
@ -90,50 +90,67 @@ def maintainer_history(logger):
commits = utils.runcmd("git log --format='%H' --reverse --date=rfc " + commits = utils.runcmd("git log --format='%H' --reverse --date=rfc " +
MAINTAINERS_INCLUDE_PATH, pokypath, logger=logger) MAINTAINERS_INCLUDE_PATH, pokypath, logger=logger)
with transaction.atomic(): try:
for commit in commits.strip().split("\n"): with transaction.atomic():
if RecipeMaintainerHistory.objects.filter(sha1=commit): for commit in commits.strip().split("\n"):
continue if RecipeMaintainerHistory.objects.filter(sha1=commit):
continue
logger.debug("Analysing commit %s ..." % (commit)) logger.debug("Analysing commit %s ..." % (commit))
(author_name, author_email, date, title) = \ (author_name, author_email, date, title) = \
get_commit_info(utils.runcmd("git show " + commit, pokypath, get_commit_info(utils.runcmd("git show " + commit, pokypath,
logger=logger), logger) logger=logger), logger)
author = Maintainer.create_or_update(author_name, author_email) author = Maintainer.create_or_update(author_name, author_email)
rms = RecipeMaintainerHistory(title=title, date=date, author=author, rms = RecipeMaintainerHistory(title=title, date=date, author=author,
sha1=commit) sha1=commit)
rms.save() rms.save()
branchname = 'maintainer' + commit branchname = 'maintainer' + commit
utils.runcmd("git checkout %s -b %s -f" % (commit, branchname), utils.runcmd("git checkout %s -b %s -f" % (commit, branchname),
pokypath, logger=logger) pokypath, logger=logger)
lines = [line.strip() for line in open(maintainers_full_path)] lines = [line.strip() for line in open(maintainers_full_path)]
for line in lines: for line in lines:
res = get_recipe_maintainer(line, logger) res = get_recipe_maintainer(line, logger)
if res: if res:
(pn, name, email) = res (pn, name, email) = res
qry = Recipe.objects.filter(pn = pn, layerbranch = layerbranch) qry = Recipe.objects.filter(pn = pn, layerbranch = layerbranch)
if qry: if qry:
m = Maintainer.create_or_update(name, email) m = Maintainer.create_or_update(name, email)
rm = RecipeMaintainer()
rm.recipe = qry[0]
rm.maintainer = m
rm.history = rms
rm.save()
logger.debug("%s: Change maintainer to %s in commit %s." % \
(pn, m.name, commit))
else:
logger.debug("%s: Not found in layer %s." % \
(pn, layername))
# set missing recipes to no maintainer
m = Maintainer.objects.get(id = 0) # No Maintainer
for recipe in Recipe.objects.all():
if not RecipeMaintainer.objects.filter(recipe = recipe, history = rms):
rm = RecipeMaintainer() rm = RecipeMaintainer()
rm.recipe = qry[0] rm.recipe = recipe
rm.maintainer = m rm.maintainer = m
rm.history = rms rm.history = rms
rm.save() rm.save()
logger.debug("%s: Not found maintainer in commit %s set to 'No maintainer'." % \
(recipe.pn, rms.sha1))
logger.debug("%s: Change maintainer to %s in commit %s." % \ utils.runcmd("git checkout master -f", pokypath, logger=logger)
(pn, m.name, commit)) utils.runcmd("git branch -D %s" % (branchname), pokypath, logger=logger)
else:
logger.debug("%s: Not found in layer %s." % \
(pn, layername))
# set missing recipes to no maintainer # set new recipes to no maintainer if they don't have one
m = Maintainer.objects.get(id = 0) # No Maintainer m = Maintainer.objects.get(id = 0) # No Maintainer
rms = RecipeMaintainerHistory.get_last()
for recipe in Recipe.objects.all(): for recipe in Recipe.objects.all():
if not RecipeMaintainer.objects.filter(recipe = recipe, history = rms): if not RecipeMaintainer.objects.filter(recipe = recipe, history = rms):
rm = RecipeMaintainer() rm = RecipeMaintainer()
@ -141,24 +158,12 @@ def maintainer_history(logger):
rm.maintainer = m rm.maintainer = m
rm.history = rms rm.history = rms
rm.save() rm.save()
logger.debug("%s: Not found maintainer in commit %s set to 'No maintainer'." % \ logger.debug("%s: New recipe not found maintainer set to 'No maintainer'." % \
(recipe.pn, rms.sha1)) (recipe.pn))
if options.dry_run:
utils.runcmd("git checkout master -f", pokypath, logger=logger) raise DryRunRollbackException
utils.runcmd("git branch -D %s" % (branchname), pokypath, logger=logger) except DryRunRollbackException:
pass
# set new recipes to no maintainer if they don't have one
m = Maintainer.objects.get(id = 0) # No Maintainer
rms = RecipeMaintainerHistory.get_last()
for recipe in Recipe.objects.all():
if not RecipeMaintainer.objects.filter(recipe = recipe, history = rms):
rm = RecipeMaintainer()
rm.recipe = recipe
rm.maintainer = m
rm.history = rms
rm.save()
logger.debug("%s: New recipe not found maintainer set to 'No maintainer'." % \
(recipe.pn))
if __name__=="__main__": if __name__=="__main__":
parser = optparse.OptionParser(usage = """%prog [options]""") parser = optparse.OptionParser(usage = """%prog [options]""")
@ -168,8 +173,12 @@ if __name__=="__main__":
action="store_const", const=logging.DEBUG, dest="loglevel", action="store_const", const=logging.DEBUG, dest="loglevel",
default=logging.INFO) default=logging.INFO)
parser.add_option("--dry-run",
help = "Do not write any data back to the database",
action="store_true", dest="dry_run", default=False)
logger = get_logger("MaintainerUpdate", settings) logger = get_logger("MaintainerUpdate", settings)
options, args = parser.parse_args(sys.argv) options, args = parser.parse_args(sys.argv)
logger.setLevel(options.loglevel) logger.setLevel(options.loglevel)
maintainer_history(logger) maintainer_history(options, logger)

View File

@ -18,8 +18,9 @@ import optparse
import logging import logging
sys.path.insert(0, os.path.realpath(os.path.join(os.path.dirname(__file__)))) sys.path.insert(0, os.path.realpath(os.path.join(os.path.dirname(__file__))))
from common import common_setup, update_repo, get_pv_type, load_recipes, \ from common import common_setup, get_pv_type, load_recipes, \
get_logger get_logger, DryRunRollbackException
common_setup() common_setup()
from layerindex import utils, recipeparse from layerindex import utils, recipeparse
from layerindex.update_layer import split_recipe_fn from layerindex.update_layer import split_recipe_fn
@ -144,7 +145,7 @@ def _get_recipes_filenames(ct, repodir, layerdir, logger):
return ct_files return ct_files
def do_initial(layerbranch, ct, logger): def do_initial(layerbranch, ct, logger, dry_run):
layer = layerbranch.layer layer = layerbranch.layer
urldir = str(layer.get_fetch_dir()) urldir = str(layer.get_fetch_dir())
repodir = os.path.join(fetchdir, urldir) repodir = os.path.join(fetchdir, urldir)
@ -160,16 +161,21 @@ def do_initial(layerbranch, ct, logger):
(tinfoil, d, recipes) = load_recipes(layerbranch, bitbakepath, (tinfoil, d, recipes) = load_recipes(layerbranch, bitbakepath,
fetchdir, settings, logger, nocheckout=True) fetchdir, settings, logger, nocheckout=True)
with transaction.atomic(): try:
for recipe_data in recipes: with transaction.atomic():
_create_upgrade(recipe_data, layerbranch, '', title, for recipe_data in recipes:
info, logger, initial=True) _create_upgrade(recipe_data, layerbranch, '', title,
info, logger, initial=True)
if dry_run:
raise DryRunRollbackException
except DryRunRollbackException:
pass
utils.runcmd("git checkout master -f", repodir, logger=logger) utils.runcmd("git checkout master -f", repodir, logger=logger)
utils.runcmd("git branch -D %s" % (branch_name_tmp), repodir, logger=logger) utils.runcmd("git branch -D %s" % (branch_name_tmp), repodir, logger=logger)
tinfoil.shutdown() tinfoil.shutdown()
def do_loop(layerbranch, ct, logger): def do_loop(layerbranch, ct, logger, dry_run):
layer = layerbranch.layer layer = layerbranch.layer
urldir = str(layer.get_fetch_dir()) urldir = str(layer.get_fetch_dir())
repodir = os.path.join(fetchdir, urldir) repodir = os.path.join(fetchdir, urldir)
@ -193,10 +199,15 @@ def do_loop(layerbranch, ct, logger):
repodir, logger=logger) repodir, logger=logger)
info = utils.runcmd("git log --format='%an;%ae;%ad;%cd' --date=rfc -n 1 " \ info = utils.runcmd("git log --format='%an;%ae;%ad;%cd' --date=rfc -n 1 " \
+ ct, destdir=repodir, logger=logger) + ct, destdir=repodir, logger=logger)
with transaction.atomic(): try:
for recipe_data in recipes: with transaction.atomic():
_create_upgrade(recipe_data, layerbranch, ct, title, for recipe_data in recipes:
info, logger) _create_upgrade(recipe_data, layerbranch, ct, title,
info, logger)
if dry_run:
raise DryRunRollbackException
except DryRunRollbackException:
pass
utils.runcmd("git checkout master -f", repodir, logger=logger) utils.runcmd("git checkout master -f", repodir, logger=logger)
utils.runcmd("git branch -D %s" % (branch_name_tmp), repodir, logger=logger) utils.runcmd("git branch -D %s" % (branch_name_tmp), repodir, logger=logger)
@ -243,13 +254,13 @@ def upgrade_history(options, logger):
logger.debug("Adding initial upgrade history ....") logger.debug("Adding initial upgrade history ....")
ct = commit_list.pop(0) ct = commit_list.pop(0)
do_initial(layerbranch, ct, logger) do_initial(layerbranch, ct, logger, options.dry_run)
logger.debug("Adding upgrade history from %s to %s ..." % (since, today)) logger.debug("Adding upgrade history from %s to %s ..." % (since, today))
for ct in commit_list: for ct in commit_list:
if ct: if ct:
logger.debug("Analysing commit %s ..." % ct) logger.debug("Analysing commit %s ..." % ct)
do_loop(layerbranch, ct, logger) do_loop(layerbranch, ct, logger, options.dry_run)
if __name__=="__main__": if __name__=="__main__":
parser = optparse.OptionParser(usage = """%prog [options]""") parser = optparse.OptionParser(usage = """%prog [options]""")
@ -262,6 +273,10 @@ if __name__=="__main__":
help = "Enable debug output", help = "Enable debug output",
action="store_const", const=logging.DEBUG, dest="loglevel", default=logging.INFO) action="store_const", const=logging.DEBUG, dest="loglevel", default=logging.INFO)
parser.add_option("--dry-run",
help = "Do not write any data back to the database",
action="store_true", dest="dry_run", default=False)
options, args = parser.parse_args(sys.argv) options, args = parser.parse_args(sys.argv)
logger.setLevel(options.loglevel) logger.setLevel(options.loglevel)

View File

@ -15,7 +15,7 @@ from datetime import datetime
sys.path.insert(0, os.path.realpath(os.path.join(os.path.dirname(__file__)))) sys.path.insert(0, os.path.realpath(os.path.join(os.path.dirname(__file__))))
from common import common_setup, update_repo, load_recipes, \ from common import common_setup, update_repo, load_recipes, \
get_pv_type, get_logger get_pv_type, get_logger, DryRunRollbackException
common_setup() common_setup()
from layerindex import utils from layerindex import utils
@ -145,62 +145,71 @@ if __name__=="__main__":
help = "Enable debug output", help = "Enable debug output",
action="store_const", const=logging.DEBUG, dest="loglevel", default=logging.INFO) action="store_const", const=logging.DEBUG, dest="loglevel", default=logging.INFO)
parser.add_option("--dry-run",
help = "Do not write any data back to the database",
action="store_true", dest="dry_run", default=False)
options, args = parser.parse_args(sys.argv) options, args = parser.parse_args(sys.argv)
logger.setLevel(options.loglevel) logger.setLevel(options.loglevel)
logger.debug("Starting upstream history...") logger.debug("Starting upstream history...")
with transaction.atomic(): try:
for layerbranch in LayerBranch.objects.all(): with transaction.atomic():
layer = layerbranch.layer for layerbranch in LayerBranch.objects.all():
urldir = layer.get_fetch_dir() layer = layerbranch.layer
repodir = os.path.join(fetchdir, urldir) urldir = layer.get_fetch_dir()
layerdir = os.path.join(repodir, layerbranch.vcs_subdir) repodir = os.path.join(fetchdir, urldir)
layerdir = os.path.join(repodir, layerbranch.vcs_subdir)
recipe_files = [] recipe_files = []
for recipe in Recipe.objects.filter(layerbranch = layerbranch): for recipe in Recipe.objects.filter(layerbranch = layerbranch):
file = str(os.path.join(layerdir, recipe.full_path())) file = str(os.path.join(layerdir, recipe.full_path()))
recipe_files.append(file) recipe_files.append(file)
(tinfoil, d, recipes) = load_recipes(layerbranch, bitbakepath, (tinfoil, d, recipes) = load_recipes(layerbranch, bitbakepath,
fetchdir, settings, logger, recipe_files=recipe_files) fetchdir, settings, logger, recipe_files=recipe_files)
if not recipes:
tinfoil.shutdown()
continue
for recipe_data in recipes:
set_regexes(recipe_data)
history = RecipeUpstreamHistory(start_date = datetime.now())
from oe.utils import ThreadedPool
import multiprocessing
#nproc = min(multiprocessing.cpu_count(), len(recipes))
# XXX: The new tinfoil API doesn't support pythreads so
# set to 1 until tinfoil has support.
nproc = 1
pool = ThreadedPool(nproc, len(recipes))
result = []
for recipe_data in recipes:
pool.add_task(get_upstream_info, (layerbranch,
recipe_data, result))
pool.start()
pool.wait_completion()
history.end_date = datetime.now()
history.save()
for res in result:
(recipe, ru) = res
ru.history = history
ru.save()
logger.debug('%s: layer branch %s, pv %s, upstream (%s)' % (recipe.pn,
str(layerbranch), recipe.pv, str(ru)))
if not recipes:
tinfoil.shutdown() tinfoil.shutdown()
continue if options.dry_run:
raise DryRunRollbackException
for recipe_data in recipes: except DryRunRollbackException:
set_regexes(recipe_data) pass
history = RecipeUpstreamHistory(start_date = datetime.now())
from oe.utils import ThreadedPool
import multiprocessing
#nproc = min(multiprocessing.cpu_count(), len(recipes))
# XXX: The new tinfoil API don't support pythreads so
# set to 1 while tinfoil have support.
nproc = 1
pool = ThreadedPool(nproc, len(recipes))
result = []
for recipe_data in recipes:
pool.add_task(get_upstream_info, (layerbranch,
recipe_data, result))
pool.start()
pool.wait_completion()
history.end_date = datetime.now()
history.save()
for res in result:
(recipe, ru) = res
ru.history = history
ru.save()
logger.debug('%s: layer branch %s, pv %s, upstream (%s)' % (recipe.pn,
str(layerbranch), recipe.pv, str(ru)))
tinfoil.shutdown()