Mirror of git://git.yoctoproject.org/layerindex-web.git
rrs/tools: Upgrade to use transaction.atomic() in Django 1.6
Django 1.6 provides a context manager for atomic transactions, so update the way database transactions are handled.

Signed-off-by: Aníbal Limón <anibal.limon@linux.intel.com>
parent 1c5b79a312
commit 8ed223e13c
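For context, this is the API change the diff below applies across the RRS tools. A minimal sketch, assuming placeholder update_* helpers that are not functions from this repository:

    from django.db import transaction

    # Old style removed by this commit: the transaction is opened, managed
    # and committed by hand, and must be explicitly closed.
    def update_old(records):
        transaction.enter_transaction_management()
        transaction.managed(True)
        for record in records:
            record.save()
        transaction.commit()
        transaction.leave_transaction_management()

    # Django 1.6 style used by this commit: transaction.atomic() commits on
    # clean exit and rolls back automatically if the block raises.
    def update_new(records):
        with transaction.atomic():
            for record in records:
                record.save()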
@@ -94,42 +94,37 @@ if __name__=="__main__":
 
     logger.debug("Starting recipe distros update ...")
 
-    transaction.enter_transaction_management()
-    transaction.managed(True)
+    with transaction.atomic():
+        for layerbranch in LayerBranch.objects.all():
+            (tinfoil, d, recipes) = load_recipes(layerbranch, bitbakepath,
+                    fetchdir, settings, logger)
+
+            if not recipes:
+                continue
+
+            from oe import distro_check
+            logger.debug("Downloading distro's package information ...")
+            distro_check.create_distro_packages_list(fetchdir, d)
+            pkglst_dir = os.path.join(fetchdir, "package_lists")
+
+            RecipeDistro.objects.filter(recipe__layerbranch = layerbranch).delete()
+
+            for recipe_data in recipes:
+                pn = recipe_data.getVar('PN', True)
+
+                try:
+                    recipe = Recipe.objects.get(pn = pn, layerbranch = layerbranch)
+                except:
+                    logger.warn('%s: layer branch %s, NOT found' % (pn,
+                        str(layerbranch)))
+                    continue
+
+                distro_info = search_package_in_distros(pkglst_dir, recipe, recipe_data)
+                for distro, alias in distro_info.items():
+                    recipedistro = RecipeDistro()
+                    recipedistro.recipe = recipe
+                    recipedistro.distro = distro
+                    recipedistro.alias = alias
+                    recipedistro.save()
+                    logger.debug('%s: layer branch %s, add distro %s alias %s' % (pn,
+                        str(layerbranch), distro, alias))
-
-    transaction.commit()
-    transaction.leave_transaction_management()
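A practical effect of the hunk above: the delete-and-repopulate step (clearing the RecipeDistro rows for a layer branch and re-inserting them) now runs inside one transaction, so a failure mid-update rolls back to the previous rows instead of leaving the table half-cleared. A minimal sketch of that pattern, assuming a placeholder Entry model rather than a layerindex one:

    from django.db import transaction
    from myapp.models import Entry  # placeholder model, not part of layerindex

    def refresh_entries(new_rows):
        # If anything below raises, the delete is rolled back together
        # with any partial inserts.
        with transaction.atomic():
            Entry.objects.all().delete()
            for row in new_rows:
                Entry.objects.create(**row)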
@@ -90,51 +90,66 @@ def maintainer_history(logger):
     commits = utils.runcmd("git log --format='%H' --reverse --date=rfc " +
             MAINTAINERS_INCLUDE_PATH, pokypath, logger=logger)
 
-    transaction.enter_transaction_management()
-    transaction.managed(True)
+    with transaction.atomic():
+        for commit in commits.strip().split("\n"):
+            if RecipeMaintainerHistory.objects.filter(sha1=commit):
+                continue
+
+            logger.debug("Analysing commit %s ..." % (commit))
+
+            (author_name, author_email, date, title) = \
+                get_commit_info(utils.runcmd("git show " + commit, pokypath,
+                    logger=logger), logger)
+
+            author = Maintainer.create_or_update(author_name, author_email)
+            rms = RecipeMaintainerHistory(title=title, date=date, author=author,
+                    sha1=commit)
+            rms.save()
+
+            branchname = 'maintainer' + commit
+            utils.runcmd("git checkout %s -b %s -f" % (commit, branchname),
+                    pokypath, logger=logger)
+
+            lines = [line.strip() for line in open(maintainers_full_path)]
+            for line in lines:
+                res = get_recipe_maintainer(line, logger)
+                if res:
+                    (pn, name, email) = res
+                    qry = Recipe.objects.filter(pn = pn, layerbranch = layerbranch)
+
+                    if qry:
+                        m = Maintainer.create_or_update(name, email)
+
+                        rm = RecipeMaintainer()
+                        rm.recipe = qry[0]
+                        rm.maintainer = m
+                        rm.history = rms
+                        rm.save()
+
+                        logger.debug("%s: Change maintainer to %s in commit %s." % \
+                                (pn, m.name, commit))
+                    else:
+                        logger.debug("%s: Not found in layer %s." % \
+                                (pn, layername))
+
+            # set missing recipes to no maintainer
+            m = Maintainer.objects.get(id = 0) # No Maintainer
+            for recipe in Recipe.objects.all():
+                if not RecipeMaintainer.objects.filter(recipe = recipe, history = rms):
+                    rm = RecipeMaintainer()
+                    rm.recipe = recipe
+                    rm.maintainer = m
+                    rm.history = rms
+                    rm.save()
+                    logger.debug("%s: Not found maintainer in commit %s set to 'No maintainer'." % \
+                            (recipe.pn, rms.sha1))
+
+            utils.runcmd("git checkout master -f", pokypath, logger=logger)
+            utils.runcmd("git branch -D %s" % (branchname), pokypath, logger=logger)
+
+        # set new recipes to no maintainer if don't have one
+        m = Maintainer.objects.get(id = 0) # No Maintainer
+        rms = RecipeMaintainerHistory.get_last()
+        for recipe in Recipe.objects.all():
+            if not RecipeMaintainer.objects.filter(recipe = recipe, history = rms):
+                rm = RecipeMaintainer()
+                rm.recipe = recipe
+                rm.maintainer = m
+                rm.history = rms
+                rm.save()
+                logger.debug("%s: New recipe not found maintainer set to 'No maintainer'." % \
+                        (recipe.pn))
@@ -142,27 +157,8 @@ def maintainer_history(logger):
-
-    transaction.commit()
-    transaction.leave_transaction_management()
 
 if __name__=="__main__":
     parser = optparse.OptionParser(usage = """%prog [options]""")
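Note the granularity chosen above: the whole maintainer-history import runs in a single atomic block, so an exception while processing any commit rolls back everything imported in that run. If partial progress were preferred, the block could instead wrap each iteration; a sketch of that alternative, where import_one_commit is a placeholder for the per-commit work shown above:

    from django.db import transaction

    def import_history(commit_shas):
        # One transaction per commit: work already imported survives a
        # failure on a later commit.
        for sha in commit_shas:
            with transaction.atomic():
                import_one_commit(sha)  # placeholder helper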
@@ -46,43 +46,37 @@ if __name__=="__main__":
 
     logger.info('Starting unique recipes ...')
 
-    transaction.enter_transaction_management()
-    transaction.managed(True)
-
     # only keep the major version of recipe
     logger.info('Starting remove of duplicate recipes only keep major version ...')
-    for layerbranch in LayerBranch.objects.all():
-        recipes = {}
+    with transaction.atomic():
+        for layerbranch in LayerBranch.objects.all():
+            recipes = {}
+
+            for recipe in Recipe.objects.filter(layerbranch=layerbranch):
+                recipes[recipe.pn] = None
+
+            for pn in recipes.keys():
+                for recipe in Recipe.objects.filter(layerbranch=layerbranch,
+                        pn=pn):
+
+                    if recipes[pn] is None:
+                        recipes[pn] = recipe
+                    else:
+                        (ppv, _, _) = get_recipe_pv_without_srcpv(recipes[pn].pv,
+                                get_pv_type(recipes[pn].pv))
+                        (npv, _, _) = get_recipe_pv_without_srcpv(recipe.pv,
+                                get_pv_type(recipe.pv))
+
+                        if npv == 'git':
+                            logger.debug("%s: Removed git recipe without version." \
+                                    % (recipe.pn))
+                            recipe.delete()
+                        elif ppv == 'git' or vercmp_string(ppv, npv) == -1:
+                            logger.debug("%s: Removed older recipe (%s), new recipe (%s)." \
+                                    % (recipes[pn].pn, recipes[pn].pv, recipe.pv))
+                            recipes[pn].delete()
+                            recipes[pn] = recipe
+                        else:
+                            logger.debug("%s: Removed older recipe (%s), current recipe (%s)." \
+                                    % (recipes[pn].pn, recipe.pv, recipes[pn].pv))
+                            recipe.delete()
-
-    transaction.commit()
-    transaction.leave_transaction_management()
@@ -157,9 +157,10 @@ def do_initial(layerbranch, ct, logger):
     (tinfoil, d, recipes) = load_recipes(layerbranch, bitbakepath,
             fetchdir, settings, logger, nocheckout=True)
 
-    for recipe_data in recipes:
-        _create_upgrade(recipe_data, layerbranch, '', title,
-                info, logger, initial=True)
+    with transaction.atomic():
+        for recipe_data in recipes:
+            _create_upgrade(recipe_data, layerbranch, '', title,
+                    info, logger, initial=True)
 
     utils.runcmd("git checkout master -f", repodir, logger=logger)
     utils.runcmd("git branch -D %s" % (branch_name_tmp), repodir, logger=logger)
@@ -188,9 +189,10 @@ def do_loop(layerbranch, ct, logger):
             repodir, logger=logger)
     info = utils.runcmd("git log --format='%an;%ae;%ad;%cd' --date=rfc -n 1 " \
             + ct, destdir=repodir, logger=logger)
-    for recipe_data in recipes:
-        _create_upgrade(recipe_data, layerbranch, ct, title,
-                info, logger)
+    with transaction.atomic():
+        for recipe_data in recipes:
+            _create_upgrade(recipe_data, layerbranch, ct, title,
+                    info, logger)
 
     utils.runcmd("git checkout master -f", repodir, logger=logger)
     utils.runcmd("git branch -D %s" % (branch_name_tmp), repodir, logger=logger)
@@ -232,8 +234,6 @@ def upgrade_history(options, logger):
             logger=logger)
     commit_list = commits.split('\n')
 
-    transaction.enter_transaction_management()
-    transaction.managed(True)
     if options.initial:
         logger.debug("Adding initial upgrade history ....")
 
@@ -255,9 +255,6 @@ def upgrade_history(options, logger):
         p.start()
         p.join()
 
-    transaction.commit()
-    transaction.leave_transaction_management()
-
 if __name__=="__main__":
     parser = optparse.OptionParser(usage = """%prog [options]""")
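In upgrade_history() the single process-wide transaction is dropped, and do_initial()/do_loop() each wrap their own batch of _create_upgrade() calls in an atomic block. When a whole helper should be one transaction, Django 1.6 also offers the decorator form; a small sketch with illustrative helper names, not code from this repository:

    from django.db import transaction

    # Block form: only the writes inside the block share one transaction.
    def process_batch(items):
        with transaction.atomic():
            for item in items:
                item.save()

    # Decorator form: the entire function body runs in one transaction.
    @transaction.atomic
    def process_batch_decorated(items):
        for item in items:
            item.save()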
@@ -146,55 +146,51 @@ if __name__=="__main__":
 
     logger.debug("Starting upstream history...")
 
-    transaction.enter_transaction_management()
-    transaction.managed(True)
-    for layerbranch in LayerBranch.objects.all():
+    with transaction.atomic():
+        for layerbranch in LayerBranch.objects.all():
+            layer = layerbranch.layer
+            urldir = layer.get_fetch_dir()
+            repodir = os.path.join(fetchdir, urldir)
+            layerdir = os.path.join(repodir, layerbranch.vcs_subdir)
+
+            recipe_files = []
+            for recipe in Recipe.objects.filter(layerbranch = layerbranch):
+                file = str(os.path.join(layerdir, recipe.full_path()))
+                recipe_files.append(file)
+
+            (tinfoil, d, recipes) = load_recipes(layerbranch, bitbakepath,
+                    fetchdir, settings, logger, recipe_files=recipe_files)
+
+            if not recipes:
+                continue
+
+            for recipe_data in recipes:
+                set_regexes(recipe_data)
+
+            history = RecipeUpstreamHistory(start_date = datetime.now())
+
+            from oe.utils import ThreadedPool
+            import multiprocessing
+
+            nproc = min(multiprocessing.cpu_count(), len(recipes))
+            pool = ThreadedPool(nproc, len(recipes))
+
+            result = []
+            for recipe_data in recipes:
+                pool.add_task(get_upstream_info, (layerbranch,
+                    recipe_data, result))
+
+            pool.start()
+            pool.wait_completion()
+
+            history.end_date = datetime.now()
+            history.save()
+
+            for res in result:
+                (recipe, ru) = res
+
+                ru.history = history
+                ru.save()
+
+                logger.debug('%s: layer branch %s, pv %s, upstream (%s)' % (recipe.pn,
+                    str(layerbranch), recipe.pv, str(ru)))
-
-    transaction.commit()
-    transaction.leave_transaction_management()