Handle updating of inc files/classes within same layer

If we aren't doing a full refresh, then when an included/required file or
an inherited class changes we need to update all of the recipes that
depend on it. BitBake records these dependencies, so read them and
record them against each recipe.
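
As a rough illustration of what that involves (the helper name here is made up; the calls and the same-layer filter mirror the script changes below, and this assumes BitBake's lib directory is already on sys.path):

import os
import bb.cache

def collect_file_deps(cfg_data, recipe_fn, layerdir_start, repodir):
    # Parse the recipe fully, the same way the script below does
    envdata = bb.cache.Cache.loadDataFull(recipe_fn, [], cfg_data)
    filedeps = []
    # __depends holds (path, date) pairs for every file BitBake pulled in
    # while parsing this recipe (includes, requires, inherited classes)
    for depstr, date in envdata.getVar('__depends', True):
        # Only record dependencies within the same layer, skipping layer.conf
        if depstr.startswith(layerdir_start) and not depstr.endswith('/conf/layer.conf'):
            filedeps.append(os.path.relpath(depstr, repodir))
    return filedeps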

At the moment this only handles dependencies within the same layer; that
is probably sufficient to handle most changes for the purposes of
extracting the data this application cares about. A future improved
solution will probably involve making use of BitBake's cache and parsing
all layers and their dependencies rather than individual recipes.
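
The flip side, roughly how a changed file is mapped back to the recipes that need re-parsing, looks like this (a sketch of the query pattern used in the script changes below; the function and variable names are illustrative only):

from layerindex.models import RecipeFileDependency

def recipes_to_reparse(layer, changed_paths, updatedrecipes):
    # changed_paths: paths (relative to the repo) reported as modified by git
    # updatedrecipes: recipes already re-parsed directly during this run
    dirtyrecipes = set()
    for path in changed_paths:
        for dep in RecipeFileDependency.objects.filter(layer=layer).filter(path=path):
            dirtyrecipes.add(dep.recipe)
    # Don't re-parse anything that has already been handled
    return dirtyrecipes - updatedrecipes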

This change also adds -x/--nofetch and -n/--dry-run options for
debugging, as well as some further debug messages.

Signed-off-by: Paul Eggleton <paul.eggleton@linux.intel.com>
Paul Eggleton 2013-02-26 10:50:13 +00:00
parent b47400c38d
commit e0f8a05423
3 changed files with 125 additions and 70 deletions


@@ -42,7 +42,7 @@ class LayerNoteAdmin(CompareVersionAdmin):
 class RecipeAdmin(admin.ModelAdmin):
     search_fields = ['filename', 'pn']
     list_filter = ['layer__name']
-    readonly_fields = Recipe._meta.get_all_field_names()
+    readonly_fields = [fieldname for fieldname in Recipe._meta.get_all_field_names() if fieldname != 'recipefiledependency']
     def has_add_permission(self, request, obj=None):
         return False
     def has_delete_permission(self, request, obj=None):
@@ -62,4 +62,5 @@ admin.site.register(LayerMaintainer, LayerMaintainerAdmin)
 admin.site.register(LayerDependency, LayerDependencyAdmin)
 admin.site.register(LayerNote, LayerNoteAdmin)
 admin.site.register(Recipe, RecipeAdmin)
+admin.site.register(RecipeFileDependency)
 admin.site.register(Machine, MachineAdmin)


@@ -152,6 +152,18 @@ class Recipe(models.Model):
         return os.path.join(self.filepath, self.filename)


+class RecipeFileDependency(models.Model):
+    recipe = models.ForeignKey(Recipe)
+    layer = models.ForeignKey(LayerItem, related_name='+')
+    path = models.CharField(max_length=255, db_index=True)
+
+    class Meta:
+        verbose_name_plural = "Recipe file dependencies"
+
+    def __unicode__(self):
+        return '%s' % self.path
+
+
 class Machine(models.Model):
     layer = models.ForeignKey(LayerItem)
     name = models.CharField(max_length=255)


@@ -67,7 +67,7 @@ def sanitise_path(inpath):
 def split_bb_file_path(recipe_path, subdir_start):
-    if recipe_path.startswith(subdir_start) and fnmatch.fnmatch(recipe_path, "*.bb"):
+    if fnmatch.fnmatch(recipe_path, "*.bb"):
         if subdir_start:
             filepath = os.path.relpath(os.path.dirname(recipe_path), subdir_start)
         else:
@@ -76,16 +76,16 @@ def split_bb_file_path(recipe_path, subdir_start):
     return (None, None)

 conf_re = re.compile(r'conf/machine/([^/.]*).conf$')
-def check_machine_conf(path, subdir_start = None):
-    if not subdir_start or path.startswith(subdir_start):
-        res = conf_re.search(path)
-        if res:
-            return res.group(1)
+def check_machine_conf(path):
+    res = conf_re.search(path)
+    if res:
+        return res.group(1)
     return None

-def update_recipe_file(data, path, recipe):
+def update_recipe_file(data, path, recipe, layerdir_start, repodir):
     fn = str(os.path.join(path, recipe.filename))
     try:
+        logger.debug('Updating recipe %s' % fn)
         envdata = bb.cache.Cache.loadDataFull(fn, [], data)
         envdata.setVar('SRCPV', 'X')
         recipe.pn = envdata.getVar("PN", True)
@@ -95,13 +95,31 @@ def update_recipe_file(data, path, recipe):
         recipe.section = envdata.getVar("SECTION", True)
         recipe.license = envdata.getVar("LICENSE", True)
         recipe.homepage = envdata.getVar("HOMEPAGE", True)
+        recipe.save()
+
+        # Get file dependencies within this layer
+        deps = envdata.getVar('__depends', True)
+        filedeps = []
+        for depstr, date in deps:
+            found = False
+            if depstr.startswith(layerdir_start) and not depstr.endswith('/conf/layer.conf'):
+                filedeps.append(os.path.relpath(depstr, repodir))
+        from layerindex.models import RecipeFileDependency
+        RecipeFileDependency.objects.filter(recipe=recipe).delete()
+        for filedep in filedeps:
+            recipedep = RecipeFileDependency()
+            recipedep.layer = recipe.layer
+            recipedep.recipe = recipe
+            recipedep.path = filedep
+            recipedep.save()
     except KeyboardInterrupt:
         raise
     except BaseException as e:
         logger.info("Unable to read %s: %s", fn, str(e))

 def update_machine_conf_file(path, machine):
-    with open(path) as f:
+    logger.debug('Updating machine %s' % path)
+    with open(path, 'r') as f:
         for line in f:
             if line.startswith('#@DESCRIPTION:'):
                 desc = line[14:].strip()
@@ -155,6 +173,12 @@ def main():
     parser.add_option("-r", "--reload",
             help = "Discard existing recipe data and fetch it from scratch",
             action="store_true", dest="reload")
+    parser.add_option("-n", "--dry-run",
+            help = "Don't write any data back to the database",
+            action="store_true", dest="dryrun")
+    parser.add_option("-x", "--nofetch",
+            help = "Don't fetch repositories",
+            action="store_true", dest="nofetch")
     parser.add_option("-d", "--debug",
             help = "Enable debug output",
             action="store_const", const=logging.DEBUG, dest="loglevel", default=logging.INFO)
@@ -172,7 +196,7 @@ def main():
     from django.core.management import setup_environ
     from django.conf import settings
-    from layerindex.models import LayerItem, Recipe, Machine
+    from layerindex.models import LayerItem, Recipe, RecipeFileDependency, Machine
     from django.db import transaction

     import settings
@@ -206,7 +230,6 @@ def main():
     # why won't they just fix that?!)
     tinfoil.config_data.setVar('LICENSE', '')

-    fetchdir = settings.LAYER_FETCH_DIR
     if not fetchdir:
         logger.error("Please set LAYER_FETCH_DIR in settings.py")
@@ -228,24 +251,25 @@ def main():
     fetchedrepos = []
     failedrepos = []

-    # Fetch latest metadata from repositories
-    for layer in layerquery:
-        # Handle multiple layers in a single repo
-        urldir = sanitise_path(layer.vcs_url)
-        repodir = os.path.join(fetchdir, urldir)
-        if not layer.vcs_url in fetchedrepos:
-            logger.info("Fetching remote repository %s" % layer.vcs_url)
-            out = None
-            try:
-                if not os.path.exists(repodir):
-                    out = runcmd("git clone %s %s" % (layer.vcs_url, urldir), fetchdir)
-                else:
-                    out = runcmd("git pull", repodir)
-            except Exception as e:
-                logger.error("fetch failed: %s" % str(e))
-                failedrepos.append(layer.vcs_url)
-                continue
-            fetchedrepos.append(layer.vcs_url)
+    if not options.nofetch:
+        # Fetch latest metadata from repositories
+        for layer in layerquery:
+            # Handle multiple layers in a single repo
+            urldir = sanitise_path(layer.vcs_url)
+            repodir = os.path.join(fetchdir, urldir)
+            if not layer.vcs_url in fetchedrepos:
+                logger.info("Fetching remote repository %s" % layer.vcs_url)
+                out = None
+                try:
+                    if not os.path.exists(repodir):
+                        out = runcmd("git clone %s %s" % (layer.vcs_url, urldir), fetchdir)
+                    else:
+                        out = runcmd("git pull", repodir)
+                except Exception as e:
+                    logger.error("fetch failed: %s" % str(e))
+                    failedrepos.append(layer.vcs_url)
+                    continue
+                fetchedrepos.append(layer.vcs_url)

     # Process and extract data from each layer
     for layer in layerquery:
@@ -264,6 +288,7 @@ def main():
             topcommit = repo.commit('master')

             layerdir = os.path.join(repodir, layer.vcs_subdir)
+            layerdir_start = os.path.normpath(layerdir) + os.sep
             layerrecipes = Recipe.objects.filter(layer=layer)
             layermachines = Machine.objects.filter(layer=layer)
             if layer.vcs_last_rev != topcommit.hexsha or options.reload:
@@ -301,52 +326,66 @@ def main():
                 else:
                     subdir_start = ""

+                updatedrecipes = set()
                 for d in diff.iter_change_type('D'):
                     path = d.a_blob.path
-                    (filepath, filename) = split_bb_file_path(path, subdir_start)
-                    if filename:
-                        layerrecipes.filter(filepath=filepath).filter(filename=filename).delete()
-                    else:
-                        machinename = check_machine_conf(path, subdir_start)
-                        if machinename:
-                            layermachines.filter(name=machinename).delete()
+                    if path.startswith(subdir_start):
+                        (filepath, filename) = split_bb_file_path(path, subdir_start)
+                        if filename:
+                            layerrecipes.filter(filepath=filepath).filter(filename=filename).delete()
+                        else:
+                            machinename = check_machine_conf(path)
+                            if machinename:
+                                layermachines.filter(name=machinename).delete()

                 for d in diff.iter_change_type('A'):
                     path = d.b_blob.path
-                    (filepath, filename) = split_bb_file_path(path, subdir_start)
-                    if filename:
-                        recipe = Recipe()
-                        recipe.layer = layer
-                        recipe.filename = filename
-                        recipe.filepath = filepath
-                        update_recipe_file(config_data_copy, os.path.join(layerdir, filepath), recipe)
-                        recipe.save()
-                    else:
-                        machinename = check_machine_conf(path, subdir_start)
-                        if machinename:
-                            machine = Machine()
-                            machine.layer = layer
-                            machine.name = machinename
-                            update_machine_conf_file(os.path.join(repodir, path), machine)
-                            machine.save()
+                    if path.startswith(subdir_start):
+                        (filepath, filename) = split_bb_file_path(path, subdir_start)
+                        if filename:
+                            recipe = Recipe()
+                            recipe.layer = layer
+                            recipe.filename = filename
+                            recipe.filepath = filepath
+                            update_recipe_file(config_data_copy, os.path.join(layerdir, filepath), recipe, layerdir_start, repodir)
+                            recipe.save()
+                            updatedrecipes.add(recipe)
+                        else:
+                            machinename = check_machine_conf(path)
+                            if machinename:
+                                machine = Machine()
+                                machine.layer = layer
+                                machine.name = machinename
+                                update_machine_conf_file(os.path.join(repodir, path), machine)
+                                machine.save()

+                dirtyrecipes = set()
                 for d in diff.iter_change_type('M'):
                     path = d.a_blob.path
-                    (filepath, filename) = split_bb_file_path(path, subdir_start)
-                    if filename:
-                        results = layerrecipes.filter(filepath=filepath).filter(filename=filename)[:1]
-                        if results:
-                            recipe = results[0]
-                            update_recipe_file(config_data_copy, os.path.join(layerdir, filepath), recipe)
-                            recipe.save()
-                    else:
-                        machinename = check_machine_conf(path, subdir_start)
-                        if machinename:
-                            results = layermachines.filter(name=machinename)
-                            if results:
-                                machine = results[0]
-                                update_machine_conf_file(os.path.join(repodir, path), machine)
-                                machine.save()
+                    if path.startswith(subdir_start):
+                        (filepath, filename) = split_bb_file_path(path, subdir_start)
+                        if filename:
+                            results = layerrecipes.filter(filepath=filepath).filter(filename=filename)[:1]
+                            if results:
+                                recipe = results[0]
+                                update_recipe_file(config_data_copy, os.path.join(layerdir, filepath), recipe, layerdir_start, repodir)
+                                recipe.save()
+                                updatedrecipes.add(recipe)
+                        else:
+                            machinename = check_machine_conf(path)
+                            if machinename:
+                                results = layermachines.filter(name=machinename)
+                                if results:
+                                    machine = results[0]
+                                    update_machine_conf_file(os.path.join(repodir, path), machine)
+                                    machine.save()
+                        deps = RecipeFileDependency.objects.filter(layer=layer).filter(path=path)
+                        for dep in deps:
+                            dirtyrecipes.add(dep.recipe)
+
+                dirtyrecipes -= updatedrecipes
+                for recipe in dirtyrecipes:
+                    update_recipe_file(config_data_copy, os.path.join(layerdir, recipe.filepath), recipe, layerdir_start, repodir)
             else:
                 # Collect recipe data from scratch
                 layerrecipes.delete()
@@ -358,7 +397,7 @@ def main():
                             recipe.layer = layer
                             recipe.filename = f
                             recipe.filepath = os.path.relpath(root, layerdir)
-                            update_recipe_file(config_data_copy, root, recipe)
+                            update_recipe_file(config_data_copy, root, recipe, layerdir_start, repodir)
                             recipe.save()
                         else:
                             fullpath = os.path.join(root, f)
@@ -379,7 +418,10 @@ def main():
             layer.vcs_last_fetch = datetime.now()
             layer.save()

-            transaction.commit()
+            if options.dryrun:
+                transaction.rollback()
+            else:
+                transaction.commit()
         except:
             import traceback
             traceback.print_exc()