layerindex: Fix recipe loading using tinfoil

Due to the enablement of BitBake multiconfiguration builds,
bb.cache now requires a cache instance in order to load a
recipe.

This change adds a databuilder and cache instance to tinfoil;
in the future we should provide a method to get recipe data at
the tinfoil level.
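
For reference, a minimal sketch of the new loading path, assuming an
already-prepared tinfoil instance; "recipe_fn" is a hypothetical recipe
path used only for illustration:

    import bb.cache
    import bb.cookerdata

    # Build a databuilder from tinfoil's parsed configuration and wrap it
    # in a NoCache instance (mirrors the change to _setup_tinfoil below).
    databuilder = bb.cookerdata.CookerDataBuilder(tinfoil.config)
    databuilder.parseBaseConfiguration()
    cache = bb.cache.NoCache(databuilder)

    # Full recipe data is now loaded through the NoCache instance rather
    # than the old bb.cache.Cache.loadDataFull() class method.
    envdata = cache.loadDataFull(recipe_fn, [])
    pn = envdata.getVar("PN", True)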

Signed-off-by: Aníbal Limón <anibal.limon@linux.intel.com>
commit 33110335d2
parent 42d9891497
Author: Aníbal Limón
Date:   2016-08-29 21:05:05 +00:00

2 changed files with 14 additions and 7 deletions

File 1 of 2:

@@ -34,6 +34,12 @@ def _setup_tinfoil(bitbakepath, enable_tracking):
         tinfoil.cooker.enableDataTracking()
     tinfoil.prepare(config_only = True)
+
+    # XXX: Setup databuilder
+    tinfoil.databuilder = bb.cookerdata.CookerDataBuilder(tinfoil.config)
+    tinfoil.databuilder.parseBaseConfiguration()
+    tinfoil.cache = bb.cache.NoCache(tinfoil.databuilder)
 
     return tinfoil
 
 def _parse_layer_conf(layerdir, data):
@ -135,6 +141,7 @@ def setup_layer(config_data, fetchdir, layerdir, layer, layerbranch):
deplayerdir = os.path.join(deprepodir, deplayerbranch.vcs_subdir)
_parse_layer_conf(deplayerdir, config_data_copy)
config_data_copy.delVar('LAYERDIR')
return config_data_copy
def get_var_files(fn, varlist, d):

File 2 of 2:

@@ -54,11 +54,11 @@ def split_recipe_fn(path):
         pv = "1.0"
     return (pn, pv)
 
-def update_recipe_file(data, path, recipe, layerdir_start, repodir):
+def update_recipe_file(tinfoil, data, path, recipe, layerdir_start, repodir):
     fn = str(os.path.join(path, recipe.filename))
     try:
         logger.debug('Updating recipe %s' % fn)
-        envdata = bb.cache.Cache.loadDataFull(fn, [], data)
+        envdata = tinfoil.cache.loadDataFull(fn, [])
         envdata.setVar('SRCPV', 'X')
         recipe.pn = envdata.getVar("PN", True)
         recipe.pv = envdata.getVar("PV", True)
@@ -356,7 +356,7 @@ def main():
                             recipe.filepath = newfilepath
                             recipe.filename = newfilename
                             recipe.save()
-                            update_recipe_file(config_data_copy, os.path.join(layerdir, newfilepath), recipe, layerdir_start, repodir)
+                            update_recipe_file(tinfoil, config_data_copy, os.path.join(layerdir, newfilepath), recipe, layerdir_start, repodir)
                             updatedrecipes.add(os.path.join(oldfilepath, oldfilename))
                             updatedrecipes.add(os.path.join(newfilepath, newfilename))
                         else:
@@ -471,7 +471,7 @@ def main():
                             results = layerrecipes.filter(filepath=filepath).filter(filename=filename)[:1]
                             if results:
                                 recipe = results[0]
-                                update_recipe_file(config_data_copy, os.path.join(layerdir, filepath), recipe, layerdir_start, repodir)
+                                update_recipe_file(tinfoil, config_data_copy, os.path.join(layerdir, filepath), recipe, layerdir_start, repodir)
                                 recipe.save()
                                 updatedrecipes.add(recipe.full_path())
                         elif typename == 'machine':
@@ -487,7 +487,7 @@ def main():
                    for recipe in dirtyrecipes:
                        if not recipe.full_path() in updatedrecipes:
-                           update_recipe_file(config_data_copy, os.path.join(layerdir, recipe.filepath), recipe, layerdir_start, repodir)
+                           update_recipe_file(tinfoil, config_data_copy, os.path.join(layerdir, recipe.filepath), recipe, layerdir_start, repodir)
 
                else:
                    # Collect recipe data from scratch
@@ -513,7 +513,7 @@ def main():
                        # Recipe still exists, update it
                        results = layerrecipes.filter(id=v['id'])[:1]
                        recipe = results[0]
-                       update_recipe_file(config_data_copy, root, recipe, layerdir_start, repodir)
+                       update_recipe_file(tinfoil, config_data_copy, root, recipe, layerdir_start, repodir)
                    else:
                        # Recipe no longer exists, mark it for later on
                        layerrecipes_delete.append(v)
@@ -575,7 +575,7 @@ def main():
                    recipe.filename = os.path.basename(added)
                    root = os.path.dirname(added)
                    recipe.filepath = os.path.relpath(root, layerdir)
-                   update_recipe_file(config_data_copy, root, recipe, layerdir_start, repodir)
+                   update_recipe_file(tinfoil, config_data_copy, root, recipe, layerdir_start, repodir)
                    recipe.save()
 
                for deleted in layerrecipes_delete: