recipetool: create: reimplement fetching with normal fetch/unpack tasks
Now that we have the ability to run tasks in a more standard context
through tinfoil, change recipetool's fetching code to use that mechanism
to fetch files. This has the major advantage that any dependencies of
do_fetch and do_unpack (e.g. for subversion or npm) will be handled
automatically. It also has the beneficial side-effect of fixing a recent
regression that prevented this fetch operation from working with
memory-resident bitbake.

Also fix devtool's usage of fetch_uri() at the same time so that we can
replace it completely.

Fixes [YOCTO #11710].

(From OE-Core rev: 9a47a6690052ef943c0d4760630ee630fb012153)

Signed-off-by: Paul Eggleton <paul.eggleton@linux.intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
commit e4346e8be5
parent 7d474e27bc
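As a rough orientation for reviewers, the sketch below shows how a script that already holds a prepared tinfoil instance (as devtool and recipetool do) would call the new scriptutils.fetch_url() added by this commit. The wrapper example_fetch, the logger name and the destdir prefix are illustrative only, and it assumes poky's scripts/lib directory is on sys.path; it is not part of the change itself.

    import logging
    import tempfile

    import scriptutils  # poky scripts/lib module providing fetch_url/FetchUrlFailure

    def example_fetch(tinfoil, uri, srcrev=None):
        # 'tinfoil' must be an already-prepared bb.tinfoil.Tinfoil instance
        logger = logging.getLogger('example')
        destdir = tempfile.mkdtemp(prefix='example-fetch-')
        try:
            # Runs do_fetch/do_unpack for a generated temporary recipe, so any
            # fetcher dependencies (e.g. subversion or npm tooling) are built
            # first, then the unpacked source is moved into destdir
            checksums, ftmpdir = scriptutils.fetch_url(
                tinfoil, uri, srcrev, destdir, logger, preserve_tmp=False)
        except scriptutils.FetchUrlFailure as e:
            logger.error(str(e))
            raise
        # checksums is a dict keyed by checksum name (e.g. 'md5sum', 'sha256sum'),
        # collected from bb.fetch2.MissingChecksumEvent during the fetch
        return destdir, checksums

Compare this with the devtool hunk below, which does essentially the same thing and then reads checksums['md5sum'] and checksums['sha256sum'].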
@@ -207,10 +207,16 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, branch, keep_temp, tin
     tmpdir = tempfile.mkdtemp(prefix='devtool')
     try:
-        md5, sha256 = scriptutils.fetch_uri(tinfoil.config_data, uri, tmpdir, rev)
-    except bb.fetch2.FetchError as e:
+        checksums, ftmpdir = scriptutils.fetch_url(tinfoil, uri, rev, tmpdir, logger, preserve_tmp=keep_temp)
+    except scriptutils.FetchUrlFailure as e:
         raise DevtoolError(e)
 
+    if ftmpdir and keep_temp:
+        logger.info('Fetch temp directory is %s' % ftmpdir)
+
+    md5 = checksums['md5sum']
+    sha256 = checksums['sha256sum']
+
     tmpsrctree = _get_srctree(tmpdir)
     srctree = os.path.abspath(srctree)
@@ -417,7 +417,7 @@ def create_recipe(args):
         pkgarch = "${MACHINE_ARCH}"
 
     extravalues = {}
-    checksums = (None, None)
+    checksums = {}
     tempsrc = ''
     source = args.source
     srcsubdir = ''
@@ -439,22 +439,25 @@ def create_recipe(args):
         if res:
             srcrev = res.group(1)
             srcuri = rev_re.sub('', srcuri)
-        tempsrc = tempfile.mkdtemp(prefix='recipetool-')
-        srctree = tempsrc
-        d = bb.data.createCopy(tinfoil.config_data)
-        if fetchuri.startswith('npm://'):
-            # Check if npm is available
-            npm_bindir = check_npm(tinfoil, args.devtool)
-            d.prependVar('PATH', '%s:' % npm_bindir)
-        logger.info('Fetching %s...' % srcuri)
+        tmpparent = tinfoil.config_data.getVar('BASE_WORKDIR')
+        bb.utils.mkdirhier(tmpparent)
+        tempsrc = tempfile.mkdtemp(prefix='recipetool-', dir=tmpparent)
+        srctree = os.path.join(tempsrc, 'source')
         try:
-            checksums = scriptutils.fetch_uri(d, fetchuri, srctree, srcrev)
-        except bb.fetch2.BBFetchException as e:
-            logger.error(str(e).rstrip())
+            checksums, ftmpdir = scriptutils.fetch_url(tinfoil, srcuri, srcrev, srctree, logger, preserve_tmp=args.keep_temp)
+        except scriptutils.FetchUrlFailure as e:
+            logger.error(str(e))
             sys.exit(1)
+
+        if ftmpdir and args.keep_temp:
+            logger.info('Fetch temp directory is %s' % ftmpdir)
+
         dirlist = os.listdir(srctree)
-        if 'git.indirectionsymlink' in dirlist:
-            dirlist.remove('git.indirectionsymlink')
+        filterout = ['git.indirectionsymlink']
+        dirlist = [x for x in dirlist if x not in filterout]
+        logger.debug('Directory listing (excluding filtered out):\n  %s' % '\n  '.join(dirlist))
         if len(dirlist) == 1:
             singleitem = os.path.join(srctree, dirlist[0])
             if os.path.isdir(singleitem):
@@ -465,7 +468,7 @@ def create_recipe(args):
                 check_single_file(dirlist[0], fetchuri)
         elif len(dirlist) == 0:
             if '/' in fetchuri:
-                fn = os.path.join(d.getVar('DL_DIR'), fetchuri.split('/')[-1])
+                fn = os.path.join(tinfoil.config_data.getVar('DL_DIR'), fetchuri.split('/')[-1])
                 if os.path.isfile(fn):
                     check_single_file(fn, fetchuri)
             # If we've got to here then there's no source so we might as well give up
@@ -593,11 +596,8 @@ def create_recipe(args):
     if not srcuri:
         lines_before.append('# No information for SRC_URI yet (only an external source tree was specified)')
     lines_before.append('SRC_URI = "%s"' % srcuri)
-    (md5value, sha256value) = checksums
-    if md5value:
-        lines_before.append('SRC_URI[md5sum] = "%s"' % md5value)
-    if sha256value:
-        lines_before.append('SRC_URI[sha256sum] = "%s"' % sha256value)
+    for key, value in sorted(checksums.items()):
+        lines_before.append('SRC_URI[%s] = "%s"' % (key, value))
     if srcuri and supports_srcrev(srcuri):
         lines_before.append('')
         lines_before.append('# Modify these as desired')
@@ -109,7 +109,6 @@ class NpmRecipeHandler(RecipeHandler):
             if varname == 'SRC_URI':
                 if not origvalue.startswith('npm://'):
                     src_uri = origvalue.split()
-                    changed = False
                     deplist = {}
                     for dep, depver in optdeps.items():
                         depdata = self.get_npm_data(dep, depver, d)
@@ -123,14 +122,15 @@ class NpmRecipeHandler(RecipeHandler):
                         depdata = self.get_npm_data(dep, depver, d)
                         deplist[dep] = depdata
 
+                    extra_urls = []
                     for dep, depdata in deplist.items():
                         version = depdata.get('version', None)
                         if version:
                             url = 'npm://registry.npmjs.org;name=%s;version=%s;subdir=node_modules/%s' % (dep, version, dep)
-                            scriptutils.fetch_uri(d, url, srctree)
-                            src_uri.append(url)
-                            changed = True
-                    if changed:
+                            extra_urls.append(url)
+                    if extra_urls:
+                        scriptutils.fetch_url(tinfoil, ' '.join(extra_urls), None, srctree, logger)
+                        src_uri.extend(extra_urls)
                         return src_uri, None, -1, True
             return origvalue, None, 0, True
 
        updated, newlines = bb.utils.edit_metadata(lines_before, ['SRC_URI'], varfunc)
@@ -23,6 +23,8 @@ import argparse
 import subprocess
 import tempfile
 import shutil
+import random
+import string
 
 def logger_create(name, stream=None):
     logger = logging.getLogger(name)
@@ -78,50 +80,130 @@ def git_convert_standalone_clone(repodir):
         bb.process.run('git repack -a', cwd=repodir)
         os.remove(alternatesfile)
 
-def fetch_uri(d, uri, destdir, srcrev=None):
-    """Fetch a URI to a local directory"""
+def _get_temp_recipe_dir(d):
+    # This is a little bit hacky but we need to find a place where we can put
+    # the recipe so that bitbake can find it. We're going to delete it at the
+    # end so it doesn't really matter where we put it.
+    bbfiles = d.getVar('BBFILES').split()
+    fetchrecipedir = None
+    for pth in bbfiles:
+        if pth.endswith('.bb'):
+            pthdir = os.path.dirname(pth)
+            if os.access(os.path.dirname(os.path.dirname(pthdir)), os.W_OK):
+                fetchrecipedir = pthdir.replace('*', 'recipetool')
+                if pthdir.endswith('workspace/recipes/*'):
+                    # Prefer the workspace
+                    break
+    return fetchrecipedir
+
+class FetchUrlFailure(Exception):
+    def __init__(self, url):
+        self.url = url
+    def __str__(self):
+        return "Failed to fetch URL %s" % self.url
+
+def fetch_url(tinfoil, srcuri, srcrev, destdir, logger, preserve_tmp=False):
+    """
+    Fetch the specified URL using normal do_fetch and do_unpack tasks, i.e.
+    any dependencies that need to be satisfied in order to support the fetch
+    operation will be taken care of
+    """
+
     import bb
-    tmpparent = d.getVar('BASE_WORKDIR')
+
+    checksums = {}
+    fetchrecipepn = None
+
+    # We need to put our temp directory under ${BASE_WORKDIR} otherwise
+    # we may have problems with the recipe-specific sysroot population
+    tmpparent = tinfoil.config_data.getVar('BASE_WORKDIR')
     bb.utils.mkdirhier(tmpparent)
-    tmpworkdir = tempfile.mkdtemp(dir=tmpparent)
+    tmpdir = tempfile.mkdtemp(prefix='recipetool-', dir=tmpparent)
     try:
-        bb.utils.mkdirhier(destdir)
-        localdata = bb.data.createCopy(d)
+        tmpworkdir = os.path.join(tmpdir, 'work')
+        logger.debug('fetch_url: temp dir is %s' % tmpdir)
 
-        # Set some values to allow extend_recipe_sysroot to work here we're we are not running from a task
-        localdata.setVar('WORKDIR', tmpworkdir)
-        localdata.setVar('BB_RUNTASK', 'do_fetch')
-        localdata.setVar('PN', 'dummy')
-        localdata.setVar('BB_LIMITEDDEPS', '1')
-        bb.build.exec_func("extend_recipe_sysroot", localdata)
+        fetchrecipedir = _get_temp_recipe_dir(tinfoil.config_data)
+        if not fetchrecipedir:
+            logger.error('Searched BBFILES but unable to find a writeable place to put temporary recipe')
+            sys.exit(1)
+        fetchrecipe = None
+        bb.utils.mkdirhier(fetchrecipedir)
 
-        # Set some values for the benefit of the fetcher code
-        localdata.setVar('BB_STRICT_CHECKSUM', '')
-        localdata.setVar('SRCREV', srcrev)
-        ret = (None, None)
-        olddir = os.getcwd()
        try:
-            fetcher = bb.fetch2.Fetch([uri], localdata)
-            for u in fetcher.ud:
-                ud = fetcher.ud[u]
-                ud.ignore_checksums = True
-            fetcher.download()
-            for u in fetcher.ud:
-                ud = fetcher.ud[u]
-                if ud.localpath.rstrip(os.sep) == localdata.getVar('DL_DIR').rstrip(os.sep):
-                    raise Exception('Local path is download directory - please check that the URI "%s" is correct' % uri)
-            fetcher.unpack(destdir)
-            for u in fetcher.ud:
-                ud = fetcher.ud[u]
-                if ud.method.recommends_checksum(ud):
-                    md5value = bb.utils.md5_file(ud.localpath)
-                    sha256value = bb.utils.sha256_file(ud.localpath)
-                    ret = (md5value, sha256value)
+            # Generate a dummy recipe so we can follow more or less normal paths
+            # for do_fetch and do_unpack
+            # I'd use tempfile functions here but underscores can be produced by that and those
+            # aren't allowed in recipe file names except to separate the version
+            rndstring = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(8))
+            fetchrecipe = os.path.join(fetchrecipedir, 'tmp-recipetool-%s.bb' % rndstring)
+            fetchrecipepn = os.path.splitext(os.path.basename(fetchrecipe))[0]
+            logger.debug('Generating initial recipe %s for fetching' % fetchrecipe)
+            with open(fetchrecipe, 'w') as f:
+                # We don't want to have to specify LIC_FILES_CHKSUM
+                f.write('LICENSE = "CLOSED"\n')
+                # We don't need the cross-compiler
+                f.write('INHIBIT_DEFAULT_DEPS = "1"\n')
+                # We don't have the checksums yet so we can't require them
+                f.write('BB_STRICT_CHECKSUM = "ignore"\n')
+                f.write('SRC_URI = "%s"\n' % srcuri)
+                f.write('SRCREV = "%s"\n' % srcrev)
+                f.write('WORKDIR = "%s"\n' % tmpworkdir)
+                # Set S out of the way so it doesn't get created under the workdir
+                f.write('S = "%s"\n' % os.path.join(tmpdir, 'emptysrc'))
+
+            logger.info('Fetching %s...' % srcuri)
+
+            # FIXME this is too noisy at the moment
+
+            # Parse recipes so our new recipe gets picked up
+            tinfoil.parse_recipes()
+
+            def eventhandler(event):
+                if isinstance(event, bb.fetch2.MissingChecksumEvent):
+                    checksums.update(event.checksums)
+                    return True
+                return False
+
+            # Run the fetch + unpack tasks
+            res = tinfoil.build_targets(fetchrecipepn,
+                                        'do_unpack',
+                                        handle_events=True,
+                                        extra_events=['bb.fetch2.MissingChecksumEvent'],
+                                        event_callback=eventhandler)
+            if not res:
+                raise FetchUrlFailure(srcuri)
+
+            # Remove unneeded directories
+            rd = tinfoil.parse_recipe(fetchrecipepn)
+            if rd:
+                pathvars = ['T', 'RECIPE_SYSROOT', 'RECIPE_SYSROOT_NATIVE']
+                for pathvar in pathvars:
+                    path = rd.getVar(pathvar)
+                    shutil.rmtree(path)
        finally:
-            os.chdir(olddir)
+            if fetchrecipe:
+                try:
+                    os.remove(fetchrecipe)
+                except FileNotFoundError:
+                    pass
+            try:
+                os.rmdir(fetchrecipedir)
+            except OSError as e:
+                import errno
+                if e.errno != errno.ENOTEMPTY:
+                    raise
+
+        bb.utils.mkdirhier(destdir)
+        for fn in os.listdir(tmpworkdir):
+            shutil.move(os.path.join(tmpworkdir, fn), destdir)
+
    finally:
-        shutil.rmtree(tmpworkdir)
-    return ret
+        if not preserve_tmp:
+            shutil.rmtree(tmpdir)
+            tmpdir = None
+
+    return checksums, tmpdir
 
 def run_editor(fn):
     if isinstance(fn, str):