logging: use warning instead of warn

The warn method is deprecated. We should use the documented warning instead.

Quoting from Python's official documentation:
"""
Note: There is an obsolete method warn which is functionally identical to warning.
      As warn is deprecated, please do not use it - use warning instead.
"""

(From OE-Core rev: cc771aa4b74f222f1bea38b0b50196b2fbc97ab4)

Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
Signed-off-by: Ross Burton <ross.burton@intel.com>
Signed-off-by: Armin Kuster <akuster808@gmail.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
This commit is contained in:
Chen Qi 2018-07-19 13:47:18 +08:00 committed by Richard Purdie
parent 8dcc23b5cc
commit 8c7ca97dd6
16 changed files with 48 additions and 48 deletions

View File

@ -112,7 +112,7 @@ class Screen(Terminal):
bb.event.fire(bb.event.LogExecTTY(msg, "screen -r %s" % s_id, bb.event.fire(bb.event.LogExecTTY(msg, "screen -r %s" % s_id,
0.5, 10), d) 0.5, 10), d)
else: else:
logger.warn(msg) logger.warning(msg)
class TmuxRunning(Terminal): class TmuxRunning(Terminal):
"""Open a new pane in the current running tmux window""" """Open a new pane in the current running tmux window"""
@ -168,7 +168,7 @@ class Tmux(Terminal):
if d: if d:
bb.event.fire(bb.event.LogExecTTY(msg, attach_cmd, 0.5, 10), d) bb.event.fire(bb.event.LogExecTTY(msg, attach_cmd, 0.5, 10), d)
else: else:
logger.warn(msg) logger.warning(msg)
class Custom(Terminal): class Custom(Terminal):
command = 'false' # This is a placeholder command = 'false' # This is a placeholder
@ -180,7 +180,7 @@ class Custom(Terminal):
if not '{command}' in self.command: if not '{command}' in self.command:
self.command += ' {command}' self.command += ' {command}'
Terminal.__init__(self, sh_cmd, title, env, d) Terminal.__init__(self, sh_cmd, title, env, d)
logger.warn('Custom terminal was started.') logger.warning('Custom terminal was started.')
else: else:
logger.debug(1, 'No custom terminal (OE_TERMINAL_CUSTOMCMD) set') logger.debug(1, 'No custom terminal (OE_TERMINAL_CUSTOMCMD) set')
raise UnsupportedTerminal('OE_TERMINAL_CUSTOMCMD not set') raise UnsupportedTerminal('OE_TERMINAL_CUSTOMCMD not set')

View File

@ -159,7 +159,7 @@ class OESelftestTestContextExecutor(OETestContextExecutor):
os.chdir(builddir) os.chdir(builddir)
if not "meta-selftest" in self.tc.td["BBLAYERS"]: if not "meta-selftest" in self.tc.td["BBLAYERS"]:
self.tc.logger.warn("meta-selftest layer not found in BBLAYERS, adding it") self.tc.logger.warning("meta-selftest layer not found in BBLAYERS, adding it")
meta_selftestdir = os.path.join( meta_selftestdir = os.path.join(
self.tc.td["BBLAYERS_FETCH_DIR"], 'meta-selftest') self.tc.td["BBLAYERS_FETCH_DIR"], 'meta-selftest')
if os.path.isdir(meta_selftestdir): if os.path.isdir(meta_selftestdir):

View File

@ -393,7 +393,7 @@ class QemuRunner:
# qemu-system behaves well and a SIGTERM is enough # qemu-system behaves well and a SIGTERM is enough
os.kill(self.qemupid, signal.SIGTERM) os.kill(self.qemupid, signal.SIGTERM)
except ProcessLookupError as e: except ProcessLookupError as e:
self.logger.warn('qemu-system ended unexpectedly') self.logger.warning('qemu-system ended unexpectedly')
def stop_thread(self): def stop_thread(self):
if self.thread and self.thread.is_alive(): if self.thread and self.thread.is_alive():
@ -470,7 +470,7 @@ class QemuRunner:
def _dump_host(self): def _dump_host(self):
self.host_dumper.create_dir("qemu") self.host_dumper.create_dir("qemu")
self.logger.warn("Qemu ended unexpectedly, dump data from host" self.logger.warning("Qemu ended unexpectedly, dump data from host"
" is in %s" % self.host_dumper.dump_dir) " is in %s" % self.host_dumper.dump_dir)
self.host_dumper.dump_host() self.host_dumper.dump_host()

View File

@ -246,7 +246,7 @@ def action_init(conf, args):
# traditional behavior from "git archive" (preserved # traditional behavior from "git archive" (preserved
# here) it to choose the first one. This might not be # here) it to choose the first one. This might not be
# intended, so at least warn about it. # intended, so at least warn about it.
logger.warn("%s: initial revision '%s' not unique, picking result of rev-parse = %s" % logger.warning("%s: initial revision '%s' not unique, picking result of rev-parse = %s" %
(name, initialrev, refs[0])) (name, initialrev, refs[0]))
initialrev = rev initialrev = rev
except: except:

View File

@ -27,7 +27,7 @@ logger = scriptutils.logger_create('verify_homepage')
def wgetHomepage(pn, homepage): def wgetHomepage(pn, homepage):
result = subprocess.call('wget ' + '-q -T 5 -t 1 --spider ' + homepage, shell = True) result = subprocess.call('wget ' + '-q -T 5 -t 1 --spider ' + homepage, shell = True)
if result: if result:
logger.warn("%s: failed to verify HOMEPAGE: %s " % (pn, homepage)) logger.warning("%s: failed to verify HOMEPAGE: %s " % (pn, homepage))
return 1 return 1
else: else:
return 0 return 0

View File

@ -84,7 +84,7 @@ def export(args, config, basepath, workspace):
# if all workspace is excluded, quit # if all workspace is excluded, quit
if not len(set(workspace.keys()).difference(set(args.exclude))): if not len(set(workspace.keys()).difference(set(args.exclude))):
logger.warn('All recipes in workspace excluded, nothing to export') logger.warning('All recipes in workspace excluded, nothing to export')
return 0 return 0
exported = [] exported = []

View File

@ -81,7 +81,7 @@ def devimport(args, config, basepath, workspace):
break break
else: else:
non_importables.append(fn) non_importables.append(fn)
logger.warn('No recipe to append %s.bbapppend, skipping' % fn) logger.warning('No recipe to append %s.bbapppend, skipping' % fn)
# Extract # Extract
imported = [] imported = []
@ -104,9 +104,9 @@ def devimport(args, config, basepath, workspace):
try: try:
tar.extract(member, path=config.workspace_path) tar.extract(member, path=config.workspace_path)
except PermissionError as pe: except PermissionError as pe:
logger.warn(pe) logger.warning(pe)
else: else:
logger.warn('File already present. Use --overwrite/-o to overwrite it: %s' % member.name) logger.warning('File already present. Use --overwrite/-o to overwrite it: %s' % member.name)
continue continue
else: else:
tar.extract(member, path=config.workspace_path) tar.extract(member, path=config.workspace_path)
@ -129,7 +129,7 @@ def devimport(args, config, basepath, workspace):
if imported: if imported:
logger.info('Imported recipes into workspace %s: %s' % (config.workspace_path, ', '.join(imported))) logger.info('Imported recipes into workspace %s: %s' % (config.workspace_path, ', '.join(imported)))
else: else:
logger.warn('No recipes imported into the workspace') logger.warning('No recipes imported into the workspace')
return 0 return 0

View File

@ -66,7 +66,7 @@ def add(args, config, basepath, workspace):
args.srctree = args.recipename args.srctree = args.recipename
args.recipename = None args.recipename = None
elif os.path.isdir(args.recipename): elif os.path.isdir(args.recipename):
logger.warn('Ambiguous argument "%s" - assuming you mean it to be the recipe name' % args.recipename) logger.warning('Ambiguous argument "%s" - assuming you mean it to be the recipe name' % args.recipename)
if not args.fetchuri: if not args.fetchuri:
if args.srcrev: if args.srcrev:
@ -82,7 +82,7 @@ def add(args, config, basepath, workspace):
if args.fetchuri: if args.fetchuri:
raise DevtoolError('URI specified as positional argument as well as -f/--fetch') raise DevtoolError('URI specified as positional argument as well as -f/--fetch')
else: else:
logger.warn('-f/--fetch option is deprecated - you can now simply specify the URL to fetch as a positional argument instead') logger.warning('-f/--fetch option is deprecated - you can now simply specify the URL to fetch as a positional argument instead')
args.fetchuri = args.fetch args.fetchuri = args.fetch
if args.recipename: if args.recipename:
@ -217,7 +217,7 @@ def add(args, config, basepath, workspace):
raise DevtoolError('Command \'%s\' did not create any recipe file:\n%s' % (e.command, e.stdout)) raise DevtoolError('Command \'%s\' did not create any recipe file:\n%s' % (e.command, e.stdout))
attic_recipe = os.path.join(config.workspace_path, 'attic', recipename, os.path.basename(recipefile)) attic_recipe = os.path.join(config.workspace_path, 'attic', recipename, os.path.basename(recipefile))
if os.path.exists(attic_recipe): if os.path.exists(attic_recipe):
logger.warn('A modified recipe from a previous invocation exists in %s - you may wish to move this over the top of the new recipe if you had changes in it that you want to continue with' % attic_recipe) logger.warning('A modified recipe from a previous invocation exists in %s - you may wish to move this over the top of the new recipe if you had changes in it that you want to continue with' % attic_recipe)
finally: finally:
if tmpsrcdir and os.path.exists(tmpsrcdir): if tmpsrcdir and os.path.exists(tmpsrcdir):
shutil.rmtree(tmpsrcdir) shutil.rmtree(tmpsrcdir)
@ -295,7 +295,7 @@ def add(args, config, basepath, workspace):
with open(layerconf_file, 'a') as f: with open(layerconf_file, 'a') as f:
f.write('%s = "%s"\n' % (preferred_provider, recipe_name)) f.write('%s = "%s"\n' % (preferred_provider, recipe_name))
else: else:
logger.warn('Set \'%s\' in order to use the recipe' % preferred_provider) logger.warning('Set \'%s\' in order to use the recipe' % preferred_provider)
break break
_add_md5(config, recipename, appendfile) _add_md5(config, recipename, appendfile)
@ -704,7 +704,7 @@ def _check_preserve(config, recipename):
if splitline[2] != md5: if splitline[2] != md5:
bb.utils.mkdirhier(preservepath) bb.utils.mkdirhier(preservepath)
preservefile = os.path.basename(removefile) preservefile = os.path.basename(removefile)
logger.warn('File %s modified since it was written, preserving in %s' % (preservefile, preservepath)) logger.warning('File %s modified since it was written, preserving in %s' % (preservefile, preservepath))
shutil.move(removefile, os.path.join(preservepath, preservefile)) shutil.move(removefile, os.path.join(preservepath, preservefile))
else: else:
os.remove(removefile) os.remove(removefile)
@ -795,7 +795,7 @@ def modify(args, config, basepath, workspace):
if branchname.startswith(override_branch_prefix): if branchname.startswith(override_branch_prefix):
branches.append(branchname) branches.append(branchname)
if branches: if branches:
logger.warn('SRC_URI is conditionally overridden in this recipe, thus several %s* branches have been created, one for each override that makes changes to SRC_URI. It is recommended that you make changes to the %s branch first, then checkout and rebase each %s* branch and update any unique patches there (duplicates on those branches will be ignored by devtool finish/update-recipe)' % (override_branch_prefix, args.branch, override_branch_prefix)) logger.warning('SRC_URI is conditionally overridden in this recipe, thus several %s* branches have been created, one for each override that makes changes to SRC_URI. It is recommended that you make changes to the %s branch first, then checkout and rebase each %s* branch and update any unique patches there (duplicates on those branches will be ignored by devtool finish/update-recipe)' % (override_branch_prefix, args.branch, override_branch_prefix))
branches.insert(0, args.branch) branches.insert(0, args.branch)
seen_patches = [] seen_patches = []
for branch in branches: for branch in branches:
@ -1720,7 +1720,7 @@ def update_recipe(args, config, basepath, workspace):
if updated: if updated:
rf = rd.getVar('FILE') rf = rd.getVar('FILE')
if rf.startswith(config.workspace_path): if rf.startswith(config.workspace_path):
logger.warn('Recipe file %s has been updated but is inside the workspace - you will need to move it (and any associated files next to it) out to the desired layer before using "devtool reset" in order to keep any changes' % rf) logger.warning('Recipe file %s has been updated but is inside the workspace - you will need to move it (and any associated files next to it) out to the desired layer before using "devtool reset" in order to keep any changes' % rf)
finally: finally:
tinfoil.shutdown() tinfoil.shutdown()
@ -1803,7 +1803,7 @@ def _reset(recipes, no_clean, config, basepath, workspace):
if os.path.exists(origdir): if os.path.exists(origdir):
for root, dirs, files in os.walk(origdir): for root, dirs, files in os.walk(origdir):
for fn in files: for fn in files:
logger.warn('Preserving %s in %s' % (fn, preservepath)) logger.warning('Preserving %s in %s' % (fn, preservepath))
_move_file(os.path.join(origdir, fn), _move_file(os.path.join(origdir, fn),
os.path.join(preservepath, fn)) os.path.join(preservepath, fn))
for dn in dirs: for dn in dirs:

View File

@ -264,7 +264,7 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee
if no_patch: if no_patch:
patches = oe.recipeutils.get_recipe_patches(crd) patches = oe.recipeutils.get_recipe_patches(crd)
if patches: if patches:
logger.warn('By user choice, the following patches will NOT be applied to the new source tree:\n %s' % '\n '.join([os.path.basename(patch) for patch in patches])) logger.warning('By user choice, the following patches will NOT be applied to the new source tree:\n %s' % '\n '.join([os.path.basename(patch) for patch in patches]))
else: else:
__run('git checkout devtool-patched -b %s' % branch) __run('git checkout devtool-patched -b %s' % branch)
skiptag = False skiptag = False
@ -273,9 +273,9 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee
except bb.process.ExecutionError as e: except bb.process.ExecutionError as e:
skiptag = True skiptag = True
if 'conflict' in e.stdout: if 'conflict' in e.stdout:
logger.warn('Command \'%s\' failed:\n%s\n\nYou will need to resolve conflicts in order to complete the upgrade.' % (e.command, e.stdout.rstrip())) logger.warning('Command \'%s\' failed:\n%s\n\nYou will need to resolve conflicts in order to complete the upgrade.' % (e.command, e.stdout.rstrip()))
else: else:
logger.warn('Command \'%s\' failed:\n%s' % (e.command, e.stdout)) logger.warning('Command \'%s\' failed:\n%s' % (e.command, e.stdout))
if not skiptag: if not skiptag:
if uri.startswith('git://'): if uri.startswith('git://'):
suffix = 'new' suffix = 'new'

View File

@ -238,7 +238,7 @@ def appendfile(args):
if stdout: if stdout:
logger.debug('file command output: %s' % stdout.rstrip()) logger.debug('file command output: %s' % stdout.rstrip())
if ('executable' in stdout and not 'shell script' in stdout) or 'shared object' in stdout: if ('executable' in stdout and not 'shell script' in stdout) or 'shared object' in stdout:
logger.warn('This file looks like it is a binary or otherwise the output of compilation. If it is, you should consider building it properly instead of substituting a binary file directly.') logger.warning('This file looks like it is a binary or otherwise the output of compilation. If it is, you should consider building it properly instead of substituting a binary file directly.')
if args.recipe: if args.recipe:
recipes = {args.targetpath: [args.recipe],} recipes = {args.targetpath: [args.recipe],}
@ -275,7 +275,7 @@ def appendfile(args):
if selectpn: if selectpn:
logger.debug('Selecting recipe %s for file %s' % (selectpn, args.targetpath)) logger.debug('Selecting recipe %s for file %s' % (selectpn, args.targetpath))
if postinst_pns: if postinst_pns:
logger.warn('%s be modified by postinstall scripts for the following recipes:\n %s\nThis may or may not be an issue depending on what modifications these postinstall scripts make.' % (args.targetpath, '\n '.join(postinst_pns))) logger.warning('%s be modified by postinstall scripts for the following recipes:\n %s\nThis may or may not be an issue depending on what modifications these postinstall scripts make.' % (args.targetpath, '\n '.join(postinst_pns)))
rd = _parse_recipe(selectpn, tinfoil) rd = _parse_recipe(selectpn, tinfoil)
if not rd: if not rd:
# Error message already shown # Error message already shown
@ -286,12 +286,12 @@ def appendfile(args):
sourcetype, sourcepath = sourcefile.split('://', 1) sourcetype, sourcepath = sourcefile.split('://', 1)
logger.debug('Original source file is %s (%s)' % (sourcepath, sourcetype)) logger.debug('Original source file is %s (%s)' % (sourcepath, sourcetype))
if sourcetype == 'patch': if sourcetype == 'patch':
logger.warn('File %s is added by the patch %s - you may need to remove or replace this patch in order to replace the file.' % (args.targetpath, sourcepath)) logger.warning('File %s is added by the patch %s - you may need to remove or replace this patch in order to replace the file.' % (args.targetpath, sourcepath))
sourcepath = None sourcepath = None
else: else:
logger.debug('Unable to determine source file, proceeding anyway') logger.debug('Unable to determine source file, proceeding anyway')
if modpatches: if modpatches:
logger.warn('File %s is modified by the following patches:\n %s' % (args.targetpath, '\n '.join(modpatches))) logger.warning('File %s is modified by the following patches:\n %s' % (args.targetpath, '\n '.join(modpatches)))
if instelements and sourcepath: if instelements and sourcepath:
install = None install = None
@ -343,7 +343,7 @@ def appendsrc(args, files, rd, extralines=None):
if rd.getVar('S') == rd.getVar('STAGING_KERNEL_DIR'): if rd.getVar('S') == rd.getVar('STAGING_KERNEL_DIR'):
srcdir = os.path.join(workdir, 'git') srcdir = os.path.join(workdir, 'git')
if not bb.data.inherits_class('kernel-yocto', rd): if not bb.data.inherits_class('kernel-yocto', rd):
logger.warn('S == STAGING_KERNEL_DIR and non-kernel-yocto, unable to determine path to srcdir, defaulting to ${WORKDIR}/git') logger.warning('S == STAGING_KERNEL_DIR and non-kernel-yocto, unable to determine path to srcdir, defaulting to ${WORKDIR}/git')
src_destdir = os.path.join(os.path.relpath(srcdir, workdir), src_destdir) src_destdir = os.path.join(os.path.relpath(srcdir, workdir), src_destdir)
src_destdir = os.path.normpath(src_destdir) src_destdir = os.path.normpath(src_destdir)
@ -357,9 +357,9 @@ def appendsrc(args, files, rd, extralines=None):
if simple_str in simplified: if simple_str in simplified:
existing = simplified[simple_str] existing = simplified[simple_str]
if source_uri != existing: if source_uri != existing:
logger.warn('{0!r} is already in SRC_URI, with different parameters: {1!r}, not adding'.format(source_uri, existing)) logger.warning('{0!r} is already in SRC_URI, with different parameters: {1!r}, not adding'.format(source_uri, existing))
else: else:
logger.warn('{0!r} is already in SRC_URI, not adding'.format(source_uri)) logger.warning('{0!r} is already in SRC_URI, not adding'.format(source_uri))
else: else:
extralines.append('SRC_URI += {0}'.format(source_uri)) extralines.append('SRC_URI += {0}'.format(source_uri))
copyfiles[newfile] = srcfile copyfiles[newfile] = srcfile

View File

@ -98,7 +98,7 @@ class RecipeHandler(object):
break break
except IOError as ioe: except IOError as ioe:
if ioe.errno == 2: if ioe.errno == 2:
logger.warn('unable to find a pkgdata file for package %s' % pkg) logger.warning('unable to find a pkgdata file for package %s' % pkg)
else: else:
raise raise
@ -437,7 +437,7 @@ def create_recipe(args):
if scriptutils.is_src_url(source): if scriptutils.is_src_url(source):
# Warn about github archive URLs # Warn about github archive URLs
if re.match('https?://github.com/[^/]+/[^/]+/archive/.+(\.tar\..*|\.zip)$', source): if re.match('https?://github.com/[^/]+/[^/]+/archive/.+(\.tar\..*|\.zip)$', source):
logger.warn('github archive files are not guaranteed to be stable and may be re-generated over time. If the latter occurs, the checksums will likely change and the recipe will fail at do_fetch. It is recommended that you point to an actual commit or tag in the repository instead (using the repository URL in conjunction with the -S/--srcrev option).') logger.warning('github archive files are not guaranteed to be stable and may be re-generated over time. If the latter occurs, the checksums will likely change and the recipe will fail at do_fetch. It is recommended that you point to an actual commit or tag in the repository instead (using the repository URL in conjunction with the -S/--srcrev option).')
# Fetch a URL # Fetch a URL
fetchuri = reformat_git_uri(urldefrag(source)[0]) fetchuri = reformat_git_uri(urldefrag(source)[0])
if args.binary: if args.binary:

View File

@ -141,7 +141,7 @@ class KernelModuleRecipeHandler(RecipeHandler):
warnmsg = 'Unable to find means of passing kernel path into install makefile - if kernel path is hardcoded you will need to patch the makefile' warnmsg = 'Unable to find means of passing kernel path into install makefile - if kernel path is hardcoded you will need to patch the makefile'
if warnmsg: if warnmsg:
warnmsg += '. Note that the variable KERNEL_SRC will be passed in as the kernel source path.' warnmsg += '. Note that the variable KERNEL_SRC will be passed in as the kernel source path.'
logger.warn(warnmsg) logger.warning(warnmsg)
lines_after.append('# %s' % warnmsg) lines_after.append('# %s' % warnmsg)
return True return True

View File

@ -90,7 +90,7 @@ class NpmRecipeHandler(RecipeHandler):
runenv = dict(os.environ, PATH=d.getVar('PATH')) runenv = dict(os.environ, PATH=d.getVar('PATH'))
bb.process.run('npm shrinkwrap', cwd=srctree, stderr=subprocess.STDOUT, env=runenv, shell=True) bb.process.run('npm shrinkwrap', cwd=srctree, stderr=subprocess.STDOUT, env=runenv, shell=True)
except bb.process.ExecutionError as e: except bb.process.ExecutionError as e:
logger.warn('npm shrinkwrap failed:\n%s' % e.stdout) logger.warning('npm shrinkwrap failed:\n%s' % e.stdout)
return return
tmpfile = os.path.join(localfilesdir, 'npm-shrinkwrap.json') tmpfile = os.path.join(localfilesdir, 'npm-shrinkwrap.json')
@ -107,12 +107,12 @@ class NpmRecipeHandler(RecipeHandler):
cwd=srctree, stderr=subprocess.STDOUT, env=runenv, shell=True) cwd=srctree, stderr=subprocess.STDOUT, env=runenv, shell=True)
relockbin = os.path.join(NpmRecipeHandler.lockdownpath, 'node_modules', 'lockdown', 'relock.js') relockbin = os.path.join(NpmRecipeHandler.lockdownpath, 'node_modules', 'lockdown', 'relock.js')
if not os.path.exists(relockbin): if not os.path.exists(relockbin):
logger.warn('Could not find relock.js within lockdown directory; skipping lockdown') logger.warning('Could not find relock.js within lockdown directory; skipping lockdown')
return return
try: try:
bb.process.run('node %s' % relockbin, cwd=srctree, stderr=subprocess.STDOUT, env=runenv, shell=True) bb.process.run('node %s' % relockbin, cwd=srctree, stderr=subprocess.STDOUT, env=runenv, shell=True)
except bb.process.ExecutionError as e: except bb.process.ExecutionError as e:
logger.warn('lockdown-relock failed:\n%s' % e.stdout) logger.warning('lockdown-relock failed:\n%s' % e.stdout)
return return
tmpfile = os.path.join(localfilesdir, 'lockdown.json') tmpfile = os.path.join(localfilesdir, 'lockdown.json')

View File

@ -58,11 +58,11 @@ def newappend(args):
return 1 return 1
if not path_ok: if not path_ok:
logger.warn('Unable to determine correct subdirectory path for bbappend file - check that what %s adds to BBFILES also matches .bbappend files. Using %s for now, but until you fix this the bbappend will not be applied.', os.path.join(args.destlayer, 'conf', 'layer.conf'), os.path.dirname(append_path)) logger.warning('Unable to determine correct subdirectory path for bbappend file - check that what %s adds to BBFILES also matches .bbappend files. Using %s for now, but until you fix this the bbappend will not be applied.', os.path.join(args.destlayer, 'conf', 'layer.conf'), os.path.dirname(append_path))
layerdirs = [os.path.abspath(layerdir) for layerdir in rd.getVar('BBLAYERS').split()] layerdirs = [os.path.abspath(layerdir) for layerdir in rd.getVar('BBLAYERS').split()]
if not os.path.abspath(args.destlayer) in layerdirs: if not os.path.abspath(args.destlayer) in layerdirs:
logger.warn('Specified layer is not currently enabled in bblayers.conf, you will need to add it before this bbappend will be active') logger.warning('Specified layer is not currently enabled in bblayers.conf, you will need to add it before this bbappend will be active')
if not os.path.exists(append_path): if not os.path.exists(append_path):
bb.utils.mkdirhier(os.path.dirname(append_path)) bb.utils.mkdirhier(os.path.dirname(append_path))

View File

@ -506,7 +506,7 @@ class Disk:
sparse_copy(partfname, target, seek=part['start'] * self._lsector_size) sparse_copy(partfname, target, seek=part['start'] * self._lsector_size)
os.unlink(partfname) os.unlink(partfname)
elif part['type'] != 'f': elif part['type'] != 'f':
logger.warn("skipping partition {}: unsupported fstype {}".format(pnum, fstype)) logger.warning("skipping partition {}: unsupported fstype {}".format(pnum, fstype))
def wic_ls(args, native_sysroot): def wic_ls(args, native_sysroot):
"""List contents of partitioned image or vfat partition.""" """List contents of partitioned image or vfat partition."""

View File

@ -290,7 +290,7 @@ class BaseConfig(object):
def check_arg_fstype(self, fst): def check_arg_fstype(self, fst):
"""Check and set FSTYPE""" """Check and set FSTYPE"""
if fst not in self.fstypes + self.vmtypes: if fst not in self.fstypes + self.vmtypes:
logger.warn("Maybe unsupported FSTYPE: %s" % fst) logger.warning("Maybe unsupported FSTYPE: %s" % fst)
if not self.fstype or self.fstype == fst: if not self.fstype or self.fstype == fst:
if fst == 'ramfs': if fst == 'ramfs':
fst = 'cpio.gz' fst = 'cpio.gz'
@ -352,7 +352,7 @@ class BaseConfig(object):
self.qemuboot = qb self.qemuboot = qb
self.qbconfload = True self.qbconfload = True
else: else:
logger.warn("%s doesn't exist" % qb) logger.warning("%s doesn't exist" % qb)
else: else:
raise RunQemuError("Can't find FSTYPE from: %s" % p) raise RunQemuError("Can't find FSTYPE from: %s" % p)
@ -692,7 +692,7 @@ class BaseConfig(object):
if not self.get('QB_AUDIO_DRV'): if not self.get('QB_AUDIO_DRV'):
raise RunQemuError("QB_AUDIO_DRV is NULL, this board doesn't support audio") raise RunQemuError("QB_AUDIO_DRV is NULL, this board doesn't support audio")
if not self.get('QB_AUDIO_OPT'): if not self.get('QB_AUDIO_OPT'):
logger.warn('QB_AUDIO_OPT is NULL, you may need define it to make audio work') logger.warning('QB_AUDIO_OPT is NULL, you may need define it to make audio work')
else: else:
self.qemu_opt_script += ' %s' % self.get('QB_AUDIO_OPT') self.qemu_opt_script += ' %s' % self.get('QB_AUDIO_OPT')
os.putenv('QEMU_AUDIO_DRV', self.get('QB_AUDIO_DRV')) os.putenv('QEMU_AUDIO_DRV', self.get('QB_AUDIO_DRV'))
@ -714,7 +714,7 @@ class BaseConfig(object):
if self.get('DEPLOY_DIR_IMAGE'): if self.get('DEPLOY_DIR_IMAGE'):
deploy_dir_image = self.get('DEPLOY_DIR_IMAGE') deploy_dir_image = self.get('DEPLOY_DIR_IMAGE')
else: else:
logger.warn("Can't find qemuboot conf file, DEPLOY_DIR_IMAGE is NULL!") logger.warning("Can't find qemuboot conf file, DEPLOY_DIR_IMAGE is NULL!")
return return
if self.rootfs and not os.path.exists(self.rootfs): if self.rootfs and not os.path.exists(self.rootfs):
@ -1058,9 +1058,9 @@ class BaseConfig(object):
# virtio might have been selected explicitly (just use it), or # virtio might have been selected explicitly (just use it), or
# is used as fallback (then warn about that). # is used as fallback (then warn about that).
if not drive_type.startswith("/dev/vd"): if not drive_type.startswith("/dev/vd"):
logger.warn("Unknown QB_DRIVE_TYPE: %s" % drive_type) logger.warning("Unknown QB_DRIVE_TYPE: %s" % drive_type)
logger.warn("Failed to figure out drive type, consider define or fix QB_DRIVE_TYPE") logger.warning("Failed to figure out drive type, consider define or fix QB_DRIVE_TYPE")
logger.warn('Trying to use virtio block drive') logger.warning('Trying to use virtio block drive')
vm_drive = '-drive if=virtio,file=%s,format=%s' % (self.rootfs, rootfs_format) vm_drive = '-drive if=virtio,file=%s,format=%s' % (self.rootfs, rootfs_format)
# All branches above set vm_drive. # All branches above set vm_drive.
@ -1275,7 +1275,7 @@ class BaseConfig(object):
self.bitbake_e = subprocess.check_output(cmd, shell=True).decode('utf-8') self.bitbake_e = subprocess.check_output(cmd, shell=True).decode('utf-8')
except subprocess.CalledProcessError as err: except subprocess.CalledProcessError as err:
self.bitbake_e = '' self.bitbake_e = ''
logger.warn("Couldn't run 'bitbake -e' to gather environment information:\n%s" % err.output.decode('utf-8')) logger.warning("Couldn't run 'bitbake -e' to gather environment information:\n%s" % err.output.decode('utf-8'))
def validate_combos(self): def validate_combos(self):
if (self.fstype in self.vmtypes) and self.kernel: if (self.fstype in self.vmtypes) and self.kernel: