bitbake: bitbake: Update logger.warn() -> logger.warning()

Python deprecated logger.warn() in favour of logger.warning(). This is only
used in bitbake code so we may as well just translate everything to avoid
warnings under Python 3. It's safe for Python 2.7.

(Bitbake rev: 676a5f592e8507e81b8f748d58acfea7572f8796)

Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
This commit is contained in:
Richard Purdie 2016-05-09 14:01:12 +01:00
parent 5052bf92e4
commit 654eadfa30
21 changed files with 70 additions and 70 deletions

View File

@ -654,7 +654,7 @@ build results (as the layer priority order has effectively changed).
logger.plain(' Skipping layer config file %s' % f1full ) logger.plain(' Skipping layer config file %s' % f1full )
continue continue
else: else:
logger.warn('Overwriting file %s', fdest) logger.warning('Overwriting file %s', fdest)
bb.utils.copyfile(f1full, fdest) bb.utils.copyfile(f1full, fdest)
if ext == '.bb': if ext == '.bb':
for append in self.bbhandler.cooker.collection.get_file_appends(f1full): for append in self.bbhandler.cooker.collection.get_file_appends(f1full):
@ -790,8 +790,8 @@ Lists recipes with the bbappends that apply to them as subitems.
if best_filename: if best_filename:
if best_filename in missing: if best_filename in missing:
logger.warn('%s: missing append for preferred version', logger.warning('%s: missing append for preferred version',
best_filename) best_filename)
return True return True
else: else:
return False return False

View File

@ -364,7 +364,7 @@ class BitbakeWorker(object):
def handle_ping(self, _): def handle_ping(self, _):
workerlog_write("Handling ping\n") workerlog_write("Handling ping\n")
logger.warn("Pong from bitbake-worker!") logger.warning("Pong from bitbake-worker!")
def handle_quit(self, data): def handle_quit(self, data):
workerlog_write("Handling quit\n") workerlog_write("Handling quit\n")

View File

@ -85,7 +85,7 @@ def plain(*args):
def debug(lvl, *args): def debug(lvl, *args):
if isinstance(lvl, basestring): if isinstance(lvl, basestring):
mainlogger.warn("Passed invalid debug level '%s' to bb.debug", lvl) mainlogger.warning("Passed invalid debug level '%s' to bb.debug", lvl)
args = (lvl,) + args args = (lvl,) + args
lvl = 1 lvl = 1
mainlogger.debug(lvl, ''.join(args)) mainlogger.debug(lvl, ''.join(args))
@ -94,7 +94,7 @@ def note(*args):
mainlogger.info(''.join(args)) mainlogger.info(''.join(args))
def warn(*args): def warn(*args):
mainlogger.warn(''.join(args)) mainlogger.warning(''.join(args))
def error(*args, **kwargs): def error(*args, **kwargs):
mainlogger.error(''.join(args), extra=kwargs) mainlogger.error(''.join(args), extra=kwargs)

View File

@ -172,7 +172,7 @@ def exec_func(func, d, dirs = None, pythonexception=False):
body = d.getVar(func, False) body = d.getVar(func, False)
if not body: if not body:
if body is None: if body is None:
logger.warn("Function %s doesn't exist", func) logger.warning("Function %s doesn't exist", func)
return return
flags = d.getVarFlags(func) flags = d.getVarFlags(func)

View File

@ -364,7 +364,7 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
if varflags.get("python"): if varflags.get("python"):
parser = bb.codeparser.PythonParser(key, logger) parser = bb.codeparser.PythonParser(key, logger)
if value and "\t" in value: if value and "\t" in value:
logger.warn("Variable %s contains tabs, please remove these (%s)" % (key, d.getVar("FILE", True))) logger.warning("Variable %s contains tabs, please remove these (%s)" % (key, d.getVar("FILE", True)))
parser.parse_python(value, filename=varflags.get("filename"), lineno=varflags.get("lineno")) parser.parse_python(value, filename=varflags.get("filename"), lineno=varflags.get("lineno"))
deps = deps | parser.references deps = deps | parser.references
deps = deps | (keys & parser.execs) deps = deps | (keys & parser.execs)

View File

@ -586,12 +586,12 @@ def verify_checksum(ud, d, precomputed={}):
raise NoChecksumError('Missing SRC_URI checksum', ud.url) raise NoChecksumError('Missing SRC_URI checksum', ud.url)
# Log missing sums so user can more easily add them # Log missing sums so user can more easily add them
logger.warn('Missing md5 SRC_URI checksum for %s, consider adding to the recipe:\n' logger.warning('Missing md5 SRC_URI checksum for %s, consider adding to the recipe:\n'
'SRC_URI[%s] = "%s"', 'SRC_URI[%s] = "%s"',
ud.localpath, ud.md5_name, md5data) ud.localpath, ud.md5_name, md5data)
logger.warn('Missing sha256 SRC_URI checksum for %s, consider adding to the recipe:\n' logger.warning('Missing sha256 SRC_URI checksum for %s, consider adding to the recipe:\n'
'SRC_URI[%s] = "%s"', 'SRC_URI[%s] = "%s"',
ud.localpath, ud.sha256_name, sha256data) ud.localpath, ud.sha256_name, sha256data)
# We want to alert the user if a checksum is defined in the recipe but # We want to alert the user if a checksum is defined in the recipe but
# it does not match. # it does not match.
@ -659,9 +659,9 @@ def verify_donestamp(ud, d, origud=None):
# files to those containing the checksums. # files to those containing the checksums.
if not isinstance(e, EOFError): if not isinstance(e, EOFError):
# Ignore errors, they aren't fatal # Ignore errors, they aren't fatal
logger.warn("Couldn't load checksums from donestamp %s: %s " logger.warning("Couldn't load checksums from donestamp %s: %s "
"(msg: %s)" % (ud.donestamp, type(e).__name__, "(msg: %s)" % (ud.donestamp, type(e).__name__,
str(e))) str(e)))
try: try:
checksums = verify_checksum(ud, d, precomputed_checksums) checksums = verify_checksum(ud, d, precomputed_checksums)
@ -675,8 +675,8 @@ def verify_donestamp(ud, d, origud=None):
except ChecksumError as e: except ChecksumError as e:
# Checksums failed to verify, trigger re-download and remove the # Checksums failed to verify, trigger re-download and remove the
# incorrect stamp file. # incorrect stamp file.
logger.warn("Checksum mismatch for local file %s\n" logger.warning("Checksum mismatch for local file %s\n"
"Cleaning and trying again." % ud.localpath) "Cleaning and trying again." % ud.localpath)
if os.path.exists(ud.localpath): if os.path.exists(ud.localpath):
rename_bad_checksum(ud, e.checksum) rename_bad_checksum(ud, e.checksum)
bb.utils.remove(ud.donestamp) bb.utils.remove(ud.donestamp)
@ -708,8 +708,8 @@ def update_stamp(ud, d):
except ChecksumError as e: except ChecksumError as e:
# Checksums failed to verify, trigger re-download and remove the # Checksums failed to verify, trigger re-download and remove the
# incorrect stamp file. # incorrect stamp file.
logger.warn("Checksum mismatch for local file %s\n" logger.warning("Checksum mismatch for local file %s\n"
"Cleaning and trying again." % ud.localpath) "Cleaning and trying again." % ud.localpath)
if os.path.exists(ud.localpath): if os.path.exists(ud.localpath):
rename_bad_checksum(ud, e.checksum) rename_bad_checksum(ud, e.checksum)
bb.utils.remove(ud.donestamp) bb.utils.remove(ud.donestamp)
@ -984,8 +984,8 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
except bb.fetch2.BBFetchException as e: except bb.fetch2.BBFetchException as e:
if isinstance(e, ChecksumError): if isinstance(e, ChecksumError):
logger.warn("Mirror checksum failure for url %s (original url: %s)\nCleaning and trying again." % (ud.url, origud.url)) logger.warning("Mirror checksum failure for url %s (original url: %s)\nCleaning and trying again." % (ud.url, origud.url))
logger.warn(str(e)) logger.warning(str(e))
if os.path.exists(ud.localpath): if os.path.exists(ud.localpath):
rename_bad_checksum(ud, e.checksum) rename_bad_checksum(ud, e.checksum)
elif isinstance(e, NoChecksumError): elif isinstance(e, NoChecksumError):
@ -1200,7 +1200,7 @@ class FetchData(object):
raise NonLocalMethod() raise NonLocalMethod()
if self.parm.get("proto", None) and "protocol" not in self.parm: if self.parm.get("proto", None) and "protocol" not in self.parm:
logger.warn('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN', True)) logger.warning('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN', True))
self.parm["protocol"] = self.parm.get("proto", None) self.parm["protocol"] = self.parm.get("proto", None)
if hasattr(self.method, "urldata_init"): if hasattr(self.method, "urldata_init"):
@ -1596,14 +1596,14 @@ class Fetch(object):
except BBFetchException as e: except BBFetchException as e:
if isinstance(e, ChecksumError): if isinstance(e, ChecksumError):
logger.warn("Checksum failure encountered with download of %s - will attempt other sources if available" % u) logger.warning("Checksum failure encountered with download of %s - will attempt other sources if available" % u)
logger.debug(1, str(e)) logger.debug(1, str(e))
if os.path.exists(ud.localpath): if os.path.exists(ud.localpath):
rename_bad_checksum(ud, e.checksum) rename_bad_checksum(ud, e.checksum)
elif isinstance(e, NoChecksumError): elif isinstance(e, NoChecksumError):
raise raise
else: else:
logger.warn('Failed to fetch URL %s, attempting MIRRORS if available' % u) logger.warning('Failed to fetch URL %s, attempting MIRRORS if available' % u)
logger.debug(1, str(e)) logger.debug(1, str(e))
firsterr = e firsterr = e
# Remove any incomplete fetch # Remove any incomplete fetch

View File

@ -251,14 +251,14 @@ class Npm(FetchMethod):
with open(shwrf) as datafile: with open(shwrf) as datafile:
shrinkobj = json.load(datafile) shrinkobj = json.load(datafile)
except: except:
logger.warn('Missing shrinkwrap file in NPM_SHRINKWRAP for %s, this will lead to unreliable builds!' % ud.pkgname) logger.warning('Missing shrinkwrap file in NPM_SHRINKWRAP for %s, this will lead to unreliable builds!' % ud.pkgname)
lckdf = d.getVar('NPM_LOCKDOWN', True) lckdf = d.getVar('NPM_LOCKDOWN', True)
logger.debug(2, "NPM lockdown file is %s" % lckdf) logger.debug(2, "NPM lockdown file is %s" % lckdf)
try: try:
with open(lckdf) as datafile: with open(lckdf) as datafile:
lockdown = json.load(datafile) lockdown = json.load(datafile)
except: except:
logger.warn('Missing lockdown file in NPM_LOCKDOWN for %s, this will lead to unreproducible builds!' % ud.pkgname) logger.warning('Missing lockdown file in NPM_LOCKDOWN for %s, this will lead to unreproducible builds!' % ud.pkgname)
if ('name' not in shrinkobj): if ('name' not in shrinkobj):
self._getdependencies(ud.pkgname, jsondepobj, ud.version, d, ud) self._getdependencies(ud.pkgname, jsondepobj, ud.version, d, ud)

View File

@ -220,7 +220,7 @@ class diskMonitor:
if minSpace and freeSpace < minSpace: if minSpace and freeSpace < minSpace:
# Always show warning, the self.checked would always be False if the action is WARN # Always show warning, the self.checked would always be False if the action is WARN
if self.preFreeS[k] == 0 or self.preFreeS[k] - freeSpace > self.spaceInterval and not self.checked[k]: if self.preFreeS[k] == 0 or self.preFreeS[k] - freeSpace > self.spaceInterval and not self.checked[k]:
logger.warn("The free space of %s (%s) is running low (%.3fGB left)" % \ logger.warning("The free space of %s (%s) is running low (%.3fGB left)" % \
(path, dev, freeSpace / 1024 / 1024 / 1024.0)) (path, dev, freeSpace / 1024 / 1024 / 1024.0))
self.preFreeS[k] = freeSpace self.preFreeS[k] = freeSpace
@ -246,7 +246,7 @@ class diskMonitor:
continue continue
# Always show warning, the self.checked would always be False if the action is WARN # Always show warning, the self.checked would always be False if the action is WARN
if self.preFreeI[k] == 0 or self.preFreeI[k] - freeInode > self.inodeInterval and not self.checked[k]: if self.preFreeI[k] == 0 or self.preFreeI[k] - freeInode > self.inodeInterval and not self.checked[k]:
logger.warn("The free inode of %s (%s) is running low (%.3fK left)" % \ logger.warning("The free inode of %s (%s) is running low (%.3fK left)" % \
(path, dev, freeInode / 1024.0)) (path, dev, freeInode / 1024.0))
self.preFreeI[k] = freeInode self.preFreeI[k] = freeInode

View File

@ -84,13 +84,13 @@ def include(parentfn, fn, lineno, data, error_out):
bbpath = "%s:%s" % (dname, data.getVar("BBPATH", True)) bbpath = "%s:%s" % (dname, data.getVar("BBPATH", True))
abs_fn, attempts = bb.utils.which(bbpath, fn, history=True) abs_fn, attempts = bb.utils.which(bbpath, fn, history=True)
if abs_fn and bb.parse.check_dependency(data, abs_fn): if abs_fn and bb.parse.check_dependency(data, abs_fn):
logger.warn("Duplicate inclusion for %s in %s" % (abs_fn, data.getVar('FILE', True))) logger.warning("Duplicate inclusion for %s in %s" % (abs_fn, data.getVar('FILE', True)))
for af in attempts: for af in attempts:
bb.parse.mark_dependency(data, af) bb.parse.mark_dependency(data, af)
if abs_fn: if abs_fn:
fn = abs_fn fn = abs_fn
elif bb.parse.check_dependency(data, fn): elif bb.parse.check_dependency(data, fn):
logger.warn("Duplicate inclusion for %s in %s" % (fn, data.getVar('FILE', True))) logger.warning("Duplicate inclusion for %s in %s" % (fn, data.getVar('FILE', True)))
try: try:
bb.parse.handle(fn, data, True) bb.parse.handle(fn, data, True)

View File

@ -824,7 +824,7 @@ class RunQueueData:
msg += "\n%s has unique rprovides:\n %s" % (provfn, "\n ".join(rprovide_results[provfn] - commonrprovs)) msg += "\n%s has unique rprovides:\n %s" % (provfn, "\n ".join(rprovide_results[provfn] - commonrprovs))
if self.warn_multi_bb: if self.warn_multi_bb:
logger.warn(msg) logger.warning(msg)
else: else:
logger.error(msg) logger.error(msg)
@ -852,7 +852,7 @@ class RunQueueData:
taskdep = self.dataCache.task_deps[fn] taskdep = self.dataCache.task_deps[fn]
fnid = self.taskData.getfn_id(fn) fnid = self.taskData.getfn_id(fn)
if taskname not in taskData.tasks_lookup[fnid]: if taskname not in taskData.tasks_lookup[fnid]:
logger.warn("Task %s does not exist, invalidating this task will have no effect" % taskname) logger.warning("Task %s does not exist, invalidating this task will have no effect" % taskname)
if 'nostamp' in taskdep and taskname in taskdep['nostamp']: if 'nostamp' in taskdep and taskname in taskdep['nostamp']:
if error_nostamp: if error_nostamp:
bb.fatal("Task %s is marked nostamp, cannot invalidate this task" % taskname) bb.fatal("Task %s is marked nostamp, cannot invalidate this task" % taskname)

View File

@ -221,7 +221,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
if taint: if taint:
data = data + taint data = data + taint
self.taints[k] = taint self.taints[k] = taint
logger.warn("%s is tainted from a forced run" % k) logger.warning("%s is tainted from a forced run" % k)
h = hashlib.md5(data).hexdigest() h = hashlib.md5(data).hexdigest()
self.taskhash[k] = h self.taskhash[k] = h

View File

@ -664,8 +664,8 @@ class ORMWrapper(object):
dep_type = tdeptype, dep_type = tdeptype,
target = target_obj)) target = target_obj))
except KeyError as e: except KeyError as e:
logger.warn("Could not add dependency to the package %s " logger.warning("Could not add dependency to the package %s "
"because %s is an unknown package", p, px) "because %s is an unknown package", p, px)
if len(packagedeps_objs) > 0: if len(packagedeps_objs) > 0:
Package_Dependency.objects.bulk_create(packagedeps_objs) Package_Dependency.objects.bulk_create(packagedeps_objs)
@ -673,7 +673,7 @@ class ORMWrapper(object):
logger.info("No package dependencies created") logger.info("No package dependencies created")
if len(errormsg) > 0: if len(errormsg) > 0:
logger.warn("buildinfohelper: target_package_info could not identify recipes: \n%s", errormsg) logger.warning("buildinfohelper: target_package_info could not identify recipes: \n%s", errormsg)
def save_target_image_file_information(self, target_obj, file_name, file_size): def save_target_image_file_information(self, target_obj, file_name, file_size):
Target_Image_File.objects.create( target = target_obj, Target_Image_File.objects.create( target = target_obj,
@ -932,7 +932,7 @@ class BuildInfoHelper(object):
return lvo return lvo
#if we get here, we didn't read layers correctly; dump whatever information we have on the error log #if we get here, we didn't read layers correctly; dump whatever information we have on the error log
logger.warn("Could not match layer version for recipe path %s : %s", path, self.orm_wrapper.layer_version_objects) logger.warning("Could not match layer version for recipe path %s : %s", path, self.orm_wrapper.layer_version_objects)
#mockup the new layer #mockup the new layer
unknown_layer, _ = Layer.objects.get_or_create(name="Unidentified layer", layer_index_url="") unknown_layer, _ = Layer.objects.get_or_create(name="Unidentified layer", layer_index_url="")
@ -1003,7 +1003,7 @@ class BuildInfoHelper(object):
self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer], self.brbe)] = layerinfos[layer]['version'] self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer], self.brbe)] = layerinfos[layer]['version']
self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer], self.brbe)]['local_path'] = layerinfos[layer]['local_path'] self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer], self.brbe)]['local_path'] = layerinfos[layer]['local_path']
except NotExisting as nee: except NotExisting as nee:
logger.warn("buildinfohelper: cannot identify layer exception:%s ", nee) logger.warning("buildinfohelper: cannot identify layer exception:%s ", nee)
def store_started_build(self, event, build_log_path): def store_started_build(self, event, build_log_path):
@ -1240,14 +1240,14 @@ class BuildInfoHelper(object):
self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata, pkgdata, self.internal_state['recipes'], built_package=True) self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata, pkgdata, self.internal_state['recipes'], built_package=True)
self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata.copy(), pkgdata, self.internal_state['recipes'], built_package=False) self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata.copy(), pkgdata, self.internal_state['recipes'], built_package=False)
except KeyError as e: except KeyError as e:
logger.warn("KeyError in save_target_package_information" logger.warning("KeyError in save_target_package_information"
"%s ", e) "%s ", e)
try: try:
self.orm_wrapper.save_target_file_information(self.internal_state['build'], target, filedata) self.orm_wrapper.save_target_file_information(self.internal_state['build'], target, filedata)
except KeyError as e: except KeyError as e:
logger.warn("KeyError in save_target_file_information" logger.warning("KeyError in save_target_file_information"
"%s ", e) "%s ", e)
@ -1392,7 +1392,7 @@ class BuildInfoHelper(object):
Task_Dependency.objects.bulk_create(taskdeps_objects) Task_Dependency.objects.bulk_create(taskdeps_objects)
if len(errormsg) > 0: if len(errormsg) > 0:
logger.warn("buildinfohelper: dependency info not identify recipes: \n%s", errormsg) logger.warning("buildinfohelper: dependency info not identify recipes: \n%s", errormsg)
def store_build_package_information(self, event): def store_build_package_information(self, event):

View File

@ -350,7 +350,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
tries -= 1 tries -= 1
if tries: if tries:
continue continue
logger.warn(event.msg) logger.warning(event.msg)
continue continue
if isinstance(event, logging.LogRecord): if isinstance(event, logging.LogRecord):
@ -377,7 +377,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
continue continue
if isinstance(event, bb.build.TaskFailedSilent): if isinstance(event, bb.build.TaskFailedSilent):
logger.warn("Logfile for failed setscene task is %s" % event.logfile) logger.warning("Logfile for failed setscene task is %s" % event.logfile)
continue continue
if isinstance(event, bb.build.TaskFailed): if isinstance(event, bb.build.TaskFailed):
return_value = 1 return_value = 1
@ -509,8 +509,8 @@ def main(server, eventHandler, params, tf = TerminalFilter):
continue continue
if isinstance(event, bb.runqueue.sceneQueueTaskFailed): if isinstance(event, bb.runqueue.sceneQueueTaskFailed):
logger.warn("Setscene task %s (%s) failed with exit code '%s' - real task will be run instead", logger.warning("Setscene task %s (%s) failed with exit code '%s' - real task will be run instead",
event.taskid, event.taskstring, event.exitcode) event.taskid, event.taskstring, event.exitcode)
continue continue
if isinstance(event, bb.event.DepTreeGenerated): if isinstance(event, bb.event.DepTreeGenerated):

View File

@ -163,7 +163,7 @@ def main(server, eventHandler, params):
inheritlist, _ = server.runCommand(["getVariable", "INHERIT"]) inheritlist, _ = server.runCommand(["getVariable", "INHERIT"])
if not "buildhistory" in inheritlist.split(" "): if not "buildhistory" in inheritlist.split(" "):
logger.warn("buildhistory is not enabled. Please enable INHERIT += \"buildhistory\" to see image details.") logger.warning("buildhistory is not enabled. Please enable INHERIT += \"buildhistory\" to see image details.")
build_history_enabled = False build_history_enabled = False
if not params.observe_only: if not params.observe_only:
@ -433,7 +433,7 @@ def main(server, eventHandler, params):
buildinfohelper.store_dependency_information(event) buildinfohelper.store_dependency_information(event)
continue continue
logger.warn("Unknown event: %s", event) logger.warning("Unknown event: %s", event)
return_value += 1 return_value += 1
except EnvironmentError as ioerror: except EnvironmentError as ioerror:
@ -461,5 +461,5 @@ def main(server, eventHandler, params):
if interrupted and return_value == 0: if interrupted and return_value == 0:
return_value += 1 return_value += 1
logger.warn("Return value is %d", return_value) logger.warning("Return value is %d", return_value)
return return_value return return_value

View File

@ -822,7 +822,7 @@ def copyfile(src, dest, newmtime = None, sstat = None):
if not sstat: if not sstat:
sstat = os.lstat(src) sstat = os.lstat(src)
except Exception as e: except Exception as e:
logger.warn("copyfile: stat of %s failed (%s)" % (src, e)) logger.warning("copyfile: stat of %s failed (%s)" % (src, e))
return False return False
destexists = 1 destexists = 1
@ -849,7 +849,7 @@ def copyfile(src, dest, newmtime = None, sstat = None):
#os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID]) #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
return os.lstat(dest) return os.lstat(dest)
except Exception as e: except Exception as e:
logger.warn("copyfile: failed to create symlink %s to %s (%s)" % (dest, target, e)) logger.warning("copyfile: failed to create symlink %s to %s (%s)" % (dest, target, e))
return False return False
if stat.S_ISREG(sstat[stat.ST_MODE]): if stat.S_ISREG(sstat[stat.ST_MODE]):
@ -864,7 +864,7 @@ def copyfile(src, dest, newmtime = None, sstat = None):
shutil.copyfile(src, dest + "#new") shutil.copyfile(src, dest + "#new")
os.rename(dest + "#new", dest) os.rename(dest + "#new", dest)
except Exception as e: except Exception as e:
logger.warn("copyfile: copy %s to %s failed (%s)" % (src, dest, e)) logger.warning("copyfile: copy %s to %s failed (%s)" % (src, dest, e))
return False return False
finally: finally:
if srcchown: if srcchown:
@ -875,13 +875,13 @@ def copyfile(src, dest, newmtime = None, sstat = None):
#we don't yet handle special, so we need to fall back to /bin/mv #we don't yet handle special, so we need to fall back to /bin/mv
a = getstatusoutput("/bin/cp -f " + "'" + src + "' '" + dest + "'") a = getstatusoutput("/bin/cp -f " + "'" + src + "' '" + dest + "'")
if a[0] != 0: if a[0] != 0:
logger.warn("copyfile: failed to copy special file %s to %s (%s)" % (src, dest, a)) logger.warning("copyfile: failed to copy special file %s to %s (%s)" % (src, dest, a))
return False # failure return False # failure
try: try:
os.lchown(dest, sstat[stat.ST_UID], sstat[stat.ST_GID]) os.lchown(dest, sstat[stat.ST_UID], sstat[stat.ST_GID])
os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
except Exception as e: except Exception as e:
logger.warn("copyfile: failed to chown/chmod %s (%s)" % (dest, e)) logger.warning("copyfile: failed to chown/chmod %s (%s)" % (dest, e))
return False return False
if newmtime: if newmtime:

View File

@ -66,7 +66,7 @@ class LocalhostBEController(BuildEnvironmentController):
err = "command: %s \n%s" % (command, out) err = "command: %s \n%s" % (command, out)
else: else:
err = "command: %s \n%s" % (command, err) err = "command: %s \n%s" % (command, err)
logger.warn("localhostbecontroller: shellcmd error %s" % err) logger.warning("localhostbecontroller: shellcmd error %s" % err)
raise ShellCmdException(err) raise ShellCmdException(err)
else: else:
logger.debug("localhostbecontroller: shellcmd success") logger.debug("localhostbecontroller: shellcmd success")

View File

@ -164,16 +164,16 @@ class Command(NoArgsCommand):
try: try:
self.cleanup() self.cleanup()
except Exception as e: except Exception as e:
logger.warn("runbuilds: cleanup exception %s" % str(e)) logger.warning("runbuilds: cleanup exception %s" % str(e))
try: try:
self.archive() self.archive()
except Exception as e: except Exception as e:
logger.warn("runbuilds: archive exception %s" % str(e)) logger.warning("runbuilds: archive exception %s" % str(e))
try: try:
self.schedule() self.schedule()
except Exception as e: except Exception as e:
logger.warn("runbuilds: schedule exception %s" % str(e)) logger.warning("runbuilds: schedule exception %s" % str(e))
time.sleep(1) time.sleep(1)

View File

@ -98,11 +98,11 @@ class BuildRequest(models.Model):
# Check that the state we're trying to set is not going backwards # Check that the state we're trying to set is not going backwards
# e.g. from REQ_FAILED to REQ_INPROGRESS # e.g. from REQ_FAILED to REQ_INPROGRESS
if self.old_state != self.state and self.old_state > self.state: if self.old_state != self.state and self.old_state > self.state:
logger.warn("Invalid state change requested: " logger.warning("Invalid state change requested: "
"Cannot go from %s to %s - ignoring request" % "Cannot go from %s to %s - ignoring request" %
(BuildRequest.REQUEST_STATE[self.old_state][1], (BuildRequest.REQUEST_STATE[self.old_state][1],
BuildRequest.REQUEST_STATE[self.state][1]) BuildRequest.REQUEST_STATE[self.state][1])
) )
# Set property back to the old value # Set property back to the old value
self.state = self.old_state self.state = self.old_state
return return

View File

@ -133,7 +133,7 @@ def run_shell_cmd(command, cwd=None):
err = "command: %s \n%s" % (command, out) err = "command: %s \n%s" % (command, out)
else: else:
err = "command: %s \n%s" % (command, err) err = "command: %s \n%s" % (command, err)
config.logger.warn("_shellcmd: error \n%s\n%s", out, err) config.logger.warning("_shellcmd: error \n%s\n%s", out, err)
raise ShellCmdException(err) raise ShellCmdException(err)
else: else:
#config.logger.debug("localhostbecontroller: shellcmd success\n%s" % out) #config.logger.debug("localhostbecontroller: shellcmd success\n%s" % out)

View File

@ -26,12 +26,12 @@ def validate_html5(url):
warnings = int(resp['x-w3c-validator-warnings']) warnings = int(resp['x-w3c-validator-warnings'])
if status == 'Invalid': if status == 'Invalid':
config.logger.warn("Failed %s is %s\terrors %s warnings %s (check at %s)", url, status, errors, warnings, urlrequest) config.logger.warning("Failed %s is %s\terrors %s warnings %s (check at %s)", url, status, errors, warnings, urlrequest)
else: else:
config.logger.debug("OK! %s", url) config.logger.debug("OK! %s", url)
except Exception as exc: except Exception as exc:
config.logger.warn("Failed validation call: %s", exc) config.logger.warning("Failed validation call: %s", exc)
return (status, errors, warnings) return (status, errors, warnings)

View File

@ -84,7 +84,7 @@ for t in os.walk(os.path.dirname(currentdir)):
if not conflict: if not conflict:
urlpatterns.insert(0, url(r'^' + modulename + '/', include ( modulename + '.urls'))) urlpatterns.insert(0, url(r'^' + modulename + '/', include ( modulename + '.urls')))
else: else:
logger.warn("Module \'%s\' has a regexp conflict, was not added to the urlpatterns" % modulename) logger.warning("Module \'%s\' has a regexp conflict, was not added to the urlpatterns" % modulename)
from pprint import pformat from pprint import pformat
#logger.debug("urlpatterns list %s", pformat(urlpatterns)) #logger.debug("urlpatterns list %s", pformat(urlpatterns))