mirror of git://git.yoctoproject.org/poky.git
meta/scripts: Improve internal variable naming
Update internal variable names to improve the terms used.

(From OE-Core rev: f408068e5d7998ae165f3002e51bc54b380b8099)

Signed-off-by: Saul Wold <saul.wold@windriver.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
parent 8827a3ed80
commit d9e500f83d
@@ -329,9 +329,9 @@ python base_eventhandler() {
     source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
     if not source_mirror_fetch:
         provs = (d.getVar("PROVIDES") or "").split()
-        multiwhitelist = (d.getVar("BB_MULTI_PROVIDER_ALLOWED") or "").split()
+        multiprovidersallowed = (d.getVar("BB_MULTI_PROVIDER_ALLOWED") or "").split()
         for p in provs:
-            if p.startswith("virtual/") and p not in multiwhitelist:
+            if p.startswith("virtual/") and p not in multiprovidersallowed:
                 profprov = d.getVar("PREFERRED_PROVIDER_" + p)
                 if profprov and pn != profprov:
                     raise bb.parse.SkipRecipe("PREFERRED_PROVIDER_%s set to %s, not %s" % (p, profprov, pn))

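For context, the hunk above is the provider-selection check: a recipe that PROVIDES a virtual/* target is skipped unless it is the selected provider, except for providers listed in BB_MULTI_PROVIDER_ALLOWED. A minimal local.conf sketch of how these variables are typically used (the provider names below are illustrative only, not part of this commit):

PREFERRED_PROVIDER_virtual/kernel = "linux-yocto"
# providers listed here may legitimately be supplied by more than one recipe
BB_MULTI_PROVIDER_ALLOWED += "virtual/x-terminal-emulator"
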
@@ -36,7 +36,7 @@ python () {
         return
 
     tos = d.getVar("TARGET_OS")
-    whitelist = ["mingw32"]
+    tos_known = ["mingw32"]
     extralibcs = [""]
     if "musl" in d.getVar("BASECANADIANEXTRAOS"):
         extralibcs.append("musl")

@@ -51,8 +51,8 @@ python () {
                 entry = entry + "-gnu" + variant
             elif libc:
                 entry = entry + "-" + libc
-            whitelist.append(entry)
-    if tos not in whitelist:
+            tos_known.append(entry)
+    if tos not in tos_known:
         bb.fatal("Building cross-candian for an unknown TARGET_SYS (%s), please update cross-canadian.bbclass" % d.getVar("TARGET_SYS"))
 
     for n in ["PROVIDES", "DEPENDS"]:

@@ -43,11 +43,12 @@ CVE_CHECK_CREATE_MANIFEST ??= "1"
 
 CVE_CHECK_REPORT_PATCHED ??= "1"
 
-# Whitelist for packages (PN)
+# Skip CVE Check for packages (PN)
 CVE_CHECK_SKIP_RECIPE ?= ""
 
-# Whitelist for CVE. If a CVE is found, then it is considered patched.
-# The value is a string containing space separated CVE values:
+# Ignore the check for a given list of CVEs. If a CVE is found,
+# then it is considered patched. The value is a string containing
+# space separated CVE values:
 #
 # CVE_CHECK_IGNORE = 'CVE-2014-2524 CVE-2018-1234'
 #

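As a usage note (not part of the diff): both variables above are normally set from local.conf or a distro configuration. The recipe name below is invented for illustration; the CVE ID is taken from the example comment in the hunk:

# skip the CVE check entirely for one recipe (PN)
CVE_CHECK_SKIP_RECIPE += "my-internal-tool"
# treat a specific CVE as ignored/patched build-wide
CVE_CHECK_IGNORE += "CVE-2014-2524"
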
@@ -101,10 +102,10 @@ python do_cve_check () {
             patched_cves = get_patched_cves(d)
         except FileNotFoundError:
             bb.fatal("Failure in searching patches")
-        whitelisted, patched, unpatched = check_cves(d, patched_cves)
+        ignored, patched, unpatched = check_cves(d, patched_cves)
         if patched or unpatched:
             cve_data = get_cve_info(d, patched + unpatched)
-            cve_write_data(d, patched, unpatched, whitelisted, cve_data)
+            cve_write_data(d, patched, unpatched, ignored, cve_data)
     else:
         bb.note("No CVE database found, skipping CVE check")
 
@@ -176,12 +177,12 @@ def check_cves(d, patched_cves):
         return ([], [], [])
     pv = d.getVar("CVE_VERSION").split("+git")[0]
 
-    # If the recipe has been whitelisted we return empty lists
+    # If the recipe has been skipped/ignored we return empty lists
     if pn in d.getVar("CVE_CHECK_SKIP_RECIPE").split():
-        bb.note("Recipe has been whitelisted, skipping check")
+        bb.note("Recipe has been skipped by cve-check")
         return ([], [], [])
 
-    cve_whitelist = d.getVar("CVE_CHECK_IGNORE").split()
+    cve_ignore = d.getVar("CVE_CHECK_IGNORE").split()
 
     import sqlite3
     db_file = d.expand("file:${CVE_CHECK_DB_FILE}?mode=ro")

@@ -198,9 +199,9 @@ def check_cves(d, patched_cves):
         for cverow in conn.execute("SELECT DISTINCT ID FROM PRODUCTS WHERE PRODUCT IS ? AND VENDOR LIKE ?", (product, vendor)):
             cve = cverow[0]
 
-            if cve in cve_whitelist:
-                bb.note("%s-%s has been whitelisted for %s" % (product, pv, cve))
-                # TODO: this should be in the report as 'whitelisted'
+            if cve in cve_ignore:
+                bb.note("%s-%s has been ignored for %s" % (product, pv, cve))
+                # TODO: this should be in the report as 'ignored'
                 patched_cves.add(cve)
                 continue
             elif cve in patched_cves:

@@ -254,7 +255,7 @@ def check_cves(d, patched_cves):
 
     conn.close()
 
-    return (list(cve_whitelist), list(patched_cves), cves_unpatched)
+    return (list(cve_ignore), list(patched_cves), cves_unpatched)
 
 def get_cve_info(d, cves):
     """

@@ -279,7 +280,7 @@ def get_cve_info(d, cves):
     conn.close()
     return cve_data
 
-def cve_write_data(d, patched, unpatched, whitelisted, cve_data):
+def cve_write_data(d, patched, unpatched, ignored, cve_data):
     """
     Write CVE information in WORKDIR; and to CVE_CHECK_DIR, and
     CVE manifest if enabled.

@@ -312,8 +313,8 @@ def cve_write_data(d, patched, unpatched, whitelisted, cve_data):
         write_string += "PACKAGE NAME: %s\n" % d.getVar("PN")
         write_string += "PACKAGE VERSION: %s%s\n" % (d.getVar("EXTENDPE"), d.getVar("PV"))
         write_string += "CVE: %s\n" % cve
-        if cve in whitelisted:
-            write_string += "CVE STATUS: Whitelisted\n"
+        if cve in ignored:
+            write_string += "CVE STATUS: Ignored\n"
         elif is_patched:
             write_string += "CVE STATUS: Patched\n"
         else:

@@ -441,7 +441,8 @@ def package_qa_hash_style(path, name, d, elf, messages):
 QAPATHTEST[buildpaths] = "package_qa_check_buildpaths"
 def package_qa_check_buildpaths(path, name, d, elf, messages):
     """
-    Check for build paths inside target files and error if not found in the whitelist
+    Check for build paths inside target files and error if paths are not
+    explicitly ignored.
     """
     # Ignore .debug files, not interesting
     if path.find(".debug") != -1:

@@ -1283,8 +1284,8 @@ Rerun configure task after fixing this."""
             options = set()
             for line in output.splitlines():
                 options |= set(line.partition(flag)[2].split())
-            whitelist = set(d.getVar("UNKNOWN_CONFIGURE_OPT_IGNORE").split())
-            options -= whitelist
+            ignore_opts = set(d.getVar("UNKNOWN_CONFIGURE_OPT_IGNORE").split())
+            options -= ignore_opts
             if options:
                 pn = d.getVar('PN')
                 error_msg = pn + ": configure was passed unrecognised options: " + " ".join(options)

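For reference, UNKNOWN_CONFIGURE_OPT_IGNORE (renamed above on the Python side only) is the variable a recipe can extend when it deliberately passes an option its configure script reports as unrecognised. The flag name below is a made-up example:

UNKNOWN_CONFIGURE_OPT_IGNORE:append = " --with-my-custom-flag"
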
@@ -282,8 +282,8 @@ python copy_buildsystem () {
         bb.utils.mkdirhier(uninative_outdir)
         shutil.copy(uninative_file, uninative_outdir)
 
-    env_whitelist = (d.getVar('BB_ENV_PASSTHROUGH_ADDITIONS') or '').split()
-    env_whitelist_values = {}
+    env_passthrough = (d.getVar('BB_ENV_PASSTHROUGH_ADDITIONS') or '').split()
+    env_passthrough_values = {}
 
     # Create local.conf
     builddir = d.getVar('TOPDIR')

@@ -294,15 +294,15 @@ python copy_buildsystem () {
     if derivative:
         shutil.copyfile(builddir + '/conf/local.conf', baseoutpath + '/conf/local.conf')
     else:
-        local_conf_whitelist = (d.getVar('ESDK_LOCALCONF_ALLOW') or '').split()
-        local_conf_blacklist = (d.getVar('ESDK_LOCALCONF_REMOVE') or '').split()
+        local_conf_allowed = (d.getVar('ESDK_LOCALCONF_ALLOW') or '').split()
+        local_conf_remove = (d.getVar('ESDK_LOCALCONF_REMOVE') or '').split()
         def handle_var(varname, origvalue, op, newlines):
-            if varname in local_conf_blacklist or (origvalue.strip().startswith('/') and not varname in local_conf_whitelist):
+            if varname in local_conf_remove or (origvalue.strip().startswith('/') and not varname in local_conf_allowed):
                 newlines.append('# Removed original setting of %s\n' % varname)
                 return None, op, 0, True
             else:
-                if varname in env_whitelist:
-                    env_whitelist_values[varname] = origvalue
+                if varname in env_passthrough:
+                    env_passthrough_values[varname] = origvalue
                     return origvalue, op, 0, True
         varlist = ['[^#=+ ]*']
         oldlines = []

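By way of illustration only (the variable names on the right-hand side are hypothetical), a distro can use the two renamed variables to control which local.conf settings survive into the generated eSDK configuration:

# keep this variable even though its value looks like a local path
ESDK_LOCALCONF_ALLOW:append = " MY_DISTRO_FEED_URI"
# always strip this variable from the eSDK's local.conf
ESDK_LOCALCONF_REMOVE:append = " MY_BUILD_HOST_ONLY_SETTING"
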
@@ -356,7 +356,7 @@ python copy_buildsystem () {
             # We want to be able to set this without a full reparse
             f.write('BB_HASHCONFIG_IGNORE_VARS:append = " SIGGEN_UNLOCKED_RECIPES"\n\n')
 
-            # Set up whitelist for run on install
+            # Set up which tasks are ignored for run on install
             f.write('BB_SETSCENE_ENFORCE_IGNORE_TASKS = "%:* *:do_shared_workdir *:do_rm_work wic-tools:* *:do_addto_recipe_sysroot"\n\n')
 
             # Hide the config information from bitbake output (since it's fixed within the SDK)

@@ -438,7 +438,7 @@ python copy_buildsystem () {
     # Ensure any variables set from the external environment (by way of
     # BB_ENV_PASSTHROUGH_ADDITIONS) are set in the SDK's configuration
     extralines = []
-    for name, value in env_whitelist_values.items():
+    for name, value in env_passthrough_values.items():
         actualvalue = d.getVar(name) or ''
         if value != actualvalue:
             extralines.append('%s = "%s"\n' % (name, actualvalue))

@@ -259,13 +259,13 @@ def sstate_install(ss, d):
             shareddirs.append(dstdir)
 
     # Check the file list for conflicts against files which already exist
-    whitelist = (d.getVar("SSTATE_ALLOW_OVERLAP_FILES") or "").split()
+    overlap_allowed = (d.getVar("SSTATE_ALLOW_OVERLAP_FILES") or "").split()
     match = []
     for f in sharedfiles:
         if os.path.exists(f) and not os.path.islink(f):
             f = os.path.normpath(f)
             realmatch = True
-            for w in whitelist:
+            for w in overlap_allowed:
                 w = os.path.normpath(w)
                 if f.startswith(w):
                     realmatch = False

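As a hedged sketch of what the renamed variable controls: SSTATE_ALLOW_OVERLAP_FILES lists path prefixes where two sstate packages may install the same file without triggering the conflict check above. The path below is invented purely for illustration:

SSTATE_ALLOW_OVERLAP_FILES += "${DEPLOY_DIR}/my-shared-artifacts/"
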
@@ -26,7 +26,7 @@ CVE_CHECK_IGNORE += "CVE-2000-0006"
 # There has been much discussion amongst the epiphany and webkit developers and
 # whilst there are improvements about how domains are handled and displayed to the user
 # there is unlikely ever to be a single fix to webkit or epiphany which addresses this
-# problem. Whitelisted as there isn't any mitigation or fix or way to progress this further
+# problem. Ignore this CVE as there isn't any mitigation or fix or way to progress this further
 # we can seem to take.
 CVE_CHECK_IGNORE += "CVE-2005-0238"
 
@@ -1,6 +1,6 @@
 # Setup extra CFLAGS and LDFLAGS which have 'security' benefits. These
 # don't work universally, there are recipes which can't use one, the other
-# or both so a blacklist is maintained here. The idea would be over
+# or both so an override is maintained here. The idea would be over
 # time to reduce this list to nothing.
 # From a Yocto Project perspective, this file is included and tested
 # in the DISTRO="poky" configuration.

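The "override" wording above refers to the per-recipe override assignments kept later in this include file; the pattern looks roughly like the following sketch (the recipe name is only an example, not from this commit):

SECURITY_CFLAGS:pn-my-fragile-recipe = ""
SECURITY_LDFLAGS:pn-my-fragile-recipe = ""
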
@@ -221,12 +221,12 @@ def packages_filter_out_system(d):
     PN-dbg PN-doc PN-locale-eb-gb removed.
     """
     pn = d.getVar('PN')
-    blacklist = [pn + suffix for suffix in ('', '-dbg', '-dev', '-doc', '-locale', '-staticdev', '-src')]
+    pkgfilter = [pn + suffix for suffix in ('', '-dbg', '-dev', '-doc', '-locale', '-staticdev', '-src')]
     localepkg = pn + "-locale-"
     pkgs = []
 
     for pkg in d.getVar('PACKAGES').split():
-        if pkg not in blacklist and localepkg not in pkg:
+        if pkg not in pkgfilter and localepkg not in pkg:
             pkgs.append(pkg)
     return pkgs
 
@@ -26,7 +26,7 @@
                 "expected_results": ""
             },
             "5": {
-                "action": "Remove USB, and reboot into new installed system. \nNote: If installation was successfully completed and received this message \"\"(sdx): Volume was not properly unmounted...Please run fsck.\"\" ignore it because this was whitelisted according to bug 9652.",
+                "action": "Remove USB, and reboot into new installed system. \nNote: If installation was successfully completed and received this message \"\"(sdx): Volume was not properly unmounted...Please run fsck.\"\" ignore it because this was allowed according to bug 9652.",
                 "expected_results": ""
             }
         },

@@ -13,7 +13,7 @@ from oeqa.utils.commands import bitbake, get_bb_vars, runCmd
 # The only package added to the image is container_image_testpkg, which
 # contains one file. However, due to some other things not cleaning up during
 # rootfs creation, there is some cruft. Ideally bugs will be filed and the
-# cruft removed, but for now we whitelist some known set.
+# cruft removed, but for now we ignore some known set.
 #
 # Also for performance reasons we're only checking the cruft when using ipk.
 # When using deb, and rpm it is a bit different and we could test all

@@ -153,7 +153,7 @@ class BSPCheckLayer(OECheckLayerTestCase):
                 # do_build can be ignored: it is know to have
                 # different signatures in some cases, for example in
                 # the allarch ca-certificates due to RDEPENDS=openssl.
-                # That particular dependency is whitelisted via
+                # That particular dependency is marked via
                 # SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS, but still shows up
                 # in the sstate signature hash because filtering it
                 # out would be hard and running do_build multiple

@@ -5,7 +5,7 @@
 
 import sys, os, subprocess, re, shutil
 
-whitelist = (
+allowed = (
     # type is supported by dash
     'if type systemctl >/dev/null 2>/dev/null; then',
     'if type systemd-tmpfiles >/dev/null 2>/dev/null; then',

@@ -19,8 +19,8 @@ whitelist = (
     '. $target_sdk_dir/${oe_init_build_env_path} $target_sdk_dir >> $LOGFILE'
     )
 
-def is_whitelisted(s):
-    for w in whitelist:
+def is_allowed(s):
+    for w in allowed:
         if w in s:
             return True
     return False

@@ -49,7 +49,7 @@ def process(filename, function, lineno, script):
         output = e.output.replace(fn.name, function)
         if not output or not output.startswith('possible bashism'):
             # Probably starts with or contains only warnings. Dump verbatim
-            # with one space indention. Can't do the splitting and whitelist
+            # with one space indention. Can't do the splitting and allowed
             # checking below.
             return '\n'.join([filename,
                               ' Unexpected output from checkbashisms.pl'] +

@@ -65,7 +65,7 @@ def process(filename, function, lineno, script):
     # ...
     # ...
     result = []
-    # Check the results against the whitelist
+    # Check the results against the allowed list
     for message, source in zip(output[0::2], output[1::2]):
         if not is_whitelisted(source):
             if lineno is not None: