Mirror of git://git.yoctoproject.org/meta-freescale.git
remove True option to getVar calls
getVar() has defaulted to expanding the value for a long time (since 2016), so remove the now-redundant True argument from getVar() calls with a regex search and replace.

Search & replace made using the following command:

sed -e 's|\(d\.getVar \?\)( \?\([^,()]*\), \?True)|\1(\2)|g' \
    -i $(git grep -E 'getVar ?\( ?([^,()]*), ?True\)' \
         | cut -d':' -f1 \
         | sort -u)

Signed-off-by: André Draszik <andre.draszik@jci.com>
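For context, the equivalence this change relies on can be checked directly against a BitBake datastore. The snippet below is a minimal sketch, not part of the commit: it assumes BitBake's lib/ directory is on PYTHONPATH and a BitBake release from 2016 or later; the variable names and values are invented for illustration.

# Illustration only: since ~2016 getVar() expands by default, so the
# explicit True argument is redundant; False is still needed to get the
# unexpanded value.
from bb.data_smart import DataSmart

d = DataSmart()
d.setVar("MACHINE", "imx6qsabresd")
d.setVar("BANNER", "building for ${MACHINE}")

old_style = d.getVar("BANNER", True)    # pre-2016 style: explicit expand=True
new_style = d.getVar("BANNER")          # expand defaults to True -> same value
unexpanded = d.getVar("BANNER", False)  # skipping expansion still requires False

assert old_style == new_style == "building for imx6qsabresd"
assert unexpanded == "building for ${MACHINE}"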
parent 9704df97f0
commit 74df05a7f0
@@ -26,22 +26,22 @@
 # Copyright 2013-2016 (C) O.S. Systems Software LTDA.

 python __anonymous () {
-    machine_arch_filter = set((d.getVar("MACHINE_ARCH_FILTER", True) or "").split())
-    machine_socarch_filter = set((d.getVar("MACHINE_SOCARCH_FILTER", True) or "").split())
+    machine_arch_filter = set((d.getVar("MACHINE_ARCH_FILTER") or "").split())
+    machine_socarch_filter = set((d.getVar("MACHINE_SOCARCH_FILTER") or "").split())
     if machine_socarch_filter or machine_arch_filter:
-        provides = set((d.getVar("PROVIDES", True) or "").split())
-        depends = set((d.getVar("DEPENDS", True) or "").split())
-        PN = d.getVar("PN", True)
+        provides = set((d.getVar("PROVIDES") or "").split())
+        depends = set((d.getVar("DEPENDS") or "").split())
+        PN = d.getVar("PN")

         package_arch = None
         if list(machine_arch_filter & (provides | depends)):
-            package_arch = d.getVar("MACHINE_ARCH", True)
+            package_arch = d.getVar("MACHINE_ARCH")
         elif list(machine_socarch_filter & (provides | depends)):
-            package_arch = d.getVar("MACHINE_SOCARCH", True)
+            package_arch = d.getVar("MACHINE_SOCARCH")
             if not package_arch:
                 raise bb.parse.SkipPackage("You must set MACHINE_SOCARCH as MACHINE_SOCARCH_FILTER is set for this SoC.")

-            machine_socarch_suffix = d.getVar("MACHINE_SOCARCH_SUFFIX", True)
+            machine_socarch_suffix = d.getVar("MACHINE_SOCARCH_SUFFIX")
             if not machine_socarch_suffix:
                 raise bb.parse.SkipPackage("You must set MACHINE_SOCARCH_SUFFIX as MACHINE_SOCARCH_FILTER is set for this SoC.")

@@ -49,18 +49,18 @@ python __anonymous () {
         bb.debug(1, "Use '%s' as package architecture for '%s'" % (package_arch, PN))
         d.setVar("PACKAGE_ARCH", package_arch)

-    cur_package_archs = (d.getVar("PACKAGE_ARCHS", True) or "").split()
-    machine_socarch = (d.getVar("MACHINE_SOCARCH", True) or "")
+    cur_package_archs = (d.getVar("PACKAGE_ARCHS") or "").split()
+    machine_socarch = (d.getVar("MACHINE_SOCARCH") or "")
     if not machine_socarch in cur_package_archs:
         d.appendVar("PACKAGE_EXTRA_ARCHS", " %s" % machine_socarch)

-    if d.getVar("TUNE_ARCH", True) == "arm":
+    if d.getVar("TUNE_ARCH") == "arm":
         # For ARM we have two possible machine_socarch values, one for the arm and one for the thumb instruction set
         # add the other value to extra archs also, so that a image recipe searches both for packages.
-        if d.getVar("ARM_INSTRUCTION_SET", True) == "thumb":
-            d.appendVar("PACKAGE_EXTRA_ARCHS", " %s" % d.getVar("ARM_EXTRA_SOCARCH", True))
+        if d.getVar("ARM_INSTRUCTION_SET") == "thumb":
+            d.appendVar("PACKAGE_EXTRA_ARCHS", " %s" % d.getVar("ARM_EXTRA_SOCARCH"))
         else:
-            d.appendVar("PACKAGE_EXTRA_ARCHS", " %s" % d.getVar("THUMB_EXTRA_SOCARCH", True))
+            d.appendVar("PACKAGE_EXTRA_ARCHS", " %s" % d.getVar("THUMB_EXTRA_SOCARCH"))
 }

 MACHINE_SOCARCH = "${TUNE_PKGARCH}${MACHINE_SOCARCH_SUFFIX}"

@@ -11,7 +11,7 @@ LIC_FILES_CHKSUM_append = " file://${FSL_EULA_FILE};md5=ab61cab9599935bfe9f70040
 LIC_FILES_CHKSUM[vardepsexclude] += "FSL_EULA_FILE"

 python fsl_bin_do_unpack() {
-    src_uri = (d.getVar('SRC_URI', True) or "").split()
+    src_uri = (d.getVar('SRC_URI') or "").split()
     if len(src_uri) == 0:
         return

@@ -44,9 +44,9 @@ python fsl_bin_do_unpack() {
 }

 python do_unpack() {
-    eula = d.getVar('ACCEPT_FSL_EULA', True)
-    eula_file = d.getVar('FSL_EULA_FILE', True)
-    pkg = d.getVar('PN', True)
+    eula = d.getVar('ACCEPT_FSL_EULA')
+    eula_file = d.getVar('FSL_EULA_FILE')
+    pkg = d.getVar('PN')
     if eula == None:
         bb.fatal("To use '%s' you need to accept the Freescale EULA at '%s'. "
                  "Please read it and in case you accept it, write: "

@@ -15,18 +15,18 @@
 # Copyright 2016-2018 (C) O.S. Systems Software LTDA.

 def machine_overrides_extender(d):
-    machine_overrides = (d.getVar('PRISTINE_MACHINEOVERRIDES', True) or '').split(':')
+    machine_overrides = (d.getVar('PRISTINE_MACHINEOVERRIDES') or '').split(':')

     # Gather the list of overrides to filter out
     machine_overrides_filter_out = []
     for override in machine_overrides:
-        machine_overrides_filter_out += (d.getVar('MACHINEOVERRIDES_EXTENDER_FILTER_OUT_%s' % override, True) or '').split()
+        machine_overrides_filter_out += (d.getVar('MACHINEOVERRIDES_EXTENDER_FILTER_OUT_%s' % override) or '').split()

     # Drop any overrides of filter_out prior extending
     machine_overrides = [o for o in machine_overrides if o not in machine_overrides_filter_out]

     for override in machine_overrides:
-        extender = d.getVar('MACHINEOVERRIDES_EXTENDER_%s' % override, True)
+        extender = d.getVar('MACHINEOVERRIDES_EXTENDER_%s' % override)

         if extender:
             extender = extender.split(':')

@@ -12,7 +12,7 @@ python () {
     d.setVar('KERNEL_LD', d.getVar('CCACHE', False) + sys_multilib + '-' + 'ld.bfd' + d.getVar('HOST_LD_KERNEL_ARCH', False) + tc_options)
     d.setVar('KERNEL_AR', d.getVar('CCACHE', False) + sys_multilib + '-' + 'ar' + d.getVar('HOST_AR_KERNEL_ARCH', False))

-    error_qa = d.getVar('ERROR_QA', True)
+    error_qa = d.getVar('ERROR_QA')
     if 'arch' in error_qa:
         d.setVar('ERROR_QA', error_qa.replace(' arch', ''))
 }

@@ -2,7 +2,7 @@ FILESEXTRAPATHS_prepend := "${THISDIR}/qt4:"

 python __anonymous () {
     families = ['mx6']
-    cur_families = (d.getVar('MACHINEOVERRIDES', True) or '').split(':')
+    cur_families = (d.getVar('MACHINEOVERRIDES') or '').split(':')
     if any(map(lambda x: x in cur_families,
                families)):
         d.appendVarFlag('do_configure', 'depends', ' virtual/kernel:do_shared_workdir')

@@ -11,9 +11,9 @@ SRCREV= "57401f6dff6507055558eaa6838116baa8a2fd46"
 S = "${WORKDIR}/git"

 python () {
-    if not d.getVar("QE_UCODE", True):
-        PN = d.getVar("PN", True)
-        FILE = os.path.basename(d.getVar("FILE", True))
+    if not d.getVar("QE_UCODE"):
+        PN = d.getVar("PN")
+        FILE = os.path.basename(d.getVar("FILE"))
         bb.debug(1, "To build %s, see %s for instructions on \
                 setting up your qe-ucode" % (PN, FILE))
         raise bb.parse.SkipRecipe("because QE_UCODE is not set")

@@ -14,7 +14,7 @@ S = "${WORKDIR}/git"

 export PYTHON = "${USRBINPATH}/python2"

-M="${@d.getVar('MACHINE', True).replace('-64b','').replace('-32b','').replace('-${SITEINFO_ENDIANNESS}','')}"
+M="${@d.getVar('MACHINE').replace('-64b','').replace('-32b','').replace('-${SITEINFO_ENDIANNESS}','')}"

 do_install () {
     if [ ${M} = ls2088ardb ]; then

@@ -11,11 +11,11 @@ DEPENDS_append_qoriq-arm = " change-file-endianess-native dtc-native tcl-native"
 DEPENDS_append_qoriq-ppc = " boot-format-native"

 python () {
-    if d.getVar("TCMODE", True) == "external-fsl":
+    if d.getVar("TCMODE") == "external-fsl":
         return

-    ml = d.getVar("MULTILIB_VARIANTS", True)
-    arch = d.getVar("OVERRIDES", True)
+    ml = d.getVar("MULTILIB_VARIANTS")
+    arch = d.getVar("OVERRIDES")

     if "e5500-64b:" in arch or "e6500-64b:" in arch:
         if not "lib32" in ml:

@@ -26,7 +26,7 @@ SRCREV = "c2300d1fdb30a439f555b07f228d3bce498238d5"
 PROVIDES = "openssl"

 python() {
-    pkgs = d.getVar('PACKAGES', True).split()
+    pkgs = d.getVar('PACKAGES').split()
     for p in pkgs:
         if 'openssl-qoriq' in p:
             d.appendVar("RPROVIDES_%s" % p, p.replace('openssl-qoriq', 'openssl'))

@@ -17,7 +17,7 @@ PROVIDES = "ptpd"
 inherit autotools pkgconfig systemd

 python() {
-    pkgs = d.getVar('PACKAGES', True).split()
+    pkgs = d.getVar('PACKAGES').split()
     for p in pkgs:
         if 'ptpd-qoriq' in p:
             d.appendVar("RPROVIDES_%s" % p, p.replace('ptpd-qoriq', 'ptpd'))

@@ -25,7 +25,7 @@ COMPATIBLE_HOST_mipsarchn64 = "null"
 PROVIDES = "qemu"

 python() {
-    pkgs = d.getVar('PACKAGES', True).split()
+    pkgs = d.getVar('PACKAGES').split()
     for p in pkgs:
         if 'qemu-qoriq' in p:
             d.appendVar("RPROVIDES_%s" % p, p.replace('qemu-qoriq', 'qemu'))

@@ -82,7 +82,7 @@ PACKAGES =+ "libclc-imx libclc-imx-dev \
              libopenvx-imx libopenvx-imx-dev \
 "
 python __anonymous () {
-    has_vivante_kernel_driver_support = (d.getVar('MACHINE_HAS_VIVANTE_KERNEL_DRIVER_SUPPORT', True) or '0')
+    has_vivante_kernel_driver_support = (d.getVar('MACHINE_HAS_VIVANTE_KERNEL_DRIVER_SUPPORT') or '0')
     if has_vivante_kernel_driver_support != '1':
         raise bb.parse.SkipPackage('The kernel of machine needs to have Vivante kernel driver support for this recipe to be used.')
 }

@@ -102,7 +102,7 @@ SOLIBS = "${SOLIBSDEV}"
 python __anonymous() {
     # FIXME: All binaries lack GNU_HASH in elf binary but as we don't have
     # the source we cannot fix it. Disable the insane check for now.
-    packages = d.getVar('PACKAGES', True).split()
+    packages = d.getVar('PACKAGES').split()
     for p in packages:
         d.appendVar("INSANE_SKIP_%s" % p, " ldflags")

@@ -6,7 +6,7 @@ PACKAGECONFIG_remove_imxgpu3d = "gles"

 # FIXME: mesa should support 'x11-no-tls' option
 python () {
-    overrides = d.getVar("OVERRIDES", True).split(":")
+    overrides = d.getVar("OVERRIDES").split(":")
     if "imxgpu2d" not in overrides:
         return

@@ -4,7 +4,7 @@ LICENSE = "GPLv2"
 LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263"

 python() {
-    pkgs = d.getVar('PACKAGES', True).split()
+    pkgs = d.getVar('PACKAGES').split()
     for p in pkgs:
         if 'cryptodev-qoriq' in p:
             d.appendVar("RPROVIDES_%s" % p, p.replace('cryptodev-qoriq', 'cryptodev'))

@@ -43,7 +43,7 @@ python __set_insane_skip() {
     # Ensure we have PACKAGES expanded
     bb.build.exec_func("read_subpackage_metadata", d)

-    for p in d.getVar('PACKAGES', True).split():
+    for p in d.getVar('PACKAGES').split():
         # Even though we are packaging libraries those are plugins so we
         # shouldn't rename the packages to follow its sonames.
         d.setVar("DEBIAN_NOAUTONAME_%s" % p, "1")

@@ -66,7 +66,7 @@ python __split_libfslcodec_plugins() {
                       output_pattern='imx-codec-%s',
                       description='Freescale i.MX Codec (%s)',
                       extra_depends='')
-    pkgs = d.getVar('PACKAGES', True).split()
+    pkgs = d.getVar('PACKAGES').split()
     for pkg in pkgs:
         meta = pkg[10:]
         if meta != '':

@@ -30,7 +30,7 @@ python __set_insane_skip() {
     # FIXME: All binaries lack GNU_HASH in elf binary but as we don't have
     # the source we cannot fix it. Disable the insane check for now.
     # FIXME: gst-fsl-plugin looks for the .so files so we need to deploy those
-    for p in d.getVar('PACKAGES', True).split():
+    for p in d.getVar('PACKAGES').split():
         d.setVar("INSANE_SKIP_%s" % p, "ldflags dev-so textrel")
 }