mirror of
git://git.yoctoproject.org/poky.git
synced 2025-07-19 12:59:02 +02:00

The implementation of BUILDHISTORY_RESET is problematic, particularly given that people are trying to create an API with it alongside BUILDHISTORY_PRESERVE which simply doesn't exist and can't work reliably. Worse, the code paths with this bolted-on implementation are convoluted and near impossible to follow. BUILDHISTORY_PRESERVE is effectively internal API, used to stop buildhistory removing some files which are needed for data, or are created at different parts of the build. Add a comment to explain what it is doing and why these files are listed. Commit 9f68a45aa238ae5fcdfaca71ba0e7015e9cb720e tried to "fix" preserve support with the reset functionality but it didn't fully work and has just exposed further issues. There is a further fix however I can barely follow the code and in reviewing it, I've concluded we shouldn't be doing this at all. Due to the way BUILDHISTORY_RESET was implemented, horrible races were introduced making it unclear what happens to the data if builds fail for example, or how sstate interacts with the build since things get reset but stamps do not and tasks may not rerun. It also interacts badly with any additions to the preserve list, due to misunderstandings on what that variable does. Having stared long and hard at the code, and really struggled to understand it, I'm of the view that "reset" for CI purposes should be done by the CI itself. The CI can choose to remove some files or all files and decide how to handle failures. It has to handle the buildhistory directory anyway. Therefore drop BUILDHISTORY_RESET support, allowing the "old" codepaths to be dropped. BUILDHISTORY_PRESERVE is better documented to hint that it is internal API and to show what it is really for. If we really do want some functionality like this, it needs to be implemented in a way you can follow the code, and have tests. (From OE-Core rev: 15c5258fd0063ace425b7e904521f1695ffb2a85) Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
962 lines
36 KiB
Plaintext
962 lines
36 KiB
Plaintext
#
|
|
# Records history of build output in order to detect regressions
|
|
#
|
|
# Based in part on testlab.bbclass and packagehistory.bbclass
|
|
#
|
|
# Copyright (C) 2011-2016 Intel Corporation
|
|
# Copyright (C) 2007-2011 Koen Kooi <koen@openembedded.org>
|
|
#
|
|
# SPDX-License-Identifier: MIT
|
|
#
|
|
|
|
# Image recipes additionally inherit image-artifact-names
IMAGE_CLASSES += "image-artifact-names"

# Which aspects of the build to record; the functions below also recognise "task"
BUILDHISTORY_FEATURES ?= "image package sdk"
# Root directory for all recorded build history output
BUILDHISTORY_DIR ?= "${TOPDIR}/buildhistory"
# Per-image, per-recipe and per-SDK output locations within the history tree
BUILDHISTORY_DIR_IMAGE = "${BUILDHISTORY_DIR}/images/${MACHINE_ARCH}/${TCLIBC}/${IMAGE_BASENAME}"
BUILDHISTORY_DIR_PACKAGE = "${BUILDHISTORY_DIR}/packages/${MULTIMACH_TARGET_SYS}/${PN}"

BUILDHISTORY_DIR_SDK = "${BUILDHISTORY_DIR}/sdk/${SDK_NAME}${SDK_EXT}/${IMAGE_BASENAME}"
# Files copied verbatim out of the image rootfs / SDK into the history tree
BUILDHISTORY_IMAGE_FILES ?= "/etc/passwd /etc/group"
BUILDHISTORY_SDK_FILES ?= "conf/local.conf conf/bblayers.conf conf/auto.conf conf/locked-sigs.inc conf/devtool.conf"
# NOTE(review): commit/push/tag settings are consumed by the git commit logic
# later in this class (buildhistory_commit and friends)
BUILDHISTORY_COMMIT ?= "1"
BUILDHISTORY_COMMIT_AUTHOR ?= "buildhistory <buildhistory@${DISTRO}>"
BUILDHISTORY_PUSH_REPO ?= ""
BUILDHISTORY_TAG ?= "build"
# Prefix stripped from paths in the task signature dump (buildhistory_write_sigs)
BUILDHISTORY_PATH_PREFIX_STRIP ?= ""

# We want to avoid influencing the signatures of the task so use vardepsexclude
do_populate_sysroot[postfuncs] += "buildhistory_emit_sysroot"
do_populate_sysroot_setscene[postfuncs] += "buildhistory_emit_sysroot"
do_populate_sysroot[vardepsexclude] += "buildhistory_emit_sysroot"

do_package[postfuncs] += "buildhistory_list_pkg_files"
do_package_setscene[postfuncs] += "buildhistory_list_pkg_files"
do_package[vardepsexclude] += "buildhistory_list_pkg_files"

do_packagedata[postfuncs] += "buildhistory_emit_pkghistory"
do_packagedata_setscene[postfuncs] += "buildhistory_emit_pkghistory"
do_packagedata[vardepsexclude] += "buildhistory_emit_pkghistory"

# Similarly for our function that gets the output signatures
SSTATEPOSTUNPACKFUNCS:append = " buildhistory_emit_outputsigs"
sstate_installpkgdir[vardepsexclude] += "buildhistory_emit_outputsigs"
SSTATEPOSTUNPACKFUNCS[vardepvalueexclude] .= "| buildhistory_emit_outputsigs"

# All items except those listed here will be removed from a recipe's
# build history directory by buildhistory_emit_pkghistory(). This is
# necessary because some of these items (package directories, files that
# we no longer emit) might be obsolete.
#
# The files listed here are either written by tasks that aren't do_package (e.g.
# latest_srcrev from do_fetch) so do_package must not remove them, or, they're
# used to read values in do_package before always being overwritten, e.g. latest,
# for version backwards checks.
BUILDHISTORY_PRESERVE = "latest latest_srcrev sysroot"

PATCH_GIT_USER_EMAIL ?= "buildhistory@oe"
PATCH_GIT_USER_NAME ?= "OpenEmbedded"
|
|
|
|
#
|
|
# Write out the contents of the sysroot
|
|
#
|
|
buildhistory_emit_sysroot() {
    # Record a file listing of the staged sysroot for this recipe
    # (runs as a do_populate_sysroot postfunc).
    mkdir --parents ${BUILDHISTORY_DIR_PACKAGE}
    case ${CLASSOVERRIDE} in
    class-native|class-cross|class-crosssdk)
        # Native/cross recipes stage their files under the native staging prefix
        BASE=${SYSROOT_DESTDIR}/${STAGING_DIR_NATIVE}
        ;;
    *)
        BASE=${SYSROOT_DESTDIR}
        ;;
    esac
    # Ownership data is not meaningful for sysroot content, so list without owners
    buildhistory_list_files_no_owners $BASE ${BUILDHISTORY_DIR_PACKAGE}/sysroot
}
|
|
|
|
#
|
|
# Write out metadata about this package for comparison when writing future packages
|
|
#
|
|
python buildhistory_emit_pkghistory() {
    # Record recipe- and package-level metadata for this recipe under
    # BUILDHISTORY_DIR_PACKAGE (runs as a do_packagedata postfunc) so that
    # later builds can be compared against it and version regressions
    # ("version going backwards") detected.
    import re
    import json
    import shlex
    import errno

    if not "package" in (d.getVar('BUILDHISTORY_FEATURES') or "").split():
        return 0

    pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE')

    class RecipeInfo:
        # Recipe-level values written by write_recipehistory() to the
        # top-level "latest" file
        def __init__(self, name):
            self.name = name
            self.pe = "0"
            self.pv = "0"
            self.pr = "r0"
            self.depends = ""
            self.packages = ""
            self.srcrev = ""
            self.layer = ""
            self.license = ""
            self.config = ""
            self.src_uri = ""


    class PackageInfo:
        # Per-package values written by write_pkghistory() to <pkg>/latest
        def __init__(self, name):
            self.name = name
            self.pe = "0"
            self.pv = "0"
            self.pr = "r0"
            # pkg/pkge/pkgv/pkgr should be empty because we want to be able to default them
            self.pkg = ""
            self.pkge = ""
            self.pkgv = ""
            self.pkgr = ""
            self.size = 0
            self.depends = ""
            self.rprovides = ""
            self.rdepends = ""
            self.rrecommends = ""
            self.rsuggests = ""
            self.rreplaces = ""
            self.rconflicts = ""
            self.files = ""
            self.filelist = ""
            # Variables that need to be written to their own separate file
            self.filevars = dict.fromkeys(['pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm'])

    # Should check PACKAGES here to see if anything was removed

    def readPackageInfo(pkg, histfile):
        # Parse a previously-written <pkg>/latest file back into a PackageInfo
        pkginfo = PackageInfo(pkg)
        with open(histfile, "r") as f:
            for line in f:
                lns = line.split('=', 1)
                name = lns[0].strip()
                value = lns[1].strip(" \t\r\n").strip('"')
                if name == "PE":
                    pkginfo.pe = value
                elif name == "PV":
                    pkginfo.pv = value
                elif name == "PR":
                    pkginfo.pr = value
                elif name == "PKG":
                    pkginfo.pkg = value
                elif name == "PKGE":
                    pkginfo.pkge = value
                elif name == "PKGV":
                    pkginfo.pkgv = value
                elif name == "PKGR":
                    pkginfo.pkgr = value
                elif name == "RPROVIDES":
                    pkginfo.rprovides = value
                elif name == "RDEPENDS":
                    pkginfo.rdepends = value
                elif name == "RRECOMMENDS":
                    pkginfo.rrecommends = value
                elif name == "RSUGGESTS":
                    pkginfo.rsuggests = value
                elif name == "RREPLACES":
                    pkginfo.rreplaces = value
                elif name == "RCONFLICTS":
                    pkginfo.rconflicts = value
                elif name == "PKGSIZE":
                    pkginfo.size = int(value)
                elif name == "FILES":
                    pkginfo.files = value
                elif name == "FILELIST":
                    pkginfo.filelist = value
        # Apply defaults: fall back to the recipe-level version fields when
        # the package-specific ones were not recorded
        if not pkginfo.pkg:
            pkginfo.pkg = pkginfo.name
        if not pkginfo.pkge:
            pkginfo.pkge = pkginfo.pe
        if not pkginfo.pkgv:
            pkginfo.pkgv = pkginfo.pv
        if not pkginfo.pkgr:
            pkginfo.pkgr = pkginfo.pr
        return pkginfo

    def getlastpkgversion(pkg):
        # Return the PackageInfo recorded by the previous build, or None if
        # there is no (readable) history for this package yet
        try:
            histfile = os.path.join(pkghistdir, pkg, "latest")
            return readPackageInfo(pkg, histfile)
        except EnvironmentError:
            return None

    def sortpkglist(string):
        # Sort a runtime dependency list, keeping any " (<op> version)"
        # constraint attached to the package name it follows
        pkgiter = re.finditer(r'[a-zA-Z0-9.+-]+( \([><=]+[^)]+\))?', string, 0)
        pkglist = [p.group(0) for p in pkgiter]
        pkglist.sort()
        return ' '.join(pkglist)

    def sortlist(string):
        # Sort a simple space-separated list
        items = string.split(' ')
        items.sort()
        return ' '.join(items)

    pn = d.getVar('PN')
    pe = d.getVar('PE') or "0"
    pv = d.getVar('PV')
    pr = d.getVar('PR')
    layer = bb.utils.get_file_layer(d.getVar('FILE'), d)
    license = d.getVar('LICENSE')

    # Read the PACKAGES list from the pkgdata file for this recipe
    pkgdata_dir = d.getVar('PKGDATA_DIR')
    packages = ""
    try:
        with open(os.path.join(pkgdata_dir, pn)) as f:
            for line in f.readlines():
                if line.startswith('PACKAGES: '):
                    packages = oe.utils.squashspaces(line.split(': ', 1)[1])
                    break
    except IOError as e:
        if e.errno == errno.ENOENT:
            # Probably a -cross recipe, just ignore
            return 0
        else:
            raise

    packagelist = packages.split()
    preserve = d.getVar('BUILDHISTORY_PRESERVE').split()
    if not os.path.exists(pkghistdir):
        bb.utils.mkdirhier(pkghistdir)
    else:
        # Remove files for packages that no longer exist
        for item in os.listdir(pkghistdir):
            if item not in preserve:
                if item not in packagelist:
                    itempath = os.path.join(pkghistdir, item)
                    if os.path.isdir(itempath):
                        for subfile in os.listdir(itempath):
                            os.unlink(os.path.join(itempath, subfile))
                        os.rmdir(itempath)
                    else:
                        os.unlink(itempath)

    rcpinfo = RecipeInfo(pn)
    rcpinfo.pe = pe
    rcpinfo.pv = pv
    rcpinfo.pr = pr
    rcpinfo.depends = sortlist(oe.utils.squashspaces(d.getVar('DEPENDS') or ""))
    rcpinfo.packages = packages
    rcpinfo.layer = layer
    rcpinfo.license = license
    rcpinfo.config = sortlist(oe.utils.squashspaces(d.getVar('PACKAGECONFIG') or ""))
    rcpinfo.src_uri = oe.utils.squashspaces(d.getVar('SRC_URI') or "")
    write_recipehistory(rcpinfo, d)

    bb.build.exec_func("read_subpackage_metadata", d)

    for pkg in packagelist:
        # Evaluate each variable with the package name appended to OVERRIDES
        # so per-package values (e.g. PKGV:<pkg>) are picked up
        localdata = d.createCopy()
        localdata.setVar('OVERRIDES', d.getVar("OVERRIDES", False) + ":" + pkg)

        pkge = localdata.getVar("PKGE") or '0'
        pkgv = localdata.getVar("PKGV")
        pkgr = localdata.getVar("PKGR")
        #
        # Find out what the last version was
        # Make sure the version did not decrease
        #
        lastversion = getlastpkgversion(pkg)
        if lastversion:
            last_pkge = lastversion.pkge
            last_pkgv = lastversion.pkgv
            last_pkgr = lastversion.pkgr
            r = bb.utils.vercmp((pkge, pkgv, pkgr), (last_pkge, last_pkgv, last_pkgr))
            if r < 0:
                msg = "Package version for package %s went backwards which would break package feeds (from %s:%s-%s to %s:%s-%s)" % (pkg, last_pkge, last_pkgv, last_pkgr, pkge, pkgv, pkgr)
                oe.qa.handle_error("version-going-backwards", msg, d)

        pkginfo = PackageInfo(pkg)
        # Apparently the version can be different on a per-package basis (see Python)
        pkginfo.pe = localdata.getVar("PE") or '0'
        pkginfo.pv = localdata.getVar("PV")
        pkginfo.pr = localdata.getVar("PR")
        pkginfo.pkg = localdata.getVar("PKG")
        pkginfo.pkge = pkge
        pkginfo.pkgv = pkgv
        pkginfo.pkgr = pkgr
        pkginfo.rprovides = sortpkglist(oe.utils.squashspaces(localdata.getVar("RPROVIDES") or ""))
        pkginfo.rdepends = sortpkglist(oe.utils.squashspaces(localdata.getVar("RDEPENDS") or ""))
        pkginfo.rrecommends = sortpkglist(oe.utils.squashspaces(localdata.getVar("RRECOMMENDS") or ""))
        pkginfo.rsuggests = sortpkglist(oe.utils.squashspaces(localdata.getVar("RSUGGESTS") or ""))
        # Fix: this previously assigned to a non-existent "replaces" attribute,
        # so the RREPLACES value was silently never written out by
        # write_pkghistory() (which reads pkginfo.rreplaces)
        pkginfo.rreplaces = sortpkglist(oe.utils.squashspaces(localdata.getVar("RREPLACES") or ""))
        pkginfo.rconflicts = sortpkglist(oe.utils.squashspaces(localdata.getVar("RCONFLICTS") or ""))
        pkginfo.files = oe.utils.squashspaces(localdata.getVar("FILES") or "")
        for filevar in pkginfo.filevars:
            pkginfo.filevars[filevar] = localdata.getVar(filevar) or ""

        # Gather information about packaged files
        val = localdata.getVar('FILES_INFO') or ''
        dictval = json.loads(val)
        filelist = list(dictval.keys())
        filelist.sort()
        pkginfo.filelist = " ".join([shlex.quote(x) for x in filelist])

        pkginfo.size = int(localdata.getVar('PKGSIZE') or '0')

        write_pkghistory(pkginfo, d)

    oe.qa.exit_if_errors(d)
}
|
|
|
|
python buildhistory_emit_outputsigs() {
    # Record a sha256 checksum for every file under the current working
    # directory (the unpacked sstate output; runs via SSTATEPOSTUNPACKFUNCS),
    # written to ${BUILDHISTORY_DIR}/task/output/<PN>.<task>.
    if not "task" in (d.getVar('BUILDHISTORY_FEATURES') or "").split():
        return

    import hashlib

    taskoutdir = os.path.join(d.getVar('BUILDHISTORY_DIR'), 'task', 'output')
    bb.utils.mkdirhier(taskoutdir)
    currenttask = d.getVar('BB_CURRENTTASK')
    pn = d.getVar('PN')
    taskfile = os.path.join(taskoutdir, '%s.%s' % (pn, currenttask))

    cwd = os.getcwd()
    filesigs = {}
    for root, _, files in os.walk(cwd):
        for fname in files:
            # Skip fixmepath: its content is rewritten per-install so its
            # hash would be unstable
            if fname == 'fixmepath':
                continue
            fullpath = os.path.join(root, fname)
            try:
                if os.path.islink(fullpath):
                    # Hash the link target itself rather than following it
                    sha256 = hashlib.sha256(os.readlink(fullpath).encode('utf-8')).hexdigest()
                elif os.path.isfile(fullpath):
                    sha256 = bb.utils.sha256_file(fullpath)
                else:
                    # Not a regular file or symlink (e.g. fifo/device) - skip
                    continue
            except OSError:
                # Unreadable entries are reported but do not fail the task
                bb.warn('buildhistory: unable to read %s to get output signature' % fullpath)
                continue
            filesigs[os.path.relpath(fullpath, cwd)] = sha256
    # Emit sorted "path hash" lines so the output is stable and diffable
    with open(taskfile, 'w') as f:
        for fpath, fsig in sorted(filesigs.items(), key=lambda item: item[0]):
            f.write('%s %s\n' % (fpath, fsig))
}
|
|
|
|
|
|
def write_recipehistory(rcpinfo, d):
    """Write recipe-level metadata (versions, DEPENDS, PACKAGES, layer,
    license, config, SRC_URI) to the top-level 'latest' file under
    BUILDHISTORY_DIR_PACKAGE, then record the latest SRCREV."""
    bb.debug(2, "Writing recipe history")

    pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE')

    infofile = os.path.join(pkghistdir, "latest")
    with open(infofile, "w") as f:
        # PE is only recorded when set to a non-default value
        if rcpinfo.pe != "0":
            f.write(u"PE = %s\n" % rcpinfo.pe)
        f.write(u"PV = %s\n" % rcpinfo.pv)
        f.write(u"PR = %s\n" % rcpinfo.pr)
        f.write(u"DEPENDS = %s\n" % rcpinfo.depends)
        f.write(u"PACKAGES = %s\n" % rcpinfo.packages)
        f.write(u"LAYER = %s\n" % rcpinfo.layer)
        f.write(u"LICENSE = %s\n" % rcpinfo.license)
        f.write(u"CONFIG = %s\n" % rcpinfo.config)
        f.write(u"SRC_URI = %s\n" % rcpinfo.src_uri)

    # latest_srcrev is one of the BUILDHISTORY_PRESERVE files - see the
    # comment on that variable
    write_latest_srcrev(d, pkghistdir)
|
|
|
|
def write_pkghistory(pkginfo, d):
    """Write per-package metadata to <pkg>/latest under
    BUILDHISTORY_DIR_PACKAGE, plus a separate latest.<scriptlet> file for
    each packaging scriptlet the package defines."""
    bb.debug(2, "Writing package history for package %s" % pkginfo.name)

    pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE')

    pkgpath = os.path.join(pkghistdir, pkginfo.name)
    if not os.path.exists(pkgpath):
        bb.utils.mkdirhier(pkgpath)

    infofile = os.path.join(pkgpath, "latest")
    with open(infofile, "w") as f:
        # Values matching their defaults/fallbacks are omitted to keep the
        # file minimal (readPackageInfo applies the same defaults on read)
        if pkginfo.pe != "0":
            f.write(u"PE = %s\n" % pkginfo.pe)
        f.write(u"PV = %s\n" % pkginfo.pv)
        f.write(u"PR = %s\n" % pkginfo.pr)

        if pkginfo.pkg != pkginfo.name:
            f.write(u"PKG = %s\n" % pkginfo.pkg)
        if pkginfo.pkge != pkginfo.pe:
            f.write(u"PKGE = %s\n" % pkginfo.pkge)
        if pkginfo.pkgv != pkginfo.pv:
            f.write(u"PKGV = %s\n" % pkginfo.pkgv)
        if pkginfo.pkgr != pkginfo.pr:
            f.write(u"PKGR = %s\n" % pkginfo.pkgr)
        f.write(u"RPROVIDES = %s\n" % pkginfo.rprovides)
        f.write(u"RDEPENDS = %s\n" % pkginfo.rdepends)
        f.write(u"RRECOMMENDS = %s\n" % pkginfo.rrecommends)
        if pkginfo.rsuggests:
            f.write(u"RSUGGESTS = %s\n" % pkginfo.rsuggests)
        if pkginfo.rreplaces:
            f.write(u"RREPLACES = %s\n" % pkginfo.rreplaces)
        if pkginfo.rconflicts:
            f.write(u"RCONFLICTS = %s\n" % pkginfo.rconflicts)
        f.write(u"PKGSIZE = %d\n" % pkginfo.size)
        f.write(u"FILES = %s\n" % pkginfo.files)
        f.write(u"FILELIST = %s\n" % pkginfo.filelist)

    # Write each scriptlet to its own latest.<name> file; remove stale files
    # for scriptlets this package no longer defines
    for filevar in pkginfo.filevars:
        filevarpath = os.path.join(pkgpath, "latest.%s" % filevar)
        val = pkginfo.filevars[filevar]
        if val:
            with open(filevarpath, "w") as f:
                f.write(val)
        else:
            if os.path.exists(filevarpath):
                os.unlink(filevarpath)
|
|
|
|
#
|
|
# rootfs_type can be: image, sdk_target, sdk_host
|
|
#
|
|
def buildhistory_list_installed(d, rootfs_type="image"):
    """Dump the installed-package list ('file' format) and dependency list
    ('deps' format) for an image or SDK into pid-suffixed temporary files in
    WORKDIR, for buildhistory_get_installed (shell) to consume.

    rootfs_type can be: image, sdk_target, sdk_host.
    """
    from oe.rootfs import image_list_installed_packages
    from oe.sdk import sdk_list_installed_packages
    from oe.utils import format_pkg_list

    # pid-suffixed names keep concurrent invocations from clashing
    process_list = [('file', 'bh_installed_pkgs_%s.txt' % os.getpid()),\
            ('deps', 'bh_installed_pkgs_deps_%s.txt' % os.getpid())]

    if rootfs_type == "image":
        pkgs = image_list_installed_packages(d)
    else:
        pkgs = sdk_list_installed_packages(d, rootfs_type == "sdk_target")

    # SDK host packages use their own pkgdata area
    if rootfs_type == "sdk_host":
        pkgdata_dir = d.getVar('PKGDATA_DIR_SDK')
    else:
        pkgdata_dir = d.getVar('PKGDATA_DIR')

    for output_type, output_file in process_list:
        output_file_full = os.path.join(d.getVar('WORKDIR'), output_file)

        with open(output_file_full, 'w') as output:
            output.write(format_pkg_list(pkgs, output_type, pkgdata_dir))
|
|
|
|
python buildhistory_list_installed_image() {
    # Dump installed-package lists for the image rootfs
    buildhistory_list_installed(d)
}
|
|
|
|
python buildhistory_list_installed_sdk_target() {
    # Dump installed-package lists for the SDK target sysroot
    buildhistory_list_installed(d, "sdk_target")
}
|
|
|
|
python buildhistory_list_installed_sdk_host() {
    # Dump installed-package lists for the SDK host sysroot
    buildhistory_list_installed(d, "sdk_host")
}
|
|
|
|
buildhistory_get_installed() {
    # Turn the temporary package/dependency dumps written by
    # buildhistory_list_installed into the final history files in $1:
    # installed-package-names.txt, installed-packages.txt, depends*.dot,
    # installed-package-sizes.txt and installed-package-info.txt.
    # $1 = output directory, $2 = "sdk" for SDK output, $3 = "host"/"target"
    mkdir -p $1

    # Get list of installed packages
    # NOTE(review): the python side writes these files suffixed with
    # os.getpid() - confirm ${PID} expands to the same value here
    pkgcache="$1/installed-packages.tmp"
    cat ${WORKDIR}/bh_installed_pkgs_${PID}.txt | sort > $pkgcache && rm ${WORKDIR}/bh_installed_pkgs_${PID}.txt

    cat $pkgcache | awk '{ print $1 }' > $1/installed-package-names.txt

    if [ -s $pkgcache ] ; then
        cat $pkgcache | awk '{ print $2 }' | xargs -n1 basename > $1/installed-packages.txt
    else
        # Still create the file so consumers do not have to special-case
        # an empty package list
        printf "" > $1/installed-packages.txt
    fi

    # Produce dependency graph
    # First, quote each name to handle characters that cause issues for dot
    sed 's:\([^| ]*\):"\1":g' ${WORKDIR}/bh_installed_pkgs_deps_${PID}.txt > $1/depends.tmp &&
        rm ${WORKDIR}/bh_installed_pkgs_deps_${PID}.txt
    # Remove lines with rpmlib(...) and config(...) dependencies, change the
    # delimiter from pipe to "->", set the style for recommend lines and
    # turn versioned dependencies into edge labels.
    sed -i -e '/rpmlib(/d' \
           -e '/config(/d' \
           -e 's:|: -> :' \
           -e 's:"\[REC\]":[style=dotted]:' \
           -e 's:"\([<>=]\+\)" "\([^"]*\)":[label="\1 \2"]:' \
           -e 's:"\([*]\+\)" "\([^"]*\)":[label="\2"]:' \
           -e 's:"\[RPROVIDES\]":[style=dashed]:' \
           $1/depends.tmp
    # Add header, sorted and de-duped contents and footer and then delete the temp file
    printf "digraph depends {\n node [shape=plaintext]\n" > $1/depends.dot
    cat $1/depends.tmp | sort -u >> $1/depends.dot
    echo "}" >> $1/depends.dot
    rm $1/depends.tmp

    # Set correct pkgdatadir
    pkgdatadir=${PKGDATA_DIR}
    if [ "$2" = "sdk" ] && [ "$3" = "host" ] ; then
        pkgdatadir="${PKGDATA_DIR_SDK}"
    fi

    # Produce installed package sizes list
    oe-pkgdata-util -p $pkgdatadir read-value "PKGSIZE" -n -f $pkgcache > $1/installed-package-sizes.tmp
    cat $1/installed-package-sizes.tmp | awk '{print $2 "\tKiB\t" $1}' | sort -n -r > $1/installed-package-sizes.txt
    rm $1/installed-package-sizes.tmp

    # Produce package info: runtime_name, buildtime_name, recipe, version, size
    oe-pkgdata-util -p $pkgdatadir read-value "PACKAGE,PN,PV,PKGSIZE" -n -f $pkgcache > $1/installed-package-info.tmp
    cat $1/installed-package-info.tmp | sort -n -r -k 5 > $1/installed-package-info.txt
    rm $1/installed-package-info.tmp

    # We're now done with the cache, delete it
    rm $pkgcache

    if [ "$2" != "sdk" ] ; then
        # Produce some cut-down graphs (for readability)
        grep -v kernel-image $1/depends.dot | grep -v kernel-3 | grep -v kernel-4 > $1/depends-nokernel.dot
        grep -v libc6 $1/depends-nokernel.dot | grep -v libgcc > $1/depends-nokernel-nolibc.dot
        grep -v update- $1/depends-nokernel-nolibc.dot > $1/depends-nokernel-nolibc-noupdate.dot
        grep -v kernel-module $1/depends-nokernel-nolibc-noupdate.dot > $1/depends-nokernel-nolibc-noupdate-nomodules.dot
    fi

    # Add complementary package information
    if [ -e ${WORKDIR}/complementary_pkgs.txt ]; then
        cp ${WORKDIR}/complementary_pkgs.txt $1
    fi
}
|
|
|
|
buildhistory_get_image_installed() {
    # Anything requiring the use of the packaging system should be done in here
    # in case the packaging files are going to be removed for this image

    if [ "${@bb.utils.contains('BUILDHISTORY_FEATURES', 'image', '1', '0', d)}" = "0" ] ; then
        return
    fi

    buildhistory_get_installed ${BUILDHISTORY_DIR_IMAGE}
}
|
|
|
|
buildhistory_get_sdk_installed() {
    # Anything requiring the use of the packaging system should be done in here
    # in case the packaging files are going to be removed for this SDK
    # $1 = "host" or "target" (selects the SDK sub-sysroot to record)

    if [ "${@bb.utils.contains('BUILDHISTORY_FEATURES', 'sdk', '1', '0', d)}" = "0" ] ; then
        return
    fi

    buildhistory_get_installed ${BUILDHISTORY_DIR_SDK}/$1 sdk $1
}
|
|
|
|
buildhistory_get_sdk_installed_host() {
    # Record the SDK host sysroot's installed packages
    buildhistory_get_sdk_installed host
}
|
|
|
|
buildhistory_get_sdk_installed_target() {
    # Record the SDK target sysroot's installed packages
    buildhistory_get_sdk_installed target
}
|
|
|
|
buildhistory_list_files() {
    # List the files in the specified directory, but exclude date/time etc.
    # This is somewhat messy, but handles cases where the size is not printed for device files under pseudo
    # $1 = directory to list, $2 = output file,
    # $3 = "fakeroot" to run the listing under ${FAKEROOTCMD} so ownership
    #      recorded by pseudo is visible
    ( cd $1
    find_cmd='find . ! -path . -printf "%M %-10u %-10g %10s %p -> %l\n"'
    if [ "$3" = "fakeroot" ] ; then
        eval ${FAKEROOTENV} ${FAKEROOTCMD} $find_cmd
    else
        eval $find_cmd
    fi | sort -k5 | sed 's/ * -> $//' > $2 )
}
|
|
|
|
buildhistory_list_files_no_owners() {
    # List the files in the specified directory, but exclude date/time etc.
    # Also don't output the ownership data, but instead output just - - so
    # that the same parsing code as for _list_files works.
    # This is somewhat messy, but handles cases where the size is not printed for device files under pseudo
    # $1 = directory to list, $2 = output file, $3 = "fakeroot" (optional)
    ( cd $1
    find_cmd='find . ! -path . -printf "%M - - %10s %p -> %l\n"'
    if [ "$3" = "fakeroot" ] ; then
        eval ${FAKEROOTENV} ${FAKEROOTCMD} "$find_cmd"
    else
        eval "$find_cmd"
    fi | sort -k5 | sed 's/ * -> $//' > $2 )
}
|
|
|
|
buildhistory_list_pkg_files() {
    # Record a files-in-package.txt listing for each of this recipe's split
    # packages (runs as a do_package postfunc).
    if [ "${@bb.utils.contains('BUILDHISTORY_FEATURES', 'package', '1', '0', d)}" = "0" ] ; then
        return
    fi

    # Create individual files-in-package for each recipe's package
    pkgdirlist=$(find ${PKGDEST}/* -maxdepth 0 -type d)
    for pkgdir in $pkgdirlist; do
        pkgname=$(basename $pkgdir)
        outfolder="${BUILDHISTORY_DIR_PACKAGE}/$pkgname"
        outfile="$outfolder/files-in-package.txt"
        mkdir -p $outfolder
        # Run under fakeroot so ownership recorded by pseudo is listed
        buildhistory_list_files $pkgdir $outfile fakeroot
    done
}
|
|
|
|
buildhistory_get_imageinfo() {
    # Record image contents: full file listing, selected files copied out of
    # the rootfs, machine-readable image configuration and the image size.
    if [ "${@bb.utils.contains('BUILDHISTORY_FEATURES', 'image', '1', '0', d)}" = "0" ] ; then
        return
    fi

    mkdir -p ${BUILDHISTORY_DIR_IMAGE}
    buildhistory_list_files ${IMAGE_ROOTFS} ${BUILDHISTORY_DIR_IMAGE}/files-in-image.txt

    # Collect files requested in BUILDHISTORY_IMAGE_FILES
    rm -rf ${BUILDHISTORY_DIR_IMAGE}/image-files
    for f in ${BUILDHISTORY_IMAGE_FILES}; do
        if [ -f ${IMAGE_ROOTFS}/$f ] ; then
            mkdir -p ${BUILDHISTORY_DIR_IMAGE}/image-files/`dirname $f`
            cp ${IMAGE_ROOTFS}/$f ${BUILDHISTORY_DIR_IMAGE}/image-files/$f
        fi
    done

    # Record some machine-readable meta-information about the image
    printf "" > ${BUILDHISTORY_DIR_IMAGE}/image-info.txt
    cat >> ${BUILDHISTORY_DIR_IMAGE}/image-info.txt <<END
${@buildhistory_get_imagevars(d)}
END
    imagesize=`du -ks ${IMAGE_ROOTFS} | awk '{ print $1 }'`
    echo "IMAGESIZE = $imagesize" >> ${BUILDHISTORY_DIR_IMAGE}/image-info.txt

    # Add some configuration information
    echo "${MACHINE}: ${IMAGE_BASENAME} configured for ${DISTRO} ${DISTRO_VERSION}" > ${BUILDHISTORY_DIR_IMAGE}/build-id.txt

    cat >> ${BUILDHISTORY_DIR_IMAGE}/build-id.txt <<END
${@buildhistory_get_build_id(d)}
END
}
|
|
|
|
buildhistory_get_sdkinfo() {
    # Record SDK contents: full file listing, selected config files copied
    # out of the SDK, machine-readable SDK configuration and the SDK size.
    if [ "${@bb.utils.contains('BUILDHISTORY_FEATURES', 'sdk', '1', '0', d)}" = "0" ] ; then
        return
    fi

    buildhistory_list_files ${SDK_OUTPUT} ${BUILDHISTORY_DIR_SDK}/files-in-sdk.txt

    # Collect files requested in BUILDHISTORY_SDK_FILES
    rm -rf ${BUILDHISTORY_DIR_SDK}/sdk-files
    for f in ${BUILDHISTORY_SDK_FILES}; do
        if [ -f ${SDK_OUTPUT}/${SDKPATH}/$f ] ; then
            mkdir -p ${BUILDHISTORY_DIR_SDK}/sdk-files/`dirname $f`
            cp ${SDK_OUTPUT}/${SDKPATH}/$f ${BUILDHISTORY_DIR_SDK}/sdk-files/$f
        fi
    done

    # Record some machine-readable meta-information about the SDK
    printf "" > ${BUILDHISTORY_DIR_SDK}/sdk-info.txt
    cat >> ${BUILDHISTORY_DIR_SDK}/sdk-info.txt <<END
${@buildhistory_get_sdkvars(d)}
END
    sdksize=`du -ks ${SDK_OUTPUT} | awk '{ print $1 }'`
    echo "SDKSIZE = $sdksize" >> ${BUILDHISTORY_DIR_SDK}/sdk-info.txt
}
|
|
|
|
python buildhistory_get_extra_sdkinfo() {
    # For the extensible SDK, record the size of each bundled sstate package
    # and the aggregate size per task, each sorted largest-first.
    import operator
    from oe.sdk import get_extra_sdkinfo

    sstate_dir = d.expand('${SDK_OUTPUT}/${SDKPATH}/sstate-cache')
    extra_info = get_extra_sdkinfo(sstate_dir)

    if d.getVar('BB_CURRENTTASK') == 'populate_sdk_ext' and \
            "sdk" in (d.getVar('BUILDHISTORY_FEATURES') or "").split():
        with open(d.expand('${BUILDHISTORY_DIR_SDK}/sstate-package-sizes.txt'), 'w') as f:
            # Sort by size (descending), then name, for stable output
            filesizes_sorted = sorted(extra_info['filesizes'].items(), key=operator.itemgetter(1, 0), reverse=True)
            for fn, size in filesizes_sorted:
                f.write('%10d KiB %s\n' % (size, fn))
        with open(d.expand('${BUILDHISTORY_DIR_SDK}/sstate-task-sizes.txt'), 'w') as f:
            tasksizes_sorted = sorted(extra_info['tasksizes'].items(), key=operator.itemgetter(1, 0), reverse=True)
            for task, size in tasksizes_sorted:
                f.write('%10d KiB %s\n' % (size, task))
}
|
|
|
|
# By using ROOTFS_POSTUNINSTALL_COMMAND we get in after uninstallation of
# unneeded packages but before the removal of packaging files
ROOTFS_POSTUNINSTALL_COMMAND += "buildhistory_list_installed_image"
ROOTFS_POSTUNINSTALL_COMMAND += "buildhistory_get_image_installed"
# Keep the hook additions out of the task signatures so enabling
# buildhistory does not invalidate existing stamps
ROOTFS_POSTUNINSTALL_COMMAND[vardepvalueexclude] .= "| buildhistory_list_installed_image| buildhistory_get_image_installed"
ROOTFS_POSTUNINSTALL_COMMAND[vardepsexclude] += "buildhistory_list_installed_image buildhistory_get_image_installed"

IMAGE_POSTPROCESS_COMMAND += "buildhistory_get_imageinfo"
IMAGE_POSTPROCESS_COMMAND[vardepvalueexclude] .= "| buildhistory_get_imageinfo"
IMAGE_POSTPROCESS_COMMAND[vardepsexclude] += "buildhistory_get_imageinfo"

# We want these to be the last run so that we get called after complementary package installation
POPULATE_SDK_POST_TARGET_COMMAND:append = " buildhistory_list_installed_sdk_target"
POPULATE_SDK_POST_TARGET_COMMAND:append = " buildhistory_get_sdk_installed_target"
POPULATE_SDK_POST_TARGET_COMMAND[vardepvalueexclude] .= "| buildhistory_list_installed_sdk_target| buildhistory_get_sdk_installed_target"
POPULATE_SDK_POST_TARGET_COMMAND[vardepsexclude] += "buildhistory_list_installed_sdk_target buildhistory_get_sdk_installed_target"

POPULATE_SDK_POST_HOST_COMMAND:append = " buildhistory_list_installed_sdk_host"
POPULATE_SDK_POST_HOST_COMMAND:append = " buildhistory_get_sdk_installed_host"
POPULATE_SDK_POST_HOST_COMMAND[vardepvalueexclude] .= "| buildhistory_list_installed_sdk_host| buildhistory_get_sdk_installed_host"
POPULATE_SDK_POST_HOST_COMMAND[vardepsexclude] += "buildhistory_list_installed_sdk_host buildhistory_get_sdk_installed_host"

SDK_POSTPROCESS_COMMAND:append = " buildhistory_get_sdkinfo buildhistory_get_extra_sdkinfo"
SDK_POSTPROCESS_COMMAND[vardepvalueexclude] .= "| buildhistory_get_sdkinfo buildhistory_get_extra_sdkinfo"
SDK_POSTPROCESS_COMMAND[vardepsexclude] += "buildhistory_get_sdkinfo buildhistory_get_extra_sdkinfo"
|
|
|
|
python buildhistory_write_sigs() {
    # Dump the task signature list via the signature generator (when it
    # supports dump_siglist) into ${BUILDHISTORY_DIR}/task/tasksigs.txt.
    if not "task" in (d.getVar('BUILDHISTORY_FEATURES') or "").split():
        return

    # Create sigs file
    if hasattr(bb.parse.siggen, 'dump_siglist'):
        taskoutdir = os.path.join(d.getVar('BUILDHISTORY_DIR'), 'task')
        bb.utils.mkdirhier(taskoutdir)
        # BUILDHISTORY_PATH_PREFIX_STRIP removes a path prefix from the
        # recorded filenames
        bb.parse.siggen.dump_siglist(os.path.join(taskoutdir, 'tasksigs.txt'), d.getVar("BUILDHISTORY_PATH_PREFIX_STRIP"))
}
|
|
|
|
def buildhistory_get_build_id(d):
    """Return the human-readable build configuration summary (header plus
    the output of each BUILDCFG_FUNCS function), or "" outside a worker."""
    # Values are only available within a bitbake worker process
    if d.getVar('BB_WORKERCONTEXT') != '1':
        return ""
    data_copy = bb.data.createCopy(d)
    status_lines = []
    module_scope = globals()
    for func_name in oe.data.typed_value('BUILDCFG_FUNCS', data_copy):
        if func_name not in module_scope:
            bb.warn("Build configuration function '%s' does not exist" % func_name)
            continue
        output_lines = module_scope[func_name](data_copy)
        if output_lines:
            status_lines.extend(output_lines)

    header = d.getVar('BUILDCFG_HEADER')
    return '\n%s\n%s\n' % (header, '\n'.join(status_lines))
|
|
|
|
def buildhistory_get_metadata_revs(d):
    """Return one aligned "layer = branch:revision<dirty-marker>" line per
    configured layer."""
    # We want an easily machine-readable format here
    return '\n'.join(
        "%-17s = %s:%s%s" % (rev[1], rev[2], rev[3], rev[4])
        for rev in oe.buildcfg.get_layer_revisions(d))
|
|
|
|
def outputvars(vars, listvars, d):
    """Render each variable named in *vars* (space-separated) as a
    "NAME = value" line; names also present in *listvars* have their
    whitespace squashed. Unset variables render with an empty value."""
    listvar_names = listvars.split()
    chunks = []
    for name in vars.split():
        value = d.getVar(name) or ""
        if name in listvar_names:
            # Squash out spaces
            value = oe.utils.squashspaces(value)
        chunks.append("%s = %s\n" % (name, value))
    return "".join(chunks).rstrip('\n')
|
|
|
|
def buildhistory_get_imagevars(d):
    """Return the image configuration variables formatted for image-info.txt,
    or "" when not running inside a bitbake worker."""
    if d.getVar('BB_WORKERCONTEXT') != '1':
        return ""
    return outputvars(
        "DISTRO DISTRO_VERSION USER_CLASSES IMAGE_CLASSES IMAGE_FEATURES IMAGE_LINGUAS IMAGE_INSTALL BAD_RECOMMENDATIONS NO_RECOMMENDATIONS PACKAGE_EXCLUDE ROOTFS_POSTPROCESS_COMMAND IMAGE_POSTPROCESS_COMMAND",
        "USER_CLASSES IMAGE_CLASSES IMAGE_FEATURES IMAGE_LINGUAS IMAGE_INSTALL BAD_RECOMMENDATIONS PACKAGE_EXCLUDE",
        d)
|
|
|
|
def buildhistory_get_sdkvars(d):
    """Return the SDK configuration variables formatted for sdk-info.txt,
    or "" when not running inside a bitbake worker."""
    if d.getVar('BB_WORKERCONTEXT') != '1':
        return ""
    sdkvars = "DISTRO DISTRO_VERSION SDK_NAME SDK_VERSION SDKMACHINE SDKIMAGE_FEATURES TOOLCHAIN_HOST_TASK TOOLCHAIN_TARGET_TASK BAD_RECOMMENDATIONS NO_RECOMMENDATIONS PACKAGE_EXCLUDE"
    if d.getVar('BB_CURRENTTASK') == 'populate_sdk_ext':
        # Extensible SDK uses some additional variables
        sdkvars = sdkvars + " ESDK_LOCALCONF_ALLOW ESDK_LOCALCONF_REMOVE ESDK_CLASS_INHERIT_DISABLE SDK_UPDATE_URL SDK_EXT_TYPE SDK_RECRDEP_TASKS SDK_INCLUDE_PKGDATA SDK_INCLUDE_TOOLCHAIN"
    return outputvars(
        sdkvars,
        "SDKIMAGE_FEATURES BAD_RECOMMENDATIONS PACKAGE_EXCLUDE ESDK_LOCALCONF_ALLOW ESDK_LOCALCONF_REMOVE ESDK_CLASS_INHERIT_DISABLE",
        d)
|
|
|
|
|
|
def buildhistory_get_cmdline(d):
    """Reconstruct the command line this build was started with.

    Returns an empty string if BB_CMDLINE is unset.
    """
    argv = d.getVar('BB_CMDLINE', False)
    if not argv:
        return ''
    # Collapse the full interpreter path down to just 'bitbake' for readability
    bincmd = 'bitbake' if argv[0].endswith('bin/bitbake') else argv[0]
    return '%s %s' % (bincmd, ' '.join(argv[1:]))
|
|
|
|
|
|
buildhistory_single_commit() {
	# Create one git commit recording the outcome of this build.
	#   $1 = command line used to start the build
	#   $2 = hostname the build ran on
	#   $3 = non-empty when there are file changes staged to commit;
	#        empty means record a "no changes" commit instead
	if [ "$3" = "" ] ; then
		# Nothing changed: force an empty commit so every build is recorded
		commitopts="${BUILDHISTORY_DIR}/ --allow-empty"
		shortlogprefix="No changes: "
	else
		commitopts=""
		shortlogprefix=""
	fi
	# BUILDHISTORY_BUILD_FAILURES/INTERRUPTED are set on a datastore copy
	# by buildhistory_eventhandler before this function is executed
	if [ "${BUILDHISTORY_BUILD_FAILURES}" = "0" ] ; then
		result="succeeded"
	else
		result="failed"
	fi
	case ${BUILDHISTORY_BUILD_INTERRUPTED} in
		1)
			result="$result (interrupted)"
			;;
		2)
			result="$result (force interrupted)"
			;;
	esac
	# Assemble the commit message in a temporary file so it can hold
	# multiple lines plus the per-layer metadata revisions
	commitmsgfile=`mktemp`
	cat > $commitmsgfile << END
${shortlogprefix}Build ${BUILDNAME} of ${DISTRO} ${DISTRO_VERSION} for machine ${MACHINE} on $2

cmd: $1

result: $result

metadata revisions:
END
	cat ${BUILDHISTORY_DIR}/metadata-revs >> $commitmsgfile
	git commit $commitopts -F $commitmsgfile --author "${BUILDHISTORY_COMMIT_AUTHOR}" > /dev/null
	rm $commitmsgfile
}
|
|
|
|
buildhistory_commit() {
	# Commit the accumulated contents of BUILDHISTORY_DIR to its git
	# repository (initialising it on first use).  Invoked from
	# buildhistory_eventhandler at the end of the build.
	if [ ! -d ${BUILDHISTORY_DIR} ] ; then
		# Code above that creates this dir never executed, so there can't be anything to commit
		return
	fi

	# Create a machine-readable list of metadata revisions for each layer
	cat > ${BUILDHISTORY_DIR}/metadata-revs <<END
${@buildhistory_get_metadata_revs(d)}
END

	( cd ${BUILDHISTORY_DIR}/
	# Initialise the repo if necessary
	if [ ! -e .git ] ; then
		git init -q
	else
		# Rotate the -minus-N convenience tags so the last three builds
		# remain easy to diff; failures here are non-fatal
		git tag -f --no-sign ${BUILDHISTORY_TAG}-minus-3 ${BUILDHISTORY_TAG}-minus-2 > /dev/null 2>&1 || true
		git tag -f --no-sign ${BUILDHISTORY_TAG}-minus-2 ${BUILDHISTORY_TAG}-minus-1 > /dev/null 2>&1 || true
		git tag -f --no-sign ${BUILDHISTORY_TAG}-minus-1 > /dev/null 2>&1 || true
	fi

	check_git_config

	# Check if there are new/changed files to commit (other than metadata-revs)
	repostatus=`git status --porcelain | grep -v " metadata-revs$"`
	HOSTNAME=`hostname 2>/dev/null || echo unknown`
	CMDLINE="${@buildhistory_get_cmdline(d)}"
	if [ "$repostatus" != "" ] ; then
		git add -A .
		# Porcelain output looks like "?? packages/foo/bar"
		# Ensure we commit metadata-revs with the first commit
		buildhistory_single_commit "$CMDLINE" "$HOSTNAME" dummy
	else
		buildhistory_single_commit "$CMDLINE" "$HOSTNAME"
	fi
	if [ "${BUILDHISTORY_PUSH_REPO}" != "" ] ; then
		git push -q ${BUILDHISTORY_PUSH_REPO}
	fi) || true
}
|
|
|
|
python buildhistory_eventhandler() {
    # At the end of a successful-or-failed build, write the task signature
    # data and commit the buildhistory repository.  'e' (the event) and 'd'
    # (the datastore) are injected by bitbake into event handler scope.
    if (e.data.getVar('BUILDHISTORY_FEATURES') or "").strip():
        if isinstance(e, bb.event.BuildCompleted):
            if e.data.getVar("BUILDHISTORY_COMMIT") == "1":
                bb.note("Writing buildhistory")
                bb.build.exec_func("buildhistory_write_sigs", d)
                import time
                start=time.time()
                # Pass the build outcome to buildhistory_commit /
                # buildhistory_single_commit via variables on a copy of the
                # datastore, so the commit message can report it
                localdata = bb.data.createCopy(e.data)
                localdata.setVar('BUILDHISTORY_BUILD_FAILURES', str(e._failures))
                # NOTE(review): _interrupted appears to be optional on the
                # event, hence the getattr default of 0 -- confirm against
                # the bb.event.BuildCompleted definition
                interrupted = getattr(e, '_interrupted', 0)
                localdata.setVar('BUILDHISTORY_BUILD_INTERRUPTED', str(interrupted))
                bb.build.exec_func("buildhistory_commit", localdata)
                stop=time.time()
                bb.note("Writing buildhistory took: %s seconds" % round(stop-start))
            else:
                bb.note("No commit since BUILDHISTORY_COMMIT != '1'")
}
|
|
|
|
addhandler buildhistory_eventhandler
# The handler only acts on BuildCompleted; BuildStarted was used by the
# now-removed BUILDHISTORY_RESET support, so stop delivering it
buildhistory_eventhandler[eventmask] = "bb.event.BuildCompleted"
|
|
|
|
|
|
# FIXME this ought to be moved into the fetcher
def _get_srcrev_values(d):
    """
    Return the version strings for the current recipe
    """

    fetcher = bb.fetch.Fetch(d.getVar('SRC_URI').split(), d)
    urldata = fetcher.ud
    # Only SCM-backed URLs (git, svn, ...) carry a source revision
    scms = [url for url in urldata if urldata[url].method.supports_srcrev()]

    dict_srcrevs = {}
    dict_tag_srcrevs = {}
    for scm in scms:
        ud = urldata[scm]
        autoinc, rev = ud.method.sortable_revision(ud, d, ud.name)
        dict_srcrevs[ud.name] = rev
        if 'tag' in ud.parm:
            # Track tag-pinned revisions separately so a moved tag can be
            # detected on the next build
            dict_tag_srcrevs[ud.name + '_' + ud.parm['tag']] = rev
    return (dict_srcrevs, dict_tag_srcrevs)
|
|
|
|
# Record source revisions immediately after fetching, without affecting
# the do_fetch task signature
do_fetch[postfuncs] += "write_srcrev"
do_fetch[vardepsexclude] += "write_srcrev"
python write_srcrev() {
    # Write this recipe's SRCREV values into its buildhistory directory
    write_latest_srcrev(d, d.getVar('BUILDHISTORY_DIR_PACKAGE'))
}
|
|
|
|
def write_latest_srcrev(d, pkghistdir):
    """Record the current source revisions in <pkghistdir>/latest_srcrev.

    Writes one SRCREV line per source (plus the originally configured value
    as a comment, if set) and '# tag_...' comment lines for tag-pinned
    sources.  Warns when a tag now resolves to a different revision than it
    did on the previous build.  If the recipe has no SCM sources, any stale
    file is removed instead.
    """
    srcrevfile = os.path.join(pkghistdir, 'latest_srcrev')

    srcrevs, tag_srcrevs = _get_srcrev_values(d)
    if not srcrevs:
        # No SCM-backed sources; don't leave an outdated file behind
        if os.path.exists(srcrevfile):
            os.remove(srcrevfile)
        return

    if not os.path.exists(pkghistdir):
        bb.utils.mkdirhier(pkghistdir)

    # Load the tag revisions recorded by the previous build so moved tags
    # can be detected below
    old_tag_srcrevs = {}
    if os.path.exists(srcrevfile):
        with open(srcrevfile) as f:
            for line in f:
                if not line.startswith('# tag_'):
                    continue
                key, value = line.split("=", 1)
                old_tag_srcrevs[key.replace('# tag_', '').strip()] = value.replace('"', '').strip()

    with open(srcrevfile, 'w') as f:
        for name, srcrev in sorted(srcrevs.items()):
            # The unnamed source maps to plain SRCREV; named ones to SRCREV_<name>
            suffix = "" if name == "default" else "_" + name
            orig_srcrev = d.getVar('SRCREV%s' % suffix, False)
            if orig_srcrev:
                f.write('# SRCREV%s = "%s"\n' % (suffix, orig_srcrev))
            f.write('SRCREV%s = "%s"\n' % (suffix, srcrev))
        for name, srcrev in sorted(tag_srcrevs.items()):
            f.write('# tag_%s = "%s"\n' % (name, srcrev))
            if name in old_tag_srcrevs and old_tag_srcrevs[name] != srcrev:
                pkg = d.getVar('PN')
                bb.warn("Revision for tag %s in package %s was changed since last build (from %s to %s)" % (name, pkg, old_tag_srcrevs[name], srcrev))
|
|
|
|
# Record ptest results after image testing, without affecting the
# do_testimage task signature
do_testimage[postfuncs] += "write_ptest_result"
do_testimage[vardepsexclude] += "write_ptest_result"

python write_ptest_result() {
    # Copy the ptest logs for this test run into the buildhistory directory
    write_latest_ptest_result(d, d.getVar('BUILDHISTORY_DIR'))
}
|
|
|
|
def write_latest_ptest_result(d, histdir):
    """Copy ptest results from TEST_LOG_DIR into <histdir>/ptest.

    The pass.fail.* result files are sorted in place afterwards so that
    successive builds produce stable, diffable output.  Does nothing if no
    ptest log directory exists.
    """
    import glob
    import subprocess
    test_log_dir = d.getVar('TEST_LOG_DIR')
    input_ptest = os.path.join(test_log_dir, 'ptest_log')
    output_ptest = os.path.join(histdir, 'ptest')
    if not os.path.exists(input_ptest):
        return
    # Create the output directory before taking the lock: the lock file
    # lives inside it, so locking first would fail on the first run.
    bb.utils.mkdirhier(output_ptest)
    # Lock it to avoid race issue.  Acquire outside the try block so the
    # finally clause never references an unbound 'lock' if locking fails.
    lock = bb.utils.lockfile(output_ptest + "/ptest.lock")
    try:
        oe.path.copytree(input_ptest, output_ptest)
        # Sort test result
        for result in glob.glob('%s/pass.fail.*' % output_ptest):
            bb.debug(1, 'Processing %s' % result)
            cmd = ['sort', result, '-o', result]
            bb.debug(1, 'Running %s' % cmd)
            ret = subprocess.call(cmd)
            if ret != 0:
                bb.error('Failed to run %s!' % cmd)
    finally:
        bb.utils.unlockfile(lock)