Mirror of git://git.yoctoproject.org/poky.git (synced 2025-07-19 12:59:02 +02:00)
base/package: Move source revision information from PV to PKGV
Source control information being present in PV used to be a hard requirement for bitbake to operate correctly. Now that hashes are a required part of task stamps, this requirement no longer exists. This means we can defer the hash pieces to PKGV and simplify PV.

Use the new bitbake fetcher API to inject the source revisions directly into the hash, allowing removal of some horrible code from base.bbclass and avoiding any hardcoding about how SRCREV may or may not be used. Use that API to obtain the string to append to PKGV and append that directly.

The user-visible effect of this change is that PV will no longer have revision information in it; that information is now appended to PV through PKGV when the packages are written. Since PV is used in STAMP and WORKDIR, users will see small directory naming and stamp naming changes.

This means that sstate reuse through hash equivalence, where the source revision changes but the output does not, becomes possible, as the sstate naming becomes less specific and no longer contains the revision.

The SRCPV variable will no longer be needed in PV and is effectively now just a null operation. Usage can be removed over time.

(From OE-Core rev: a8e7b0f932b9ea69b3a218fca18041676c65aba0)

Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
parent 871a4ac6e7
commit 62afa02d01
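For illustration, a minimal sketch of what this means for a git-based recipe. The version number, the "+git" suffix and the exact PKGV suffix shown below are assumptions for the example, not values taken from this commit; the calls package_setup_pkgv() and bb.fetch.get_pkgv_string() are the ones added in the diff that follows.

# Hypothetical recipe fragment illustrating the new scheme.
SRCREV = "f82ca3f42510fb3ef10f598b393eb373a2c34ca7"

# Before this change, PV typically carried the revision via SRCPV:
#   PV = "1.2+git${SRCPV}"
# Now PV can stay revision-free, so STAMP and WORKDIR paths no longer change
# with every new source revision:
PV = "1.2+git"

# At packaging time, package_setup_pkgv() appends the string returned by
# bb.fetch.get_pkgv_string(d) to PKGV whenever PKGV contains a "+", so the
# written packages still record the revision, e.g. a PKGV resembling
# "1.2+git0+f82ca3f425" (the exact suffix format depends on the fetcher).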
@@ -1,16 +1,4 @@
SUMMARY = "Test recipe for fetching git submodules"
HOMEPAGE = "https://git.yoctoproject.org/git/matchbox-panel-2"
LICENSE = "GPL-2.0-or-later"
LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f"
require gitunpackoffline.inc

INHIBIT_DEFAULT_DEPS = "1"

TAGVALUE = "2.10"

# Deliberately have a tag which has to be resolved but ensure do_unpack doesn't access the network again.
SRC_URI = "git://git.yoctoproject.org/git/matchbox-panel-2;branch=master;protocol=https"
SRC_URI:append:gitunpack-enable-recipe = ";tag=${TAGVALUE}"
SRCREV = "f82ca3f42510fb3ef10f598b393eb373a2c34ca7"
SRCREV:gitunpack-enable-recipe = ""

S = "${WORKDIR}/git"
# Clear the base.bbclass magic srcrev call
fetcher_hashes_dummyfunc[vardepvalue] = ""
@@ -1,4 +1,4 @@
require gitunpackoffline-fail.bb
require gitunpackoffline.inc

TAGVALUE = "2.11"
@@ -0,0 +1,16 @@
SUMMARY = "Test recipe for fetching git submodules"
HOMEPAGE = "https://git.yoctoproject.org/git/matchbox-panel-2"
LICENSE = "GPL-2.0-or-later"
LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f"

INHIBIT_DEFAULT_DEPS = "1"

TAGVALUE = "2.10"

# Deliberately have a tag which has to be resolved but ensure do_unpack doesn't access the network again.
SRC_URI = "git://git.yoctoproject.org/git/matchbox-panel-2;branch=master;protocol=https"
SRC_URI:append:gitunpack-enable-recipe = ";tag=${TAGVALUE}"
SRCREV = "f82ca3f42510fb3ef10f598b393eb373a2c34ca7"
SRCREV:gitunpack-enable-recipe = ""

S = "${WORKDIR}/git"
@@ -126,11 +126,18 @@ def setup_hosttools_dir(dest, toolsvar, d, fatal=True):
    if notfound and fatal:
        bb.fatal("The following required tools (as specified by HOSTTOOLS) appear to be unavailable in PATH, please install them in order to proceed:\n %s" % " ".join(notfound))

# We can't use vardepvalue against do_fetch directly since that would overwrite
# the other task dependencies so we use an indirect function.
python fetcher_hashes_dummyfunc() {
    return
}
fetcher_hashes_dummyfunc[vardepvalue] = "${@bb.fetch.get_hashvalue(d)}"

addtask fetch
do_fetch[dirs] = "${DL_DIR}"
do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}"
do_fetch[file-checksums] += " ${@get_lic_checksum_file_list(d)}"
do_fetch[vardeps] += "SRCREV"
do_fetch[prefuncs] += "fetcher_hashes_dummyfunc"
do_fetch[network] = "1"
python base_do_fetch() {
@@ -606,7 +613,6 @@ python () {
        bb.debug(1, "Skipping recipe %s because of incompatible license(s): %s" % (pn, ' '.join(incompatible_lic)))
        raise bb.parse.SkipRecipe("it has incompatible license(s): %s" % ' '.join(incompatible_lic))

    needsrcrev = False
    srcuri = d.getVar('SRC_URI')
    for uri_string in srcuri.split():
        uri = bb.fetch.URI(uri_string)
@@ -619,24 +625,17 @@ python () {

        # Svn packages should DEPEND on subversion-native
        if uri.scheme == "svn":
            needsrcrev = True
            d.appendVarFlag('do_fetch', 'depends', ' subversion-native:do_populate_sysroot')

        # Git packages should DEPEND on git-native
        elif uri.scheme in ("git", "gitsm"):
            needsrcrev = True
            d.appendVarFlag('do_fetch', 'depends', ' git-native:do_populate_sysroot')

        # Mercurial packages should DEPEND on mercurial-native
        elif uri.scheme == "hg":
            needsrcrev = True
            d.appendVar("EXTRANATIVEPATH", ' python3-native ')
            d.appendVarFlag('do_fetch', 'depends', ' mercurial-native:do_populate_sysroot')

        # Perforce packages support SRCREV = "${AUTOREV}"
        elif uri.scheme == "p4":
            needsrcrev = True

        # OSC packages should DEPEND on osc-native
        elif uri.scheme == "osc":
            d.appendVarFlag('do_fetch', 'depends', ' osc-native:do_populate_sysroot')
@@ -645,7 +644,6 @@ python () {
            d.appendVarFlag('do_fetch', 'depends', ' nodejs-native:do_populate_sysroot')

        elif uri.scheme == "repo":
            needsrcrev = True
            d.appendVarFlag('do_fetch', 'depends', ' repo-native:do_populate_sysroot')

        # *.lz4 should DEPEND on lz4-native for unpacking
@@ -676,21 +674,6 @@ python () {
        elif path.endswith('.deb'):
            d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

    if needsrcrev:
        d.setVar("SRCPV", "${@bb.fetch2.get_srcrev(d)}")

        # Gather all named SRCREVs to add to the sstate hash calculation
        # This anonymous python snippet is called multiple times so we
        # need to be careful to not double up the appends here and cause
        # the base hash to mismatch the task hash
        for uri in srcuri.split():
            parm = bb.fetch.decodeurl(uri)[5]
            uri_names = parm.get("name", "").split(",")
            for uri_name in filter(None, uri_names):
                srcrev_name = "SRCREV_{}".format(uri_name)
                if srcrev_name not in (d.getVarFlag("do_fetch", "vardeps") or "").split():
                    d.appendVarFlag("do_fetch", "vardeps", " {}".format(srcrev_name))

    set_packagetriplet(d)

    # 'multimachine' handling
@@ -315,13 +315,21 @@ python package_get_auto_pr() {
# Package functions suitable for inclusion in PACKAGEFUNCS
#

python package_convert_pr_autoinc() {
python package_setup_pkgv() {
    pkgv = d.getVar("PKGV")
    # Expand SRCPV into PKGV if not present
    srcpv = bb.fetch.get_pkgv_string(d)
    if srcpv and "+" in pkgv:
        d.appendVar("PKGV", srcpv)
        pkgv = d.getVar("PKGV")

    # Adjust pkgv as necessary...
    if 'AUTOINC' in pkgv:
        d.setVar("PKGV", pkgv.replace("AUTOINC", "${PRSERV_PV_AUTOINC}"))
}


python package_convert_pr_autoinc() {
    # Change PRSERV_PV_AUTOINC and EXTENDPRAUTO usage to special values
    d.setVar('PRSERV_PV_AUTOINC', '@PRSERV_PV_AUTOINC@')
    d.setVar('EXTENDPRAUTO', '@EXTENDPRAUTO@')
@@ -494,6 +502,7 @@ python do_package () {
        oe.qa.handle_error("var-undefined", msg, d)
        return

    bb.build.exec_func("package_setup_pkgv", d)
    bb.build.exec_func("package_convert_pr_autoinc", d)

    # Check for conflict between renamed packages and existing ones
@@ -577,6 +586,7 @@ addtask do_package_setscene
# Copy from PKGDESTWORK to tempdirectory as tempdirectory can be cleaned at both
# do_package_setscene and do_packagedata_setscene leading to races
python do_packagedata () {
    bb.build.exec_func("package_setup_pkgv", d)
    bb.build.exec_func("package_get_auto_pr", d)

    src = d.expand("${PKGDESTWORK}")
@@ -63,6 +63,7 @@ python () {
        else:
            d.setVar('B', '${WORKDIR}/${BPN}-${PV}')

        bb.fetch.get_hashvalue(d)
        local_srcuri = []
        fetch = bb.fetch2.Fetch((d.getVar('SRC_URI') or '').split(), d)
        for url in fetch.urls:
@@ -126,6 +127,9 @@ python () {
        d.setVarFlag('do_compile', 'file-checksums', '${@srctree_hash_files(d)}')
        d.setVarFlag('do_configure', 'file-checksums', '${@srctree_configure_hash_files(d)}')

        d.appendVarFlag('do_compile', 'prefuncs', ' fetcher_hashes_dummyfunc')
        d.appendVarFlag('do_configure', 'prefuncs', ' fetcher_hashes_dummyfunc')

        # We don't want the workdir to go away
        d.appendVar('RM_WORK_EXCLUDE', ' ' + d.getVar('PN'))
@@ -735,10 +735,7 @@ SRC_URI[vardepsexclude] += "\
SRCDATE = "${DATE}"
SRCREV ??= "INVALID"
AUTOREV = "${@bb.fetch2.get_autorev(d)}"
AUTOREV[vardepvalue] = "${SRCPV}"
# Set Dynamically in base.bbclass
# SRCPV = "${@bb.fetch2.get_srcrev(d)}"
SRCPV[vardepvalue] = "${SRCPV}"
SRCPV = ""

SRC_URI = ""
@@ -103,7 +103,7 @@ class Dependencies(OESelftestTestCase):

        r = """
        LICENSE="CLOSED"
        SRC_URI="git://example.com/repo;branch=master"
        SRC_URI="git://example.com/repo;branch=master;rev=ffffffffffffffffffffffffffffffffffffffff"
        """
        f = self.write_recipe(textwrap.dedent(r), tempdir)
        d = tinfoil.parse_recipe_file(f)