Mirror of git://git.yoctoproject.org/yocto-autobuilder-helper.git
scripts: use argparse for command line parsing
[RP: Minor tweaks made to adapt to buildbot and option naming
Fixed shared-repo-unpack for publishing when no cache-dir supplied]

Signed-off-by: Paul Eggleton <paul.eggleton@linux.intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>

parent c41c2e0a5a
commit d7d1a2b345
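
The conversion follows one pattern across all of the scripts touched below: the manual len(sys.argv) checks are dropped in favour of a shared utils.ArgParser helper (an argparse.ArgumentParser subclass added to utils.py at the end of this diff, which prints the full help text whenever parsing fails), mandatory parameters stay positional, and the old "None" placeholder parameters become optional flags. A minimal sketch of the new calling convention, assuming it is run from the scripts directory so that utils is importable; the parser description and argument names are copied from the shared-repo-unpack hunk below, while the final check and print are illustrative only:

#!/usr/bin/env python3
import utils

parser = utils.ArgParser(description='Unpacks a shared directory of repos to the autobuilder working directory.')
parser.add_argument('repojson',
                    help="The json file containing the repositories to use")
parser.add_argument('abworkdir',
                    help="The autobuilder working directory")
parser.add_argument('target',
                    help="The target we want to unpack repos for")
parser.add_argument('-p', '--publish-dir',
                    action='store',
                    help="Where to publish artefacts to (optional)")

args = parser.parse_args()

# Optional flags default to None, so the scripts' old '!= "None"' style tests
# become plain truth checks on the parsed namespace.
if args.publish_dir:
    print("Would publish artefacts to %s" % args.publish_dir)
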
@@ -2,9 +2,6 @@
 #
 # Move the repositories into the correct layout and generate bblayers.conf
 #
-# Called with $1 - The autobuilder working directory
-# $2 - The target to filter the repos to
-#
 
 import json
 import os
@@ -14,22 +11,24 @@ import errno
 
 import utils
 
-if len(sys.argv) != 3:
-    print("Incorrect number of parameters, please call as %s <autobuilder-workdir> <target>" % sys.argv[0])
-    sys.exit(1)
-
-targetdir = sys.argv[1]
-target = sys.argv[2]
-targetbuilddir = targetdir
+parser = utils.ArgParser(description='Moves the repositories into the correct layout and generates bblayers.conf.')
+parser.add_argument('abworkdir',
+                    help="The autobuilder working directory")
+parser.add_argument('target',
+                    help="The target to filter the repos to")
+
+args = parser.parse_args()
 
 ourconfig = utils.loadconfig()
 
-def bitbakecmd(targetbuilddir, cmd):
-    ret = subprocess.call(". ./oe-init-build-env; %s" % cmd, shell=True, cwd=targetbuilddir)
+def bitbakecmd(targetdir, cmd):
+    ret = subprocess.call(". ./oe-init-build-env; %s" % cmd, shell=True, cwd=targetdir)
     if ret:
         utils.printheader("ERROR: Command %s failed with exit code %d, see errors above." % (cmd, ret))
 
-needrepos = utils.getconfigvar("NEEDREPOS", ourconfig, target, None)
+needrepos = utils.getconfigvar("NEEDREPOS", ourconfig, args.target, None)
 
 callinit = False
 
@@ -42,15 +41,15 @@ for repo in needrepos:
             callinit = True
         if "checkout-dirname" in repos[repo]:
             checkdir = repos[repo]["checkout-dirname"]
-    utils.mkdir(targetbuilddir + "/" + checkdir)
-    for f in os.listdir(targetdir + "/repos/" + repo):
-        subprocess.check_call(['mv', targetdir + "/repos/" + repo + "/" + f, targetbuilddir + "/" + checkdir + "/"])
+    utils.mkdir(args.abworkdir + "/" + checkdir)
+    for f in os.listdir(args.abworkdir + "/repos/" + repo):
+        subprocess.check_call(['mv', args.abworkdir + "/repos/" + repo + "/" + f, args.abworkdir + "/" + checkdir + "/"])
 
 if callinit:
-    subprocess.check_call(". ./oe-init-build-env", shell=True, cwd=targetbuilddir)
+    subprocess.check_call(". ./oe-init-build-env", shell=True, cwd=args.abworkdir)
 
 for repo in needrepos:
     if repo in repos and "no-layer-add" in repos[repo] and repos[repo]["no-layer-add"]:
         continue
-    bitbakecmd(targetbuilddir, "bitbake-layers add-layer %s" % (targetbuilddir + "/" + repo))
+    bitbakecmd(args.abworkdir, "bitbake-layers add-layer %s" % (args.abworkdir + "/" + repo))
 
@@ -2,10 +2,6 @@
 #
 # Iterate over a set of repositories in a json file and setup a shared directory containing them
 #
-# Called with $1 - The json file containing the repositories to use
-# $2 - The shared directory where the repos are to be transferred
-# $3 - Directory to publish artefacts to
-#
 
 import json
 import os
@@ -15,25 +11,28 @@ import errno
 
 import utils
 
-if len(sys.argv) != 4:
-    print("Incorrect number of parameters, please call as %s <repo.json> <shared-sources-dir> <publish-dir>" % sys.argv[0])
-    sys.exit(1)
-
-repojson = sys.argv[1]
-shared = sys.argv[2]
-publish = None
-if sys.argv[3] != "None":
-    publish = sys.argv[3]
+parser = utils.ArgParser(description='Iterates over a set of repositories in a json file and sets up a shared directory containing them.')
+parser.add_argument('repojson',
+                    help="The json file containing the repositories to use")
+parser.add_argument('sharedsrcdir',
+                    help="The shared directory where the repos are to be transferred")
+parser.add_argument('-p', '--publish-dir',
+                    action='store',
+                    help="Where to publish artefacts to (optional)")
+
+args = parser.parse_args()
 
 ourconfig = utils.loadconfig()
 
-with open(repojson) as f:
+with open(args.repojson) as f:
     repos = json.load(f)
 
 stashdir = utils.getconfig("REPO_STASH_DIR", ourconfig)
 
 for repo in sorted(repos.keys()):
     utils.printheader("Intially fetching repo %s" % repo)
-    utils.fetchgitrepo(shared, repo, repos[repo], stashdir)
-    if publish:
-        utils.publishrepo(shared, repo, publish)
+    utils.fetchgitrepo(args.sharedsrcdir, repo, repos[repo], stashdir)
+    if args.publish_dir:
+        utils.publishrepo(args.sharedsrcdir, repo, args.publish_dir)
 
@@ -2,16 +2,6 @@
 #
 # Iterate over a set of configurations from json.conf, calling setup-config for each one, then running the build.
 #
-# Called with $1 - The 'nightly' target the autobuilder is running
-# $2 - The target build directory to configure
-# $3 - The poky branch name the build is running on
-# $4 - The name of the repository the build is running on
-# $5 - The directory to publish sstate into
-# $6 - A build-appliance SRCREV to use
-# $7 - Where to publish artefacts to (or None)
-# $8 - URL back to this build (for the error reporting system)
-# $9 - Disable echoing logs to stdout (if specified)
-#
 
 import json
 import os
@@ -21,44 +11,55 @@ import errno
 
 import utils
 
-if len(sys.argv) != 9 and len(sys.argv) != 10:
-    print("Incorrect number of parameters, please call as %s <nightly-target> <target-builddir> <branch-name> <repo-name> <sstate-publish-dir> <build-app-srcrev> <publish-dir> <error-report-url>" % sys.argv[0])
-    sys.exit(1)
-
-target = sys.argv[1]
-builddir = sys.argv[2]
-branchname = sys.argv[3]
-reponame = sys.argv[4]
-sstate_release = sys.argv[5]
-buildappsrcrev = sys.argv[6]
-publish = None
-if sys.argv[7] != "None":
-    publish = sys.argv[7]
-errorurl = None
-if sys.argv[8] != "None":
-    errorurl = sys.argv[8]
-echologs = True
-if len(sys.argv) == 10:
-    echologs = False
+parser = utils.ArgParser(description='Runs configurations in json.conf.')
+parser.add_argument('target',
+                    help="The 'nightly' target the autobuilder is running")
+parser.add_argument('builddir',
+                    help="The target build directory to configure")
+parser.add_argument('branchname',
+                    help="The poky branch name the build is running on")
+parser.add_argument('reponame',
+                    help="The name of the repository the build is running on")
+parser.add_argument('-s', '--sstateprefix',
+                    default='',
+                    help="The directory prefix to publish sstate into")
+parser.add_argument('-b', '--buildappsrcrev',
+                    default='',
+                    help="A build appliance SRCREV to use")
+parser.add_argument('-p', '--publish-dir',
+                    action='store',
+                    help="Where to publish artefacts to (optional)")
+parser.add_argument('-u', '--build-url',
+                    action='store',
+                    help="URL back to this build (for the error reporting system)")
+parser.add_argument('-t', '--test',
+                    action='store_true',
+                    default=False,
+                    help="Test mode - perform setup and dry-run of commands only")
+parser.add_argument('-q', '--quietlogging',
+                    action='store_true',
+                    default=False,
+                    help="Quiet mode - don't echo bitbake logs to stdout")
+
+args = parser.parse_args()
 
 scriptsdir = os.path.dirname(os.path.realpath(__file__))
 ourconfig = utils.loadconfig()
 
-testmode = False
-if "ABHELPERTEST" in os.environ:
-    testmode = True
+testmode = args.test
 
 # Find out the number of steps this target has
 maxsteps = 1
-if target in ourconfig['overrides']:
-    for v in ourconfig['overrides'][target]:
+if args.target in ourconfig['overrides']:
+    for v in ourconfig['overrides'][args.target]:
         if v.startswith("step"):
             n = int(v[4:])
             if n <= maxsteps:
                 continue
             maxsteps = n
 
-utils.printheader("Target task %s has %d steps" % (target, maxsteps))
+utils.printheader("Target task %s has %d steps" % (args.target, maxsteps))
 
 finalret = 0
 
@@ -73,8 +74,8 @@ def logname(path):
     return path + "/command.log.%s" % lognum
 
 revision = "unknown"
-report = utils.ErrorReport(ourconfig, target, builddir, branchname, revision)
-errordir = utils.errorreportdir(builddir)
+report = utils.ErrorReport(ourconfig, args.target, args.builddir, args.branchname, revision)
+errordir = utils.errorreportdir(args.builddir)
 utils.mkdir(errordir)
 
 
@@ -100,7 +101,7 @@ def bitbakecmd(builddir, cmd, report, stepnum, oeenv=True):
 
     with subprocess.Popen(cmd, shell=True, cwd=builddir + "/..", stdout=subprocess.PIPE, stderr=subprocess.STDOUT, bufsize=1) as p, open(log, 'ab') as f:
         for line in p.stdout:
-            if echologs:
+            if not args.quietlogging:
                 sys.stdout.buffer.write(line)
                 sys.stdout.flush()
             f.write(line)
@@ -124,54 +125,54 @@ def runcmd(cmd, *args, **kwargs):
         return
     subprocess.check_call(cmd, *args, **kwargs)
 
-bh_path, remoterepo, remotebranch, baseremotebranch = utils.getbuildhistoryconfig(ourconfig, builddir, target, reponame, branchname)
+bh_path, remoterepo, remotebranch, baseremotebranch = utils.getbuildhistoryconfig(ourconfig, args.builddir, args.target, args.reponame, args.branchname)
 if bh_path:
     runcmd([os.path.join(scriptsdir, "buildhistory-init"), bh_path, remoterepo, remotebranch, baseremotebranch])
 
 for stepnum in range(1, maxsteps + 1):
     # Add any layers specified
-    layers = utils.getconfiglist("ADDLAYER", ourconfig, target, stepnum)
+    layers = utils.getconfiglist("ADDLAYER", ourconfig, args.target, stepnum)
     for layer in layers:
-        bitbakecmd(builddir, "bitbake-layers add-layer %s" % layer, report, stepnum)
+        bitbakecmd(args.builddir, "bitbake-layers add-layer %s" % layer, report, stepnum)
 
     flush()
     # Generate the configuration files needed for this step
-    if utils.getconfigvar("WRITECONFIG", ourconfig, target, stepnum):
-        runcmd([scriptsdir + "/setup-config", target, str(stepnum - 1), builddir, branchname, reponame, sstate_release, buildappsrcrev])
+    if utils.getconfigvar("WRITECONFIG", ourconfig, args.target, stepnum):
+        runcmd([scriptsdir + "/setup-config", args.target, str(stepnum - 1), args.builddir, args.branchname, args.reponame, "-s", args.sstateprefix, "-b", args.buildappsrcrev])
 
     # Execute the targets for this configuration
-    targets = utils.getconfigvar("BBTARGETS", ourconfig, target, stepnum)
+    targets = utils.getconfigvar("BBTARGETS", ourconfig, args.target, stepnum)
     if targets:
         utils.printheader("Step %s/%s: Running bitbake %s" % (stepnum, maxsteps, targets))
-        bitbakecmd(builddir, "bitbake %s" % targets, report, stepnum)
+        bitbakecmd(args.builddir, "bitbake %s" % targets, report, stepnum)
 
     # Execute the sanity targets for this configuration
-    sanitytargets = utils.getconfigvar("SANITYTARGETS", ourconfig, target, stepnum)
+    sanitytargets = utils.getconfigvar("SANITYTARGETS", ourconfig, args.target, stepnum)
     if sanitytargets:
         utils.printheader("Step %s/%s: Running bitbake %s" % (stepnum, maxsteps, sanitytargets))
-        bitbakecmd(builddir, "%s/checkvnc; DISPLAY=:1 bitbake %s" % (scriptsdir, sanitytargets), report, stepnum)
+        bitbakecmd(args.builddir, "%s/checkvnc; DISPLAY=:1 bitbake %s" % (scriptsdir, sanitytargets), report, stepnum)
 
     # Run any extra commands specified
-    cmds = utils.getconfiglist("EXTRACMDS", ourconfig, target, stepnum)
+    cmds = utils.getconfiglist("EXTRACMDS", ourconfig, args.target, stepnum)
     for cmd in cmds:
         utils.printheader("Step %s/%s: Running command %s" % (stepnum, maxsteps, cmd))
-        bitbakecmd(builddir, cmd, report, stepnum)
-    cmds = utils.getconfiglist("EXTRAPLAINCMDS", ourconfig, target, stepnum)
+        bitbakecmd(args.builddir, cmd, report, stepnum)
+    cmds = utils.getconfiglist("EXTRAPLAINCMDS", ourconfig, args.target, stepnum)
     for cmd in cmds:
         utils.printheader("Step %s/%s: Running 'plain' command %s" % (stepnum, maxsteps, cmd))
-        bitbakecmd(builddir, cmd, report, stepnum, oeenv=False)
+        bitbakecmd(args.builddir, cmd, report, stepnum, oeenv=False)
 
     # Remove any layers we added in a reverse order
     for layer in reversed(layers):
-        bitbakecmd(builddir, "bitbake-layers remove-layer %s" % layer, report, stepnum)
+        bitbakecmd(args.builddir, "bitbake-layers remove-layer %s" % layer, report, stepnum)
 
-if publish:
+if args.publish_dir:
     utils.printheader("Running publish artefacts")
-    runcmd([scriptsdir + "/publish-artefacts", builddir, publish, target])
+    runcmd([scriptsdir + "/publish-artefacts", args.builddir, args.publish_dir, args.target])
 
-if errorurl and utils.getconfigvar("SENDERRORS", ourconfig, target, stepnum):
+if args.build_url and utils.getconfigvar("SENDERRORS", ourconfig, args.target, stepnum):
     utils.printheader("Sending any error reports")
-    runcmd([scriptsdir + "/upload-error-reports", builddir, errorurl])
+    runcmd([scriptsdir + "/upload-error-reports", args.builddir, args.build_url])
 
 if finalret:
     utils.printheader("There were %s failures" % finalret)
 
@@ -1,11 +1,6 @@
 #!/usr/bin/env python3
 #
-# Iterate over a set of configurations from json.conf, calling setup-config for each one, then running the build.
-#
-# Called with $1 - The json file containing the repositories to use
-# $2 - Where the artefacts were published
-# $3 - The build/release 'name' for release purposes
-# $4 - The shared repos directory (to resolve the repo revision hashes)
+# Send email about the build to prompt QA to begin testing
 #
 
 import json
@@ -16,30 +11,38 @@ import errno
 
 import utils
 
-if len(sys.argv) != 6:
-    print("Incorrect number of parameters, please call as %s <send-email> <repojson> <publish-dir> <release-name> <sharedrepodir>" % sys.argv[0])
-    sys.exit(1)
-
-send = sys.argv[1]
-repojson = sys.argv[2]
-publish = sys.argv[3]
-rel_name = sys.argv[4]
-repodir = sys.argv[5]
-
-if send != "True" or publish == "None" or rel_name == "None":
+parser = utils.ArgParser(description='Sends an email about the build to prompt QA to begin testing.')
+parser.add_argument('send',
+                    help="True to send email, otherwise the script will display a message and exit")
+parser.add_argument('repojson',
+                    help="The json file containing the repositories to use")
+parser.add_argument('sharedrepodir',
+                    help="The shared repos directory (to resolve the repo revision hashes)")
+parser.add_argument('-p', '--publish-dir',
+                    action='store',
+                    help="Where the artefacts were published")
+parser.add_argument('-r', '--release',
+                    action='store',
+                    help="The build/release 'name' for release purposes (optional)")
+
+args = parser.parse_args()
+
+if args.send.lower() != 'true' or not args.publish_dir or not args.release:
     utils.printheader("Not sending QA email")
     sys.exit(0)
 
 scriptsdir = os.path.dirname(os.path.realpath(__file__))
 ourconfig = utils.loadconfig()
 
-with open(repojson) as f:
+with open(args.repojson) as f:
     repos = json.load(f)
 
 buildhashes = ""
 for repo in sorted(repos.keys()):
     # Need the finalised revisions (not 'HEAD')
-    targetrepodir = "%s/%s" % (repodir, repo)
+    targetrepodir = "%s/%s" % (args.sharedrepodir, repo)
     revision = subprocess.check_output(["git", "rev-parse", "HEAD"], cwd=targetrepodir).decode('utf-8').strip()
     buildhashes += "%s: %s\n" % (repo, revision)
 
@@ -57,7 +60,7 @@ mailbcc = utils.getconfig("QAMAIL_BCC", ourconfig)
 if mailbcc:
     email += "Bcc: " + mailbcc + "\n"
 
-email += "Subject: " + "QA notification for completed autobuilder build (%s)\n" % rel_name
+email += "Subject: " + "QA notification for completed autobuilder build (%s)\n" % args.release
 email += '''\n
 A build flagged for QA (%s) was completed on the autobuilder and is available at:\n\n
 %s\n\n
@@ -66,7 +69,7 @@ Build hash information: \n
 
 \nThis is an automated message from the Yocto Project Autobuilder\nGit: git://git.yoctoproject.org/yocto-autobuilder2\nEmail: richard.purdie@linuxfoundation.org\n
 
-''' % (rel_name, publish.replace(web_root, web_url), buildhashes)
+''' % (args.release, args.publish_dir.replace(web_root, web_url), buildhashes)
 
 utils.printheader("Sending QA email")
 subprocess.check_call('echo "' + email +' " | sendmail -t', shell=True)
 
@@ -2,14 +2,6 @@
 #
 # Generate an auto.conf and associated other config files for a given autobuilder configuration
 #
-# Called with $1 - The 'nightly' target the autobuilder is running
-# $2 - The autobuilder step number (a given target can run multiple steps with different configurations)
-# $3 - The target build directory to configure
-# $4 - The poky branch name the build is running on
-# $5 - The name of the repository the build is running on
-# $6 - The directory to publish sstate into
-# $7 - A build-appliance SRCREV to use
-#
 
 import json
 import os
@@ -17,44 +9,57 @@ import sys
 import subprocess
 import errno
 import copy
 
 import utils
 
-if len(sys.argv) != 8:
-    print("Incorrect number of parameters, please call as %s <nightly-target> <stepnumber> <target-builddir> <branch-name> <repo-name> <sstate-publish-dir> <build-app-srcrev>" % sys.argv[0])
-    sys.exit(1)
-
-target = sys.argv[1]
-stepnum = int(sys.argv[2]) + 1 # Our step numbering is 1 2 3 etc., not 0 of buildbot
-builddir = sys.argv[3]
-branchname = sys.argv[4]
-reponame = sys.argv[5]
-sstate_release = sys.argv[6]
-buildappsrcrev = sys.argv[7]
+parser = utils.ArgParser(description='Generates an auto.conf and associated other config files for a given autobuilder configuration.')
+parser.add_argument('target',
+                    help="The 'nightly' target the autobuilder is running")
+parser.add_argument('stepnumber',
+                    help="The autobuilder step number (a given target can run multiple steps with different configurations)")
+parser.add_argument('builddir',
+                    help="The target build directory to configure")
+parser.add_argument('branchname',
+                    help="The poky branch name the build is running on")
+parser.add_argument('reponame',
+                    help="The name of the repository the build is running on")
+parser.add_argument('-s', '--sstateprefix',
+                    default='',
+                    help="The directory prefix to publish sstate into")
+parser.add_argument('-b', '--buildappsrcrev',
+                    default='',
+                    help="A build appliance SRCREV to use")
+
+args = parser.parse_args()
+
+stepnum = int(args.stepnumber) + 1 # Our step numbering is 1 2 3 etc., not 0 of buildbot
 
 ourconfig = utils.loadconfig()
 
 variables = []
 
-autoconf = os.path.join(builddir, "conf", "auto.conf")
+autoconf = os.path.join(args.builddir, "conf", "auto.conf")
 if os.path.exists(autoconf):
     os.remove(autoconf)
 
 # Ensure autoconf's directory exists
 utils.mkdir(os.path.dirname(autoconf))
 
-sdkextraconf = os.path.join(builddir, "conf", "sdk-extra.conf")
+sdkextraconf = os.path.join(args.builddir, "conf", "sdk-extra.conf")
 if os.path.exists(sdkextraconf):
     os.remove(sdkextraconf)
 
 for v in ["MACHINE", "DISTRO", "SDKMACHINE"]:
-    value = utils.getconfigvar(v, ourconfig, target, stepnum)
+    value = utils.getconfigvar(v, ourconfig, args.target, stepnum)
     if value and value != "None":
         variables.append(v + ' = "%s"' % value)
 
-distro = utils.getconfigvar("DISTRO", ourconfig, target, stepnum)
+distro = utils.getconfigvar("DISTRO", ourconfig, args.target, stepnum)
 
 for v in ["DLDIR", "PRSERV"]:
-    value = utils.getconfigvar(v, ourconfig, target, stepnum)
+    value = utils.getconfigvar(v, ourconfig, args.target, stepnum)
     if value:
         variables.append(value)
 
@@ -63,30 +68,30 @@ for v in ["DLDIR", "PRSERV"]:
 # have a directory of symlinks to sstate objects
 # that can be published for the release
 key = "SSTATEDIR"
-if sstate_release != "None":
+if args.sstateprefix:
     key = "SSTATEDIR_RELEASE"
-value = utils.getconfiglist(key, ourconfig, target, stepnum)
+value = utils.getconfiglist(key, ourconfig, args.target, stepnum)
 for v in value:
-    v = v.replace("@RELEASENUM@", sstate_release)
+    v = v.replace("@RELEASENUM@", args.sstateprefix)
     variables.append(v)
 
-if buildappsrcrev != "None" and buildappsrcrev != "DEFAULT":
-    if buildappsrcrev == "AUTOREV":
-        buildappsrcrev = "${AUTOREV}"
-    value = utils.getconfiglist("BUILDAPP_SRCREV", ourconfig, target, stepnum)
+if args.buildappsrcrev and args.buildappsrcrev != "DEFAULT":
+    if args.buildappsrcrev == "AUTOREV":
+        args.buildappsrcrev = "${AUTOREV}"
+    value = utils.getconfiglist("BUILDAPP_SRCREV", ourconfig, args.target, stepnum)
     for v in value:
-        v = v.replace("@SRCREV@", buildappsrcrev)
+        v = v.replace("@SRCREV@", args.buildappsrcrev)
         variables.append(v)
 
-if utils.getconfigvar("BUILDINFO", ourconfig, target, stepnum):
-    infovars = utils.getconfiglist("BUILDINFOVARS", ourconfig, target, stepnum)
+if utils.getconfigvar("BUILDINFO", ourconfig, args.target, stepnum):
+    infovars = utils.getconfiglist("BUILDINFOVARS", ourconfig, args.target, stepnum)
     variables.extend(infovars)
 
-extravars = utils.getconfiglist("extravars", ourconfig, target, stepnum)
+extravars = utils.getconfiglist("extravars", ourconfig, args.target, stepnum)
 if extravars:
     variables.extend(extravars)
 
-bh_path, remoterepo, remotebranch, baseremotebranch = utils.getbuildhistoryconfig(ourconfig, builddir, target, reponame, branchname)
+bh_path, remoterepo, remotebranch, baseremotebranch = utils.getbuildhistoryconfig(ourconfig, args.builddir, args.target, args.reponame, args.branchname)
 if bh_path:
     variables.append('INHERIT += "buildhistory"')
     variables.append('BUILDHISTORY_DIR = "%s"' % bh_path)
@@ -105,10 +110,10 @@ with open(autoconf, "w") as f:
 
 utils.printheader("Writing %s with contents:" % sdkextraconf)
 with open(sdkextraconf, "w") as f:
-    for v in utils.getconfiglist("SDKEXTRAS", ourconfig, target, stepnum):
+    for v in utils.getconfiglist("SDKEXTRAS", ourconfig, args.target, stepnum):
         replace = ""
-        if sstate_release != "None":
-            replace = sstate_release + "/"
+        if args.sstateprefix:
+            replace = args.sstateprefix + "/"
         v = v.replace("@RELEASENUM@", replace)
         print(" " + v)
         f.write(v + "\n")
 
@@ -2,12 +2,6 @@
 #
 # Unpack a shared directory of repos to the autobuilder working directory
 #
-# Called with $1 - The json file containing the repositories to use
-# $2 - The shared directory where the repos are to be transferred from (can be 'None')
-# $3 - The autobuilder working directory
-# $4 - The target to filter the repos to
-# $5 - Directory to publish artefacts to
-#
 
 import json
 import os
@@ -19,43 +13,49 @@ import random
 
 import utils
 
-if len(sys.argv) != 6:
-    print("Incorrect number of parameters, please call as %s repo.json <shared-sources-dir> <autobuilder-workdir> <target> <publish-dir>" % sys.argv[0])
-    sys.exit(1)
-
-repojson = sys.argv[1]
-shared = sys.argv[2]
-targetdir = sys.argv[3]
-target = sys.argv[4]
-publish = None
-if sys.argv[5] != "None":
-    publish = sys.argv[5]
+parser = utils.ArgParser(description='Unpacks a shared directory of repos to the autobuilder working directory.')
+parser.add_argument('repojson',
+                    help="The json file containing the repositories to use")
+parser.add_argument('abworkdir',
+                    help="The autobuilder working directory")
+parser.add_argument('target',
+                    help="The target we want to unpack repos for")
+parser.add_argument('-c', '--cache-dir',
+                    action='store',
+                    help="The shared cache directory where the repos may be transferred from")
+parser.add_argument('-p', '--publish-dir',
+                    action='store',
+                    help="Where to publish artefacts to (optional)")
+
+args = parser.parse_args()
 
 scriptsdir = os.path.dirname(os.path.realpath(__file__))
 ourconfig = utils.loadconfig()
 
 stashdir = utils.getconfig("REPO_STASH_DIR", ourconfig)
 
-needrepos = utils.getconfigvar("NEEDREPOS", ourconfig, target, None)
+needrepos = utils.getconfigvar("NEEDREPOS", ourconfig, args.target, None)
 
-with open(repojson) as f:
+with open(args.repojson) as f:
     repos = json.load(f)
 
-targetsubdir = targetdir + "/repos"
+targetsubdir = args.abworkdir + "/repos"
 
 for repo in sorted(repos.keys()):
     if repo not in needrepos:
         continue
     targetrepodir = "%s/%s" % (targetsubdir, repo)
-    if shared != "None":
+    if args.cache_dir:
         utils.printheader("Copying in repo %s" % repo)
         utils.mkdir(targetrepodir)
-        subprocess.check_call(["rsync", "-a", "%s/%s" % (shared, repo), targetsubdir])
+        subprocess.check_call(["rsync", "-a", "%s/%s" % (args.cache_dir, repo), targetsubdir])
     else:
         utils.printheader("Fetching repo %s" % repo)
         utils.fetchgitrepo(targetsubdir, repo, repos[repo], stashdir)
-    if publish:
-        utils.publishrepo(shared, repo, publish)
+    if args.publish_dir:
+        utils.publishrepo(targetsubdir, repo, args.publish_dir)
 
-subprocess.check_call([scriptsdir + "/layer-config", targetdir, target])
+subprocess.check_call([scriptsdir + "/layer-config", args.abworkdir, args.target])
 
@@ -7,6 +7,7 @@ import time
 import codecs
 import sys
 import re
+import argparse
 
 #
 # Check if config contains all the listed params
@@ -302,3 +303,9 @@ class ErrorReport(object):
         with codecs.open(filename, 'w', 'utf-8') as f:
             json.dump(report, f, indent=4, sort_keys=True)
 
+class ArgParser(argparse.ArgumentParser):
+    def error(self, message):
+        # Show the help if there's an argument parsing error (e.g. no arguments, missing argument, ...)
+        sys.stderr.write('error: %s\n' % message)
+        self.print_help()
+        sys.exit(2)