
The rsync of 20,000 files (650MB) onto the NAS is slow, taking ~3 minutes at idle and longer under load. This is due to the number of files, which is a pain point for NFS. This piece of the build is also a bottleneck since the rest of the build depends on it completing. Switching to a zstd compressed tar brings this down to 2.49s. Other compression methods were much slower, but zstd seems 'acceptable' and speeds things up too.

Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
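For context: the script below expects the shared cache to be a single zstd compressed tarball at <cache-dir>.tar.zst rather than a directory tree rsynced file by file. A minimal producer-side sketch of creating such a tarball, assuming the repos live under /srv/cache/repos and the build uses the prefix /srv/cache/build-1234 (both paths are illustrative, not taken from any autobuilder configuration):

import subprocess

# Hypothetical sketch: pack the shared repos directory into a zstd compressed
# tarball. The prefix (without the .tar.zst suffix) is what would later be
# passed to this script's --cache-dir option. Paths are example values only.
cache_prefix = "/srv/cache/build-1234"
subprocess.check_call(["tar", "-I", "zstd", "-C", "/srv/cache/repos",
                       "-cf", cache_prefix + ".tar.zst", "."])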
#!/usr/bin/env python3
#
# Unpack a shared directory of repos to the autobuilder working directory
#
import json
import os
import sys
import subprocess
import errno
import time
import random
import utils
parser = utils.ArgParser(description='Unpacks a shared directory of repos to the autobuilder working directory.')
parser.add_argument('repojson',
                    help="The json file containing the repositories to use")
parser.add_argument('abworkdir',
                    help="The autobuilder working directory")
parser.add_argument('target',
                    help="The target we want to unpack repos for")
parser.add_argument('-c', '--cache-dir',
                    action='store',
                    help="The shared cache directory where the repos may be transferred from")
parser.add_argument('-p', '--publish-dir',
                    action='store',
                    help="Where to publish artefacts to (optional)")
parser.add_argument('--workername',
                    action='store',
                    default=None,
                    help="the name of the worker the build is running on")
args = parser.parse_args()
scriptsdir = os.path.dirname(os.path.realpath(__file__))

ourconfig = utils.loadconfig()
stashdir = utils.getconfig("REPO_STASH_DIR", ourconfig)
needrepos = utils.getconfigvar("NEEDREPOS", ourconfig, args.target, None)
with open(args.repojson) as f:
    repos = json.load(f)
targetsubdir = args.abworkdir + "/repos"

needrepos_baseddirs = [r.split('/')[0] for r in needrepos]
for repo in sorted(repos.keys()):
    if repo not in needrepos_baseddirs:
        continue
    if args.cache_dir:
        utils.printheader("Copying in repo %s" % repo)
        utils.mkdir(targetsubdir)
        if args.target in ["a-full", "a-quick"]:
            # full/quick need all repo data due to send-qa-email
            subprocess.check_call(["tar", "-I", "zstd", "-C", targetsubdir, "-xf", "%s.tar.zst" % args.cache_dir])
        else:
            subprocess.check_call(["tar", "-I", "zstd", "-C", targetsubdir, "-xf", "%s.tar.zst" % args.cache_dir, "./" + repo])
    else:
        utils.printheader("Fetching repo %s" % repo)
        utils.fetchgitrepo(targetsubdir, repo, repos[repo], stashdir)
    if args.publish_dir:
        utils.publishrepo(targetsubdir, repo, args.publish_dir)

utils.flush()
utils.setup_buildtools_tarball(ourconfig, args.workername, args.abworkdir + "/buildtools")
try:
    subprocess.check_call([scriptsdir + "/layer-config", args.abworkdir, args.target])
except subprocess.CalledProcessError as e:
    sys.exit(e.returncode)
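For reference, a hypothetical invocation on a worker might look like the following; the script filename, paths, worker name and cache prefix are illustrative, only the argument names come from the parser above:

./shared-repo-unpack repos.json /home/pokybuild/yocto-worker/build a-quick --cache-dir /srv/cache/build-1234 --workername example-worker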