#!/usr/bin/env python3
#
# Unpack a shared directory of repos to the autobuilder working directory
#
# Called with $1 - The json file containing the repositories to use
#             $2 - The shared directory where the repos are to be transferred from (can be 'None')
#             $3 - The autobuilder working directory
#             $4 - The target to filter the repos to
#

import json
import os
import sys
import subprocess
import errno
import time
import random

import utils

if len(sys.argv) != 5:
    print("Incorrect number of parameters, please call as %s <repo.json> <shared-dir> <working-dir> <target>" % sys.argv[0])
    sys.exit(1)

repojson = sys.argv[1]
shared = sys.argv[2]
targetdir = sys.argv[3]
target = sys.argv[4]

scriptsdir = os.path.dirname(os.path.realpath(__file__))

# Load the autobuilder configuration and the directories it defines
ourconfig = utils.loadconfig(__file__)

stashdir = ourconfig["REPO_STASH_DIR"]
trashdir = ourconfig["TRASH_DIR"]

# Repositories this target actually needs; anything else in the json is skipped
needrepos = utils.getconfigvar("NEEDREPOS", ourconfig, target, None)

with open(repojson) as f:
    repos = json.load(f)

# Move any existing contents of the working directory to the trash collection
# directory (deleted later by a deferred, ionice'd cleanup)
if os.path.exists(targetdir) and len(os.listdir(targetdir)):
    trashdest = trashdir + "/" + target + "/" + str(int(time.time())) + '-' + str(random.randrange(100, 100000, 2))
    utils.mkdir(trashdest)
    # A list-form subprocess call does not perform shell glob expansion, so
    # move each entry (including dotfiles) explicitly
    for entry in os.listdir(targetdir):
        subprocess.check_call(['mv', os.path.join(targetdir, entry), trashdest])

for repo in sorted(repos.keys()):
    if repo not in needrepos:
        continue

    targetrepodir = "%s/%s" % (targetdir, repo)
    if shared != "None":
        # A shared copy exists, rsync it into the working directory
        utils.printheader("Copying in repo %s" % repo)
        utils.mkdir(targetrepodir)
        subprocess.check_call(["rsync", "-a", "%s/%s" % (shared, repo), targetdir])
    else:
        # No shared copy, fetch the repository directly (via the stash directory)
        utils.printheader("Fetching repo %s" % repo)
        utils.fetchgitrepo(targetdir, repo, repos[repo], stashdir)

# Generate the layer configuration for this target in the working directory
subprocess.check_call([scriptsdir + "/layer-config", targetdir, target])
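
# Example invocation (illustrative only; the json filename, shared directory and
# working directory paths below are assumptions, not values from this repository):
#
#   python3 <this script> repos.json /srv/shared-repos /home/pokybuild/build qemux86-64
#
# Passing the literal string 'None' as the shared directory makes the script fetch
# each needed repository with utils.fetchgitrepo() instead of rsyncing it from the
# shared copy.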