poky/scripts/gen-lockedsig-cache
Richard Purdie 4cb1b4b409 sstate: Add extra directory level
We're having speed issues on the autobuilder due to the number of files in sstate
directories. We previously split these by the first two characters of the hash.
This change extends that split to the next two characters as well, creating a
further layer of directories.
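
For illustration (hash value hypothetical), an sstate object whose signature
begins "3b5d" would previously have been stored as

    SSTATE_DIR/3b/sstate:...:3b5d..._populate_sysroot.tgz

and now lives at

    SSTATE_DIR/3b/5d/sstate:...:3b5d..._populate_sysroot.tgz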

This should significantly speed up eSDK builds on the autobuilder, where the
current sstate layout simply isn't scaling, but it also addresses a more
general complaint.

gen-lockedsig-cache needed to be updated for the new split-level sstate layout.

Also update the tests for the new layout.

(From OE-Core rev: d05bde16bdad761ed8f4c0a48de60c649aa33e85)

Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
2020-01-06 23:54:39 +00:00


#!/usr/bin/env python3

# SPDX-License-Identifier: GPL-2.0-only

import os
import sys
import shutil
import errno
import time

def mkdir(d):
    try:
        os.makedirs(d)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise e

# extract the hash from past the last colon to last underscore
def extract_sha(filename):
    return filename.split(':')[7].split('_')[0]
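
# For illustration (filename hypothetical, following the usual sstate naming
# scheme), given a file named
#   sstate:gawk:core2-64-poky-linux:5.0.1:r0:core2-64:3:3b5dc0f4..._deploy.tgz
# the ':' split takes field 7, "3b5dc0f4..._deploy.tgz", and the '_' split
# then leaves the bare hash "3b5dc0f4...".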

# get all files in a directory, extract the hash and build
# a map from hash to the list of files with that hash
def map_sha_to_files(dir_, prefix, sha_map):
    sstate_prefix_path = dir_ + '/' + prefix + '/'
    if not os.path.exists(sstate_prefix_path):
        return
    sstate_files = os.listdir(sstate_prefix_path)
    for f in sstate_files:
        try:
            sha = extract_sha(f)
            if sha not in sha_map:
                sha_map[sha] = []
            sha_map[sha].append(sstate_prefix_path + f)
        except IndexError:
            continue
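
# e.g. a resulting entry might look like (hypothetical values):
#   sha_map["3b5dc0f4..."] == ['<dir_>/3b/5d/sstate:gawk:...:3b5dc0f4..._deploy.tgz']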

# given a prefix build a map of hash to list of files
def build_sha_cache(prefix):
    sha_map = {}

    sstate_dir = sys.argv[2]
    map_sha_to_files(sstate_dir, prefix, sha_map)

    native_sstate_dir = sys.argv[2] + '/' + sys.argv[4]
    map_sha_to_files(native_sstate_dir, prefix, sha_map)

    return sha_map
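
# Note: both the main sstate directory and its sys.argv[4] subdirectory are
# scanned; the latter holds native sstate (assumed here to be the
# NATIVELSBSTRING subdirectory, e.g. "universal", as used by eSDK builds).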

if len(sys.argv) < 5:
    print("Incorrect number of arguments specified")
    print("syntax: gen-lockedsig-cache <locked-sigs.inc> <input-cachedir> <output-cachedir> <nativelsbstring> [filterfile]")
    sys.exit(1)

filterlist = []
if len(sys.argv) > 5:
    print('Reading filter file %s' % sys.argv[5])
    with open(sys.argv[5]) as f:
        for l in f.readlines():
            if ":" in l:
                filterlist.append(l.rstrip())

print('Reading %s' % sys.argv[1])
sigs = []
with open(sys.argv[1]) as f:
    for l in f.readlines():
        if ":" in l:
            task, sig = l.split()[0].rsplit(':', 1)
            if filterlist and task not in filterlist:
                print('Filtering out %s' % task)
            else:
                sigs.append(sig)
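
# Entries in the locked-sigs file are assumed to look like (hash hypothetical)
#   gawk:do_populate_sysroot:3b5dc0f4... \
# so rsplit(':', 1) splits each entry into a "recipe:task" name and its hash.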

print('Gathering file list')
start_time = time.perf_counter()
files = set()
sstate_content_cache = {}
for s in sigs:
    prefix = s[:2]
    prefix2 = s[2:4]
    if prefix not in sstate_content_cache:
        sstate_content_cache[prefix] = {}
    if prefix2 not in sstate_content_cache[prefix]:
        sstate_content_cache[prefix][prefix2] = build_sha_cache(prefix + "/" + prefix2)

    if s in sstate_content_cache[prefix][prefix2]:
        for f in sstate_content_cache[prefix][prefix2][s]:
            files.add(f)

elapsed = time.perf_counter() - start_time
print("Gathering file list took %.1fs" % elapsed)

print('Processing files')
for f in files:
    sys.stdout.write('Processing %s... ' % f)
    _, ext = os.path.splitext(f)
    if ext not in ['.tgz', '.siginfo', '.sig']:
        # Most likely a temp file, skip it
        print('skipping')
        continue
    dst = os.path.join(sys.argv[3], os.path.relpath(f, sys.argv[2]))
    destdir = os.path.dirname(dst)
    mkdir(destdir)

    src = os.path.realpath(f)
    if os.path.exists(dst):
        os.remove(dst)
    if (os.stat(src).st_dev == os.stat(destdir).st_dev):
        print('linking')
        try:
            os.link(src, dst)
        except OSError:
            print('hard linking failed, copying')
            shutil.copyfile(src, dst)
    else:
        print('copying')
        shutil.copyfile(src, dst)
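
# The st_dev comparison above means hard links are only attempted when source
# and destination are on the same filesystem; anything else falls back to a
# plain copy.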

print('Done!')
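
# Example invocation (paths hypothetical):
#   gen-lockedsig-cache locked-sigs.inc /srv/sstate-cache /tmp/esdk-sstate universal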