Diffstat (limited to 'scripts/gen-lockedsig-cache')
-rwxr-xr-x  scripts/gen-lockedsig-cache | 122
1 file changed, 0 insertions, 122 deletions
diff --git a/scripts/gen-lockedsig-cache b/scripts/gen-lockedsig-cache
deleted file mode 100755
index 023015ec41..0000000000
--- a/scripts/gen-lockedsig-cache
+++ /dev/null
@@ -1,122 +0,0 @@
#!/usr/bin/env python3
#
#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: GPL-2.0-only
#

import os
import sys
import shutil
import errno
import time

def mkdir(d):
    try:
        os.makedirs(d)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise e

# extract the hash from the filename: the field after the last colon, up to the first underscore
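# illustrative only: an sstate object name is assumed to look roughly like
#   sstate:zlib:core2-64-poky-linux:1.3:r0:core2-64:11:<sig>_populate_sysroot.tar.zst
# so field 7 of the ':'-split name is '<sig>_<task>...' and the part before
# the first '_' is the signature we want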
def extract_sha(filename):
    return filename.split(':')[7].split('_')[0]

# get all files in a directory, extract the hash and make
# a map from hash to the list of files with that hash
def map_sha_to_files(dir_, prefix, sha_map):
    sstate_prefix_path = dir_ + '/' + prefix + '/'
    if not os.path.exists(sstate_prefix_path):
        return
    sstate_files = os.listdir(sstate_prefix_path)
    for f in sstate_files:
        try:
            sha = extract_sha(f)
            if sha not in sha_map:
                sha_map[sha] = []
            sha_map[sha].append(sstate_prefix_path + f)
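        # names without enough ':'-separated fields (presumably stray or
        # temporary files) make extract_sha() raise IndexError; skip them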
        except IndexError:
            continue

# given a prefix, build a map of hash to list of files
def build_sha_cache(prefix):
    sha_map = {}

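    # sys.argv[2] is the input sstate cache dir; natively built objects are
    # assumed to sit in its '<nativelsbstring>' subdirectory (sys.argv[4])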
    sstate_dir = sys.argv[2]
    map_sha_to_files(sstate_dir, prefix, sha_map)

    native_sstate_dir = sys.argv[2] + '/' + sys.argv[4]
    map_sha_to_files(native_sstate_dir, prefix, sha_map)

    return sha_map

if len(sys.argv) < 5:
    print("Incorrect number of arguments specified")
    print("syntax: gen-lockedsig-cache <locked-sigs.inc> <input-cachedir> <output-cachedir> <nativelsbstring> [filterfile]")
    sys.exit(1)

filterlist = []
if len(sys.argv) > 5:
    print('Reading filter file %s' % sys.argv[5])
    with open(sys.argv[5]) as f:
        for l in f.readlines():
            if ":" in l:
                filterlist.append(l.rstrip())

print('Reading %s' % sys.argv[1])
sigs = []
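# each interesting line of the locked-sigs file is assumed to carry an entry
# like '<recipe>:<task>:<sighash>' as its first whitespace-separated token;
# rsplit(':', 1) splits the signature hash off from the task identifier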
with open(sys.argv[1]) as f:
    for l in f.readlines():
        if ":" in l:
            task, sig = l.split()[0].rsplit(':', 1)
            if filterlist and not task in filterlist:
                print('Filtering out %s' % task)
            else:
                sigs.append(sig)

print('Gathering file list')
start_time = time.perf_counter()
files = set()
sstate_content_cache = {}
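# sstate objects are assumed to live under two levels of hash-prefix
# subdirectories (the first two and the next two characters of the signature);
# list each <prefix>/<prefix2> directory only once and cache the result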
for s in sigs:
    prefix = s[:2]
    prefix2 = s[2:4]
    if prefix not in sstate_content_cache:
        sstate_content_cache[prefix] = {}
    if prefix2 not in sstate_content_cache[prefix]:
        sstate_content_cache[prefix][prefix2] = build_sha_cache(prefix + "/" + prefix2)

    if s in sstate_content_cache[prefix][prefix2]:
        for f in sstate_content_cache[prefix][prefix2][s]:
            files.add(f)

elapsed = time.perf_counter() - start_time
print("Gathering file list took %.1fs" % elapsed)

print('Processing files')
for f in files:
    sys.stdout.write('Processing %s... ' % f)
    if not f.endswith(('.tar.zst', '.siginfo', '.sig')):
        # Most likely a temp file, skip it
        print('skipping')
        continue
    dst = os.path.join(sys.argv[3], os.path.relpath(f, sys.argv[2]))
    destdir = os.path.dirname(dst)
    mkdir(destdir)

    src = os.path.realpath(f)
    if os.path.exists(dst):
        os.remove(dst)
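    # if source and destination are on the same filesystem (same st_dev),
    # hard link instead of copying; otherwise fall back to a plain copy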
    if (os.stat(src).st_dev == os.stat(destdir).st_dev):
        print('linking')
        try:
            os.link(src, dst)
        except OSError as e:
            print('hard linking failed, copying')
            shutil.copyfile(src, dst)
    else:
        print('copying')
        shutil.copyfile(src, dst)

print('Done!')