Diffstat (limited to 'scripts')
-rwxr-xr-x | scripts/contrib/improve_kernel_cve_report.py | 467 | ||||
-rwxr-xr-x | scripts/lib/devtool/ide_sdk.py | 2 | ||||
-rw-r--r-- | scripts/lib/devtool/standard.py | 12 | ||||
-rw-r--r-- | scripts/lib/devtool/upgrade.py | 2 | ||||
-rw-r--r-- | scripts/lib/recipetool/append.py | 8 | ||||
-rw-r--r-- | scripts/lib/recipetool/create.py | 24 | ||||
-rw-r--r-- | scripts/lib/recipetool/create_go.py | 677 | ||||
-rwxr-xr-x | scripts/test-remote-image | 9 | ||||
-rwxr-xr-x | scripts/wic | 49 |
9 files changed, 578 insertions, 672 deletions
diff --git a/scripts/contrib/improve_kernel_cve_report.py b/scripts/contrib/improve_kernel_cve_report.py new file mode 100755 index 0000000000..829cc4cd30 --- /dev/null +++ b/scripts/contrib/improve_kernel_cve_report.py | |||
@@ -0,0 +1,467 @@ | |||
1 | #! /usr/bin/env python3 | ||
2 | # | ||
3 | # Copyright OpenEmbedded Contributors | ||
4 | # | ||
5 | # The script uses an additional source of CVE information, the linux-vulns | ||
6 | # repository, to enrich the cve-summary from cve-check or VEX. | ||
7 | # It can also use the list of compiled files from the kernel SPDX to ignore | ||
8 | # CVEs that are not applicable because the affected files are not compiled. | ||
9 | # | ||
10 | # It creates a new json file with updated CVE information | ||
11 | # | ||
12 | # Compiled files can be extracted by adding the following to local.conf | ||
13 | # SPDX_INCLUDE_COMPILED_SOURCES:pn-linux-yocto = "1" | ||
14 | # | ||
15 | # Tested with the following CVE sources: | ||
16 | # - https://git.kernel.org/pub/scm/linux/security/vulns.git | ||
17 | # - https://github.com/CVEProject/cvelistV5 | ||
18 | # | ||
19 | # Example: | ||
20 | # python3 ./openembedded-core/scripts/contrib/improve_kernel_cve_report.py --spdx tmp/deploy/spdx/3.0.1/qemux86_64/recipes/recipe-linux-yocto.spdx.json --kernel-version 6.12.27 --datadir ./vulns | ||
21 | # python3 ./openembedded-core/scripts/contrib/improve_kernel_cve_report.py --spdx tmp/deploy/spdx/3.0.1/qemux86_64/recipes/recipe-linux-yocto.spdx.json --datadir ./vulns --old-cve-report build/tmp/log/cve/cve-summary.json | ||
22 | # | ||
23 | # SPDX-License-Identifier: GPL-2.0-only | ||
24 | |||
25 | import argparse | ||
26 | import json | ||
27 | import sys | ||
28 | import logging | ||
29 | import glob | ||
30 | import os | ||
31 | import pathlib | ||
32 | from packaging.version import Version | ||
33 | |||
34 | def is_linux_cve(cve_info): | ||
35 | '''Return True if the CVE belongs to Linux''' | ||
36 | if "affected" not in cve_info["containers"]["cna"]: | ||
37 | return False | ||
38 | for affected in cve_info["containers"]["cna"]["affected"]: | ||
39 | if not "product" in affected: | ||
40 | return False | ||
41 | if affected["product"] == "Linux" and affected["vendor"] == "Linux": | ||
42 | return True | ||
43 | return False | ||
44 | |||
45 | def get_kernel_cves(datadir, compiled_files, version): | ||
46 | """ | ||
47 | Get CVEs for the kernel | ||
48 | """ | ||
49 | cves = {} | ||
50 | |||
51 | check_config = len(compiled_files) > 0 | ||
52 | |||
53 | base_version = Version(f"{version.major}.{version.minor}") | ||
54 | |||
55 | # Check all CVEs from kernel vulns | ||
56 | pattern = os.path.join(datadir, '**', "CVE-*.json") | ||
57 | cve_files = glob.glob(pattern, recursive=True) | ||
58 | not_applicable_config = 0 | ||
59 | fixed_as_later_backport = 0 | ||
60 | vulnerable = 0 | ||
61 | not_vulnerable = 0 | ||
62 | for cve_file in sorted(cve_files): | ||
63 | cve_info = {} | ||
64 | with open(cve_file, "r", encoding='ISO-8859-1') as f: | ||
65 | cve_info = json.load(f) | ||
66 | |||
67 | if len(cve_info) == 0: | ||
68 | logging.error("Not valid data in %s. Aborting", cve_file) | ||
69 | break | ||
70 | |||
71 | if not is_linux_cve(cve_info): | ||
72 | continue | ||
73 | cve_id = os.path.basename(cve_file)[:-5] | ||
74 | description = cve_info["containers"]["cna"]["descriptions"][0]["value"] | ||
75 | if "rejected" in cve_file: | ||
76 | logging.debug("%s is rejected by the CNA", cve_id) | ||
77 | cves[cve_id] = { | ||
78 | "id": cve_id, | ||
79 | "status": "Ignored", | ||
80 | "detail": "rejected", | ||
81 | "summary": description, | ||
82 | "description": f"Rejected by CNA" | ||
83 | } | ||
84 | continue | ||
85 | if any(elem in cve_file for elem in ["review", "reserved", "testing"]): | ||
86 | continue | ||
87 | |||
88 | is_vulnerable, first_affected, last_affected, better_match_first, better_match_last, affected_versions = get_cpe_applicability(cve_info, version) | ||
89 | |||
90 | logging.debug("%s: %s (%s - %s) (%s - %s)", cve_id, is_vulnerable, better_match_first, better_match_last, first_affected, last_affected) | ||
91 | |||
92 | if is_vulnerable is None: | ||
93 | logging.warning("%s doesn't have good metadata", cve_id) | ||
94 | if is_vulnerable: | ||
95 | is_affected = True | ||
96 | affected_files = [] | ||
97 | if check_config: | ||
98 | is_affected, affected_files = check_kernel_compiled_files(compiled_files, cve_info) | ||
99 | |||
100 | if not is_affected and len(affected_files) > 0: | ||
101 | logging.debug( | ||
102 | "%s - not applicable configuration since affected files not compiled: %s", | ||
103 | cve_id, affected_files) | ||
104 | cves[cve_id] = { | ||
105 | "id": cve_id, | ||
106 | "status": "Ignored", | ||
107 | "detail": "not-applicable-config", | ||
108 | "summary": description, | ||
109 | "description": f"Source code not compiled by config. {affected_files}" | ||
110 | } | ||
111 | not_applicable_config += 1 | ||
112 | # Check if we have a backport | ||
113 | else: | ||
114 | if not better_match_last: | ||
115 | fixed_in = last_affected | ||
116 | else: | ||
117 | fixed_in = better_match_last | ||
118 | logging.debug("%s needs backporting (fixed from %s)", cve_id, fixed_in) | ||
119 | cves[cve_id] = { | ||
120 | "id": cve_id, | ||
121 | "status": "Unpatched", | ||
122 | "detail": "version-in-range", | ||
123 | "summary": description, | ||
124 | "description": f"Needs backporting (fixed from {fixed_in})" | ||
125 | } | ||
126 | vulnerable += 1 | ||
127 | if (better_match_last and | ||
128 | Version(f"{better_match_last.major}.{better_match_last.minor}") == base_version): | ||
129 | fixed_as_later_backport += 1 | ||
130 | # Not vulnerable | ||
131 | else: | ||
132 | if not first_affected: | ||
133 | logging.debug("%s - not known affected %s", | ||
134 | cve_id, | ||
135 | better_match_last) | ||
136 | cves[cve_id] = { | ||
137 | "id": cve_id, | ||
138 | "status": "Patched", | ||
139 | "detail": "version-not-in-range", | ||
140 | "summary": description, | ||
141 | "description": "No CPE match" | ||
142 | } | ||
143 | not_vulnerable += 1 | ||
144 | continue | ||
145 | backport_base = Version(f"{better_match_last.major}.{better_match_last.minor}") | ||
146 | if version < first_affected: | ||
147 | logging.debug('%s - fixed-version: only affects %s onwards', | ||
148 | cve_id, | ||
149 | first_affected) | ||
150 | cves[cve_id] = { | ||
151 | "id": cve_id, | ||
152 | "status": "Patched", | ||
153 | "detail": "fixed-version", | ||
154 | "summary": description, | ||
155 | "description": f"only affects {first_affected} onwards" | ||
156 | } | ||
157 | not_vulnerable += 1 | ||
158 | elif last_affected <= version: | ||
159 | logging.debug("%s - fixed-version: Fixed from version %s", | ||
160 | cve_id, | ||
161 | last_affected) | ||
162 | cves[cve_id] = { | ||
163 | "id": cve_id, | ||
164 | "status": "Patched", | ||
165 | "detail": "fixed-version", | ||
166 | "summary": description, | ||
167 | "description": f"fixed-version: Fixed from version {last_affected}" | ||
168 | } | ||
169 | not_vulnerable += 1 | ||
170 | elif backport_base == base_version: | ||
171 | logging.debug("%s - cpe-stable-backport: Backported in %s", | ||
172 | cve_id, | ||
173 | better_match_last) | ||
174 | cves[cve_id] = { | ||
175 | "id": cve_id, | ||
176 | "status": "Patched", | ||
177 | "detail": "cpe-stable-backport", | ||
178 | "summary": description, | ||
179 | "description": f"Backported in {better_match_last}" | ||
180 | } | ||
181 | not_vulnerable += 1 | ||
182 | else: | ||
183 | logging.debug("%s - version not affected %s", cve_id, str(affected_versions)) | ||
184 | cves[cve_id] = { | ||
185 | "id": cve_id, | ||
186 | "status": "Patched", | ||
187 | "detail": "version-not-in-range", | ||
188 | "summary": description, | ||
189 | "description": f"Range {affected_versions}" | ||
190 | } | ||
191 | not_vulnerable += 1 | ||
192 | |||
193 | logging.info("Total CVEs ignored due to not applicable config: %d", not_applicable_config) | ||
194 | logging.info("Total CVEs not vulnerable due version-not-in-range: %d", not_vulnerable) | ||
195 | logging.info("Total vulnerable CVEs: %d", vulnerable) | ||
196 | |||
197 | logging.info("Total CVEs already backported in %s: %s", base_version, | ||
198 | fixed_as_later_backport) | ||
199 | return cves | ||
200 | |||
201 | def read_spdx(spdx_file): | ||
202 | '''Open SPDX file and extract compiled files''' | ||
203 | with open(spdx_file, 'r', encoding='ISO-8859-1') as f: | ||
204 | spdx = json.load(f) | ||
205 | if "spdxVersion" in spdx: | ||
206 | if spdx["spdxVersion"] == "SPDX-2.2": | ||
207 | return read_spdx2(spdx) | ||
208 | if "@graph" in spdx: | ||
209 | return read_spdx3(spdx) | ||
210 | return [] | ||
211 | |||
212 | def read_spdx2(spdx): | ||
213 | ''' | ||
214 | Read spdx2 compiled files from spdx | ||
215 | ''' | ||
216 | cfiles = set() | ||
217 | if 'files' not in spdx: | ||
218 | return cfiles | ||
219 | for item in spdx['files']: | ||
220 | for ftype in item['fileTypes']: | ||
221 | if ftype == "SOURCE": | ||
222 | filename = item["fileName"][item["fileName"].find("/")+1:] | ||
223 | cfiles.add(filename) | ||
224 | return cfiles | ||
225 | |||
226 | def read_spdx3(spdx): | ||
227 | ''' | ||
228 | Read spdx3 compiled files from spdx | ||
229 | ''' | ||
230 | cfiles = set() | ||
231 | for item in spdx["@graph"]: | ||
232 | if "software_primaryPurpose" not in item: | ||
233 | continue | ||
234 | if item["software_primaryPurpose"] == "source": | ||
235 | filename = item['name'][item['name'].find("/")+1:] | ||
236 | cfiles.add(filename) | ||
237 | return cfiles | ||
238 | |||
239 | def check_kernel_compiled_files(compiled_files, cve_info): | ||
240 | """ | ||
241 | Return whether the CVE affects us, based on the compiled files | ||
242 | """ | ||
243 | files_affected = set() | ||
244 | is_affected = False | ||
245 | |||
246 | for item in cve_info['containers']['cna']['affected']: | ||
247 | if "programFiles" in item: | ||
248 | for f in item['programFiles']: | ||
249 | if f not in files_affected: | ||
250 | files_affected.add(f) | ||
251 | |||
252 | if len(files_affected) > 0: | ||
253 | for f in files_affected: | ||
254 | if f in compiled_files: | ||
255 | logging.debug("File match: %s", f) | ||
256 | is_affected = True | ||
257 | return is_affected, files_affected | ||
258 | |||
259 | def get_cpe_applicability(cve_info, v): | ||
260 | ''' | ||
261 | Check if version is affected and return affected versions | ||
262 | ''' | ||
263 | base_branch = Version(f"{v.major}.{v.minor}") | ||
264 | affected = [] | ||
265 | if 'cpeApplicability' not in cve_info["containers"]["cna"]: | ||
266 | return None, None, None, None, None, None | ||
267 | |||
268 | for nodes in cve_info["containers"]["cna"]["cpeApplicability"]: | ||
269 | for node in nodes.values(): | ||
270 | vulnerable = False | ||
271 | matched_branch = False | ||
272 | first_affected = Version("5000") | ||
273 | last_affected = Version("0") | ||
274 | better_match_first = Version("0") | ||
275 | better_match_last = Version("5000") | ||
276 | |||
277 | if len(node[0]['cpeMatch']) == 0: | ||
278 | first_affected = None | ||
279 | last_affected = None | ||
280 | better_match_first = None | ||
281 | better_match_last = None | ||
282 | |||
283 | for cpe_match in node[0]['cpeMatch']: | ||
284 | version_start_including = Version("0") | ||
285 | version_end_excluding = Version("0") | ||
286 | if 'versionStartIncluding' in cpe_match: | ||
287 | version_start_including = Version(cpe_match['versionStartIncluding']) | ||
288 | else: | ||
289 | version_start_including = Version("0") | ||
290 | # if versionEndExcluding is missing we are in a branch, which is not fixed. | ||
291 | if "versionEndExcluding" in cpe_match: | ||
292 | version_end_excluding = Version(cpe_match["versionEndExcluding"]) | ||
293 | else: | ||
294 | # no fix in this branch: use a fake high patch level (.5000) | ||
295 | version_end_excluding = Version( | ||
296 | f"{version_start_including.major}.{version_start_including.minor}.5000" | ||
297 | ) | ||
298 | affected.append(f" {version_start_including}-{version_end_excluding}") | ||
299 | # Detect if versionEnd is fixed in the base branch. It takes precedence over the rest | ||
300 | branch_end = Version(f"{version_end_excluding.major}.{version_end_excluding.minor}") | ||
301 | if branch_end == base_branch: | ||
302 | if version_start_including <= v < version_end_excluding: | ||
303 | vulnerable = cpe_match['vulnerable'] | ||
304 | # If we don't match in our branch, we are not vulnerable, | ||
305 | # since we have a backport | ||
306 | matched_branch = True | ||
307 | better_match_first = version_start_including | ||
308 | better_match_last = version_end_excluding | ||
309 | if version_start_including <= v < version_end_excluding and not matched_branch: | ||
310 | if version_end_excluding < better_match_last: | ||
311 | better_match_first = max(version_start_including, better_match_first) | ||
312 | better_match_last = min(better_match_last, version_end_excluding) | ||
313 | vulnerable = cpe_match['vulnerable'] | ||
314 | matched_branch = True | ||
315 | |||
316 | first_affected = min(version_start_including, first_affected) | ||
317 | last_affected = max(version_end_excluding, last_affected) | ||
318 | # No better match: use the first and last affected instead of the fake .5000 | ||
319 | if vulnerable and better_match_last == Version(f"{base_branch}.5000"): | ||
320 | better_match_last = last_affected | ||
321 | better_match_first = first_affected | ||
322 | return vulnerable, first_affected, last_affected, better_match_first, better_match_last, affected | ||
323 | |||
324 | def copy_data(old, new): | ||
325 | '''Update dictionary with new entries, while keeping the old ones''' | ||
326 | for k in new.keys(): | ||
327 | old[k] = new[k] | ||
328 | return old | ||
329 | |||
330 | # Function taken from cve_check.bbclass. Adapted to cve fields | ||
331 | def cve_update(cve_data, cve, entry): | ||
332 | # If no entry, just add it | ||
333 | if cve not in cve_data: | ||
334 | cve_data[cve] = entry | ||
335 | return | ||
336 | # If we are updating, there might be a change in the status | ||
337 | if cve_data[cve]['status'] == "Unknown": | ||
338 | cve_data[cve] = copy_data(cve_data[cve], entry) | ||
339 | return | ||
340 | if cve_data[cve]['status'] == entry['status']: | ||
341 | return | ||
342 | if entry['status'] == "Unpatched" and cve_data[cve]['status'] == "Patched": | ||
343 | logging.warning("CVE entry %s update from Patched to Unpatched from the scan result", cve) | ||
344 | cve_data[cve] = copy_data(cve_data[cve], entry) | ||
345 | return | ||
346 | if entry['status'] == "Patched" and cve_data[cve]['status'] == "Unpatched": | ||
347 | logging.warning("CVE entry %s update from Unpatched to Patched from the scan result", cve) | ||
348 | cve_data[cve] = copy_data(cve_data[cve], entry) | ||
349 | return | ||
350 | # If we have an "Ignored", it has a priority | ||
351 | if cve_data[cve]['status'] == "Ignored": | ||
352 | logging.debug("CVE %s not updating because Ignored", cve) | ||
353 | return | ||
354 | # If we have an "Ignored", it has a priority | ||
355 | if entry['status'] == "Ignored": | ||
356 | cve_data[cve] = copy_data(cve_data[cve], entry) | ||
357 | logging.debug("CVE entry %s updated from Unpatched to Ignored", cve) | ||
358 | return | ||
359 | logging.warning("Unhandled CVE entry update for %s %s from %s %s to %s", | ||
360 | cve, cve_data[cve]['status'], cve_data[cve]['detail'], entry['status'], entry['detail']) | ||
361 | |||
362 | def main(): | ||
363 | parser = argparse.ArgumentParser( | ||
364 | description="Update cve-summary with kernel compiled files and kernel CVE information" | ||
365 | ) | ||
366 | parser.add_argument( | ||
367 | "-s", | ||
368 | "--spdx", | ||
369 | help="SPDX2/3 for the kernel. Needs to include compiled sources", | ||
370 | ) | ||
371 | parser.add_argument( | ||
372 | "--datadir", | ||
373 | type=pathlib.Path, | ||
374 | help="Directory where CVE data is", | ||
375 | required=True | ||
376 | ) | ||
377 | parser.add_argument( | ||
378 | "--old-cve-report", | ||
379 | help="CVE report to update. (Optional)", | ||
380 | ) | ||
381 | parser.add_argument( | ||
382 | "--kernel-version", | ||
383 | help="Kernel version. Needed if old cve_report is not provided (Optional)", | ||
384 | type=Version | ||
385 | ) | ||
386 | parser.add_argument( | ||
387 | "--new-cve-report", | ||
388 | help="Output file", | ||
389 | default="cve-summary-enhance.json" | ||
390 | ) | ||
391 | parser.add_argument( | ||
392 | "-D", | ||
393 | "--debug", | ||
394 | help='Enable debug output', | ||
395 | action="store_true") | ||
396 | |||
397 | args = parser.parse_args() | ||
398 | |||
399 | if args.debug: | ||
400 | log_level = logging.DEBUG | ||
401 | else: | ||
402 | log_level = logging.INFO | ||
403 | logging.basicConfig(format='[%(filename)s:%(lineno)d] %(message)s', level=log_level) | ||
404 | |||
405 | if not args.kernel_version and not args.old_cve_report: | ||
406 | parser.error("either --kernel-version or --old-cve-report is needed") | ||
407 | return -1 | ||
408 | |||
409 | # by default we don't check the compiled files, unless an SPDX is provided | ||
410 | compiled_files = [] | ||
411 | if args.spdx: | ||
412 | compiled_files = read_spdx(args.spdx) | ||
413 | logging.info("Total compiled files %d", len(compiled_files)) | ||
414 | |||
415 | if args.old_cve_report: | ||
416 | with open(args.old_cve_report, encoding='ISO-8859-1') as f: | ||
417 | cve_report = json.load(f) | ||
418 | else: | ||
419 | # If no summary is provided, we create one | ||
420 | cve_report = { | ||
421 | "version": "1", | ||
422 | "package": [ | ||
423 | { | ||
424 | "name": "linux-yocto", | ||
425 | "version": str(args.kernel_version), | ||
426 | "products": [ | ||
427 | { | ||
428 | "product": "linux_kernel", | ||
429 | "cvesInRecord": "Yes" | ||
430 | } | ||
431 | ], | ||
432 | "issue": [] | ||
433 | } | ||
434 | ] | ||
435 | } | ||
436 | |||
437 | for pkg in cve_report['package']: | ||
438 | is_kernel = False | ||
439 | for product in pkg['products']: | ||
440 | if product['product'] == "linux_kernel": | ||
441 | is_kernel = True | ||
442 | if not is_kernel: | ||
443 | continue | ||
444 | |||
445 | kernel_cves = get_kernel_cves(args.datadir, | ||
446 | compiled_files, | ||
447 | Version(pkg["version"])) | ||
448 | logging.info("Total kernel cves from kernel CNA: %s", len(kernel_cves)) | ||
449 | cves = {issue["id"]: issue for issue in pkg["issue"]} | ||
450 | logging.info("Total kernel before processing cves: %s", len(cves)) | ||
451 | |||
452 | for cve in kernel_cves: | ||
453 | cve_update(cves, cve, kernel_cves[cve]) | ||
454 | |||
455 | pkg["issue"] = [] | ||
456 | for cve in sorted(cves): | ||
457 | pkg["issue"].extend([cves[cve]]) | ||
458 | logging.info("Total kernel cves after processing: %s", len(pkg['issue'])) | ||
459 | |||
460 | with open(args.new_cve_report, "w", encoding='ISO-8859-1') as f: | ||
461 | json.dump(cve_report, f, indent=2) | ||
462 | |||
463 | return 0 | ||
464 | |||
465 | if __name__ == "__main__": | ||
466 | sys.exit(main()) | ||
467 | |||
diff --git a/scripts/lib/devtool/ide_sdk.py b/scripts/lib/devtool/ide_sdk.py index f8cf65f4a8..931408fa74 100755 --- a/scripts/lib/devtool/ide_sdk.py +++ b/scripts/lib/devtool/ide_sdk.py | |||
@@ -334,7 +334,7 @@ class RecipeModified: | |||
334 | self.srctree = workspace[workspacepn]['srctree'] | 334 | self.srctree = workspace[workspacepn]['srctree'] |
335 | # Need to grab this here in case the source is within a subdirectory | 335 | # Need to grab this here in case the source is within a subdirectory |
336 | self.real_srctree = get_real_srctree( | 336 | self.real_srctree = get_real_srctree( |
337 | self.srctree, recipe_d.getVar('S'), recipe_d.getVar('WORKDIR')) | 337 | self.srctree, recipe_d.getVar('S'), recipe_d.getVar('UNPACKDIR')) |
338 | self.bbappend = workspace[workspacepn]['bbappend'] | 338 | self.bbappend = workspace[workspacepn]['bbappend'] |
339 | 339 | ||
340 | self.ide_sdk_dir = os.path.join( | 340 | self.ide_sdk_dir = os.path.join( |
diff --git a/scripts/lib/devtool/standard.py b/scripts/lib/devtool/standard.py index cdfdba43ee..1fd5947c41 100644 --- a/scripts/lib/devtool/standard.py +++ b/scripts/lib/devtool/standard.py | |||
@@ -625,7 +625,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works | |||
625 | srcsubdir = f.read() | 625 | srcsubdir = f.read() |
626 | except FileNotFoundError as e: | 626 | except FileNotFoundError as e: |
627 | raise DevtoolError('Something went wrong with source extraction - the devtool-source class was not active or did not function correctly:\n%s' % str(e)) | 627 | raise DevtoolError('Something went wrong with source extraction - the devtool-source class was not active or did not function correctly:\n%s' % str(e)) |
628 | srcsubdir_rel = os.path.relpath(srcsubdir, os.path.join(tempdir, 'workdir')) | 628 | srcsubdir_rel = os.path.relpath(srcsubdir, os.path.join(tempdir, 'workdir', os.path.relpath(d.getVar('UNPACKDIR'), d.getVar('WORKDIR')))) |
629 | 629 | ||
630 | # Check if work-shared is empty, if yes | 630 | # Check if work-shared is empty, if yes |
631 | # find source and copy to work-shared | 631 | # find source and copy to work-shared |
@@ -742,13 +742,13 @@ def get_staging_kbranch(srcdir): | |||
742 | staging_kbranch = "".join(branch.split('\n')[0]) | 742 | staging_kbranch = "".join(branch.split('\n')[0]) |
743 | return staging_kbranch | 743 | return staging_kbranch |
744 | 744 | ||
745 | def get_real_srctree(srctree, s, workdir): | 745 | def get_real_srctree(srctree, s, unpackdir): |
746 | # Check that recipe isn't using a shared workdir | 746 | # Check that recipe isn't using a shared workdir |
747 | s = os.path.abspath(s) | 747 | s = os.path.abspath(s) |
748 | workdir = os.path.abspath(workdir) | 748 | unpackdir = os.path.abspath(unpackdir) |
749 | if s.startswith(workdir) and s != workdir and os.path.dirname(s) != workdir: | 749 | if s.startswith(unpackdir) and s != unpackdir and os.path.dirname(s) != unpackdir: |
750 | # Handle if S is set to a subdirectory of the source | 750 | # Handle if S is set to a subdirectory of the source |
751 | srcsubdir = os.path.relpath(s, workdir).split(os.sep, 1)[1] | 751 | srcsubdir = os.path.relpath(s, unpackdir).split(os.sep, 1)[1] |
752 | srctree = os.path.join(srctree, srcsubdir) | 752 | srctree = os.path.join(srctree, srcsubdir) |
753 | return srctree | 753 | return srctree |
754 | 754 | ||
@@ -907,7 +907,7 @@ def modify(args, config, basepath, workspace): | |||
907 | 907 | ||
908 | # Need to grab this here in case the source is within a subdirectory | 908 | # Need to grab this here in case the source is within a subdirectory |
909 | srctreebase = srctree | 909 | srctreebase = srctree |
910 | srctree = get_real_srctree(srctree, rd.getVar('S'), rd.getVar('WORKDIR')) | 910 | srctree = get_real_srctree(srctree, rd.getVar('S'), rd.getVar('UNPACKDIR')) |
911 | 911 | ||
912 | bb.utils.mkdirhier(os.path.dirname(appendfile)) | 912 | bb.utils.mkdirhier(os.path.dirname(appendfile)) |
913 | with open(appendfile, 'w') as f: | 913 | with open(appendfile, 'w') as f: |
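The renamed helper behaves exactly as before, only anchored at UNPACKDIR instead of WORKDIR. A minimal standalone sketch with hypothetical paths:

    import os

    def get_real_srctree(srctree, s, unpackdir):
        # same logic as the devtool helper above
        s = os.path.abspath(s)
        unpackdir = os.path.abspath(unpackdir)
        if s.startswith(unpackdir) and s != unpackdir and os.path.dirname(s) != unpackdir:
            # S points below the unpacked sources: drop the first path
            # component ("foo-1.0") and append the rest to the workspace tree
            srcsubdir = os.path.relpath(s, unpackdir).split(os.sep, 1)[1]
            srctree = os.path.join(srctree, srcsubdir)
        return srctree

    # S = "${UNPACKDIR}/foo-1.0/src" for a hypothetical recipe:
    print(get_real_srctree("/work/foo", "/unpack/foo-1.0/src", "/unpack"))
    # -> /work/foo/src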
diff --git a/scripts/lib/devtool/upgrade.py b/scripts/lib/devtool/upgrade.py index 0dace1fb24..d9aca6e2db 100644 --- a/scripts/lib/devtool/upgrade.py +++ b/scripts/lib/devtool/upgrade.py | |||
@@ -571,7 +571,7 @@ def upgrade(args, config, basepath, workspace): | |||
571 | else: | 571 | else: |
572 | srctree = standard.get_default_srctree(config, pn) | 572 | srctree = standard.get_default_srctree(config, pn) |
573 | 573 | ||
574 | srctree_s = standard.get_real_srctree(srctree, rd.getVar('S'), rd.getVar('WORKDIR')) | 574 | srctree_s = standard.get_real_srctree(srctree, rd.getVar('S'), rd.getVar('UNPACKDIR')) |
575 | 575 | ||
576 | # try to automatically discover latest version and revision if not provided on command line | 576 | # try to automatically discover latest version and revision if not provided on command line |
577 | if not args.version and not args.srcrev: | 577 | if not args.version and not args.srcrev: |
diff --git a/scripts/lib/recipetool/append.py b/scripts/lib/recipetool/append.py index 10945d6008..041d79f162 100644 --- a/scripts/lib/recipetool/append.py +++ b/scripts/lib/recipetool/append.py | |||
@@ -317,7 +317,7 @@ def appendsrc(args, files, rd, extralines=None): | |||
317 | import oe.recipeutils | 317 | import oe.recipeutils |
318 | 318 | ||
319 | srcdir = rd.getVar('S') | 319 | srcdir = rd.getVar('S') |
320 | workdir = rd.getVar('WORKDIR') | 320 | unpackdir = rd.getVar('UNPACKDIR') |
321 | 321 | ||
322 | import bb.fetch | 322 | import bb.fetch |
323 | simplified = {} | 323 | simplified = {} |
@@ -336,10 +336,10 @@ def appendsrc(args, files, rd, extralines=None): | |||
336 | src_destdir = os.path.dirname(srcfile) | 336 | src_destdir = os.path.dirname(srcfile) |
337 | if not args.use_workdir: | 337 | if not args.use_workdir: |
338 | if rd.getVar('S') == rd.getVar('STAGING_KERNEL_DIR'): | 338 | if rd.getVar('S') == rd.getVar('STAGING_KERNEL_DIR'): |
339 | srcdir = os.path.join(workdir, 'git') | 339 | srcdir = os.path.join(unpackdir, rd.getVar('BB_GIT_DEFAULT_DESTSUFFIX')) |
340 | if not bb.data.inherits_class('kernel-yocto', rd): | 340 | if not bb.data.inherits_class('kernel-yocto', rd): |
341 | logger.warning('S == STAGING_KERNEL_DIR and non-kernel-yocto, unable to determine path to srcdir, defaulting to ${WORKDIR}/git') | 341 | logger.warning('S == STAGING_KERNEL_DIR and non-kernel-yocto, unable to determine path to srcdir, defaulting to ${UNPACKDIR}/${BB_GIT_DEFAULT_DESTSUFFIX}') |
342 | src_destdir = os.path.join(os.path.relpath(srcdir, workdir), src_destdir) | 342 | src_destdir = os.path.join(os.path.relpath(srcdir, unpackdir), src_destdir) |
343 | src_destdir = os.path.normpath(src_destdir) | 343 | src_destdir = os.path.normpath(src_destdir) |
344 | 344 | ||
345 | if src_destdir and src_destdir != '.': | 345 | if src_destdir and src_destdir != '.': |
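With this change, a kernel recipe where S == STAGING_KERNEL_DIR gets its guessed source directory from UNPACKDIR plus the git fetcher's default destination suffix instead of the hard-coded ${WORKDIR}/git. A sketch with hypothetical values (BB_GIT_DEFAULT_DESTSUFFIX commonly expands to "git"):

    import os

    unpackdir = "/build/tmp/work/.../linux-yocto/6.12/sources"  # hypothetical ${UNPACKDIR}
    destsuffix = "git"  # hypothetical ${BB_GIT_DEFAULT_DESTSUFFIX}
    srcdir = os.path.join(unpackdir, destsuffix)  # mirrors the srcdir computed above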
diff --git a/scripts/lib/recipetool/create.py b/scripts/lib/recipetool/create.py index 3c6ef6719f..ef0ba974a9 100644 --- a/scripts/lib/recipetool/create.py +++ b/scripts/lib/recipetool/create.py | |||
@@ -18,6 +18,7 @@ from urllib.parse import urlparse, urldefrag, urlsplit | |||
18 | import hashlib | 18 | import hashlib |
19 | import bb.fetch2 | 19 | import bb.fetch2 |
20 | logger = logging.getLogger('recipetool') | 20 | logger = logging.getLogger('recipetool') |
21 | from oe.license import tidy_licenses | ||
21 | from oe.license_finder import find_licenses | 22 | from oe.license_finder import find_licenses |
22 | 23 | ||
23 | tinfoil = None | 24 | tinfoil = None |
@@ -638,7 +639,6 @@ def create_recipe(args): | |||
638 | if len(splitline) > 1: | 639 | if len(splitline) > 1: |
639 | if splitline[0] == 'origin' and scriptutils.is_src_url(splitline[1]): | 640 | if splitline[0] == 'origin' and scriptutils.is_src_url(splitline[1]): |
640 | srcuri = reformat_git_uri(splitline[1]) + ';branch=master' | 641 | srcuri = reformat_git_uri(splitline[1]) + ';branch=master' |
641 | srcsubdir = 'git' | ||
642 | break | 642 | break |
643 | 643 | ||
644 | if args.src_subdir: | 644 | if args.src_subdir: |
@@ -736,7 +736,7 @@ def create_recipe(args): | |||
736 | if srcsubdir and not args.binary: | 736 | if srcsubdir and not args.binary: |
737 | # (for binary packages we explicitly specify subdir= when fetching to | 737 | # (for binary packages we explicitly specify subdir= when fetching to |
738 | # match the default value of S, so we don't need to set it in that case) | 738 | # match the default value of S, so we don't need to set it in that case) |
739 | lines_before.append('S = "${WORKDIR}/%s"' % srcsubdir) | 739 | lines_before.append('S = "${UNPACKDIR}/%s"' % srcsubdir) |
740 | lines_before.append('') | 740 | lines_before.append('') |
741 | 741 | ||
742 | if pkgarch: | 742 | if pkgarch: |
@@ -765,6 +765,7 @@ def create_recipe(args): | |||
765 | extrafiles = extravalues.pop('extrafiles', {}) | 765 | extrafiles = extravalues.pop('extrafiles', {}) |
766 | extra_pn = extravalues.pop('PN', None) | 766 | extra_pn = extravalues.pop('PN', None) |
767 | extra_pv = extravalues.pop('PV', None) | 767 | extra_pv = extravalues.pop('PV', None) |
768 | run_tasks = extravalues.pop('run_tasks', "").split() | ||
768 | 769 | ||
769 | if extra_pv and not realpv: | 770 | if extra_pv and not realpv: |
770 | realpv = extra_pv | 771 | realpv = extra_pv |
@@ -825,7 +826,8 @@ def create_recipe(args): | |||
825 | extraoutdir = os.path.join(os.path.dirname(outfile), pn) | 826 | extraoutdir = os.path.join(os.path.dirname(outfile), pn) |
826 | bb.utils.mkdirhier(extraoutdir) | 827 | bb.utils.mkdirhier(extraoutdir) |
827 | for destfn, extrafile in extrafiles.items(): | 828 | for destfn, extrafile in extrafiles.items(): |
828 | shutil.move(extrafile, os.path.join(extraoutdir, destfn)) | 829 | fn = destfn.format(pn=pn, pv=realpv) |
830 | shutil.move(extrafile, os.path.join(extraoutdir, fn)) | ||
829 | 831 | ||
830 | lines = lines_before | 832 | lines = lines_before |
831 | lines_before = [] | 833 | lines_before = [] |
@@ -840,7 +842,7 @@ def create_recipe(args): | |||
840 | line = line.replace(realpv, '${PV}') | 842 | line = line.replace(realpv, '${PV}') |
841 | if pn: | 843 | if pn: |
842 | line = line.replace(pn, '${BPN}') | 844 | line = line.replace(pn, '${BPN}') |
843 | if line == 'S = "${WORKDIR}/${BPN}-${PV}"': | 845 | if line == 'S = "${UNPACKDIR}/${BPN}-${PV}"' or 'tmp-recipetool-' in line: |
844 | skipblank = True | 846 | skipblank = True |
845 | continue | 847 | continue |
846 | elif line.startswith('SRC_URI = '): | 848 | elif line.startswith('SRC_URI = '): |
@@ -918,6 +920,10 @@ def create_recipe(args): | |||
918 | log_info_cond('Recipe %s has been created; further editing may be required to make it fully functional' % outfile, args.devtool) | 920 | log_info_cond('Recipe %s has been created; further editing may be required to make it fully functional' % outfile, args.devtool) |
919 | tinfoil.modified_files() | 921 | tinfoil.modified_files() |
920 | 922 | ||
923 | for task in run_tasks: | ||
924 | logger.info("Running task %s" % task) | ||
925 | tinfoil.build_file_sync(outfile, task) | ||
926 | |||
921 | if tempsrc: | 927 | if tempsrc: |
922 | if args.keep_temp: | 928 | if args.keep_temp: |
923 | logger.info('Preserving temporary directory %s' % tempsrc) | 929 | logger.info('Preserving temporary directory %s' % tempsrc) |
@@ -945,16 +951,6 @@ def fixup_license(value): | |||
945 | return '(' + value + ')' | 951 | return '(' + value + ')' |
946 | return value | 952 | return value |
947 | 953 | ||
948 | def tidy_licenses(value): | ||
949 | """Flat, split and sort licenses""" | ||
950 | from oe.license import flattened_licenses | ||
951 | def _choose(a, b): | ||
952 | str_a, str_b = sorted((" & ".join(a), " & ".join(b)), key=str.casefold) | ||
953 | return ["(%s | %s)" % (str_a, str_b)] | ||
954 | if not isinstance(value, str): | ||
955 | value = " & ".join(value) | ||
956 | return sorted(list(set(flattened_licenses(value, _choose))), key=str.casefold) | ||
957 | |||
958 | def handle_license_vars(srctree, lines_before, handled, extravalues, d): | 954 | def handle_license_vars(srctree, lines_before, handled, extravalues, d): |
959 | lichandled = [x for x in handled if x[0] == 'license'] | 955 | lichandled = [x for x in handled if x[0] == 'license'] |
960 | if lichandled: | 956 | if lichandled: |
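tidy_licenses() is now imported from oe.license rather than defined locally; the removed block above shows what it does: flatten a license expression, resolve alternatives into a canonical "(a | b)" form, then deduplicate and sort case-insensitively. An illustrative call (output is hypothetical but follows the removed implementation):

    from oe.license import tidy_licenses  # available inside the bitbake environment

    tidy_licenses("GPL-2.0-only & (MIT | BSD-3-Clause)")
    # -> ['(BSD-3-Clause | MIT)', 'GPL-2.0-only']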
diff --git a/scripts/lib/recipetool/create_go.py b/scripts/lib/recipetool/create_go.py index 5cc53931f0..4b1fa39d13 100644 --- a/scripts/lib/recipetool/create_go.py +++ b/scripts/lib/recipetool/create_go.py | |||
@@ -10,13 +10,7 @@ | |||
10 | # | 10 | # |
11 | 11 | ||
12 | 12 | ||
13 | from collections import namedtuple | ||
14 | from enum import Enum | ||
15 | from html.parser import HTMLParser | ||
16 | from recipetool.create import RecipeHandler, handle_license_vars | 13 | from recipetool.create import RecipeHandler, handle_license_vars |
17 | from recipetool.create import find_licenses, tidy_licenses, fixup_license | ||
18 | from recipetool.create import determine_from_url | ||
19 | from urllib.error import URLError, HTTPError | ||
20 | 14 | ||
21 | import bb.utils | 15 | import bb.utils |
22 | import json | 16 | import json |
@@ -25,33 +19,20 @@ import os | |||
25 | import re | 19 | import re |
26 | import subprocess | 20 | import subprocess |
27 | import sys | 21 | import sys |
28 | import shutil | ||
29 | import tempfile | 22 | import tempfile |
30 | import urllib.parse | ||
31 | import urllib.request | ||
32 | 23 | ||
33 | 24 | ||
34 | GoImport = namedtuple('GoImport', 'root vcs url suffix') | ||
35 | logger = logging.getLogger('recipetool') | 25 | logger = logging.getLogger('recipetool') |
36 | CodeRepo = namedtuple( | ||
37 | 'CodeRepo', 'path codeRoot codeDir pathMajor pathPrefix pseudoMajor') | ||
38 | 26 | ||
39 | tinfoil = None | 27 | tinfoil = None |
40 | 28 | ||
41 | # Regular expression to parse pseudo semantic version | ||
42 | # see https://go.dev/ref/mod#pseudo-versions | ||
43 | re_pseudo_semver = re.compile( | ||
44 | r"^v[0-9]+\.(0\.0-|\d+\.\d+-([^+]*\.)?0\.)(?P<utc>\d{14})-(?P<commithash>[A-Za-z0-9]+)(\+[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*)?$") | ||
45 | # Regular expression to parse semantic version | ||
46 | re_semver = re.compile( | ||
47 | r"^v(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$") | ||
48 | |||
49 | 29 | ||
50 | def tinfoil_init(instance): | 30 | def tinfoil_init(instance): |
51 | global tinfoil | 31 | global tinfoil |
52 | tinfoil = instance | 32 | tinfoil = instance |
53 | 33 | ||
54 | 34 | ||
35 | |||
55 | class GoRecipeHandler(RecipeHandler): | 36 | class GoRecipeHandler(RecipeHandler): |
56 | """Class to handle the go recipe creation""" | 37 | """Class to handle the go recipe creation""" |
57 | 38 | ||
@@ -83,578 +64,6 @@ class GoRecipeHandler(RecipeHandler): | |||
83 | 64 | ||
84 | return bindir | 65 | return bindir |
85 | 66 | ||
86 | def __resolve_repository_static(self, modulepath): | ||
87 | """Resolve the repository in a static manner | ||
88 | |||
89 | The method is based on the go implementation of | ||
90 | `repoRootFromVCSPaths` in | ||
91 | https://github.com/golang/go/blob/master/src/cmd/go/internal/vcs/vcs.go | ||
92 | """ | ||
93 | |||
94 | url = urllib.parse.urlparse("https://" + modulepath) | ||
95 | req = urllib.request.Request(url.geturl()) | ||
96 | |||
97 | try: | ||
98 | resp = urllib.request.urlopen(req) | ||
99 | # Some modulepath are just redirects to github (or some other vcs | ||
100 | # hoster). Therefore, we check if this modulepath redirects to | ||
101 | # somewhere else | ||
102 | if resp.geturl() != url.geturl(): | ||
103 | bb.debug(1, "%s is redirectred to %s" % | ||
104 | (url.geturl(), resp.geturl())) | ||
105 | url = urllib.parse.urlparse(resp.geturl()) | ||
106 | modulepath = url.netloc + url.path | ||
107 | |||
108 | except URLError as url_err: | ||
109 | # This is probably because the module path | ||
110 | # contains the subdir and major path. Thus, | ||
111 | # we ignore this error for now | ||
112 | logger.debug( | ||
113 | 1, "Failed to fetch page from [%s]: %s" % (url, str(url_err))) | ||
114 | |||
115 | host, _, _ = modulepath.partition('/') | ||
116 | |||
117 | class vcs(Enum): | ||
118 | pathprefix = "pathprefix" | ||
119 | regexp = "regexp" | ||
120 | type = "type" | ||
121 | repo = "repo" | ||
122 | check = "check" | ||
123 | schemelessRepo = "schemelessRepo" | ||
124 | |||
125 | # GitHub | ||
126 | vcsGitHub = {} | ||
127 | vcsGitHub[vcs.pathprefix] = "github.com" | ||
128 | vcsGitHub[vcs.regexp] = re.compile( | ||
129 | r'^(?P<root>github\.com/[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$') | ||
130 | vcsGitHub[vcs.type] = "git" | ||
131 | vcsGitHub[vcs.repo] = "https://\\g<root>" | ||
132 | |||
133 | # Bitbucket | ||
134 | vcsBitbucket = {} | ||
135 | vcsBitbucket[vcs.pathprefix] = "bitbucket.org" | ||
136 | vcsBitbucket[vcs.regexp] = re.compile( | ||
137 | r'^(?P<root>bitbucket\.org/(?P<bitname>[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+))(/(?P<suffix>[A-Za-z0-9_.\-]+))*$') | ||
138 | vcsBitbucket[vcs.type] = "git" | ||
139 | vcsBitbucket[vcs.repo] = "https://\\g<root>" | ||
140 | |||
141 | # IBM DevOps Services (JazzHub) | ||
142 | vcsIBMDevOps = {} | ||
143 | vcsIBMDevOps[vcs.pathprefix] = "hub.jazz.net/git" | ||
144 | vcsIBMDevOps[vcs.regexp] = re.compile( | ||
145 | r'^(?P<root>hub\.jazz\.net/git/[a-z0-9]+/[A-Za-z0-9_.\-]+)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$') | ||
146 | vcsIBMDevOps[vcs.type] = "git" | ||
147 | vcsIBMDevOps[vcs.repo] = "https://\\g<root>" | ||
148 | |||
149 | # Git at Apache | ||
150 | vcsApacheGit = {} | ||
151 | vcsApacheGit[vcs.pathprefix] = "git.apache.org" | ||
152 | vcsApacheGit[vcs.regexp] = re.compile( | ||
153 | r'^(?P<root>git\.apache\.org/[a-z0-9_.\-]+\.git)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$') | ||
154 | vcsApacheGit[vcs.type] = "git" | ||
155 | vcsApacheGit[vcs.repo] = "https://\\g<root>" | ||
156 | |||
157 | # Git at OpenStack | ||
158 | vcsOpenStackGit = {} | ||
159 | vcsOpenStackGit[vcs.pathprefix] = "git.openstack.org" | ||
160 | vcsOpenStackGit[vcs.regexp] = re.compile( | ||
161 | r'^(?P<root>git\.openstack\.org/[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+)(\.git)?(/(?P<suffix>[A-Za-z0-9_.\-]+))*$') | ||
162 | vcsOpenStackGit[vcs.type] = "git" | ||
163 | vcsOpenStackGit[vcs.repo] = "https://\\g<root>" | ||
164 | |||
165 | # chiselapp.com for fossil | ||
166 | vcsChiselapp = {} | ||
167 | vcsChiselapp[vcs.pathprefix] = "chiselapp.com" | ||
168 | vcsChiselapp[vcs.regexp] = re.compile( | ||
169 | r'^(?P<root>chiselapp\.com/user/[A-Za-z0-9]+/repository/[A-Za-z0-9_.\-]+)$') | ||
170 | vcsChiselapp[vcs.type] = "fossil" | ||
171 | vcsChiselapp[vcs.repo] = "https://\\g<root>" | ||
172 | |||
173 | # General syntax for any server. | ||
174 | # Must be last. | ||
175 | vcsGeneralServer = {} | ||
176 | vcsGeneralServer[vcs.regexp] = re.compile( | ||
177 | "(?P<root>(?P<repo>([a-z0-9.\\-]+\\.)+[a-z0-9.\\-]+(:[0-9]+)?(/~?[A-Za-z0-9_.\\-]+)+?)\\.(?P<vcs>bzr|fossil|git|hg|svn))(/~?(?P<suffix>[A-Za-z0-9_.\\-]+))*$") | ||
178 | vcsGeneralServer[vcs.schemelessRepo] = True | ||
179 | |||
180 | vcsPaths = [vcsGitHub, vcsBitbucket, vcsIBMDevOps, | ||
181 | vcsApacheGit, vcsOpenStackGit, vcsChiselapp, | ||
182 | vcsGeneralServer] | ||
183 | |||
184 | if modulepath.startswith("example.net") or modulepath == "rsc.io": | ||
185 | logger.warning("Suspicious module path %s" % modulepath) | ||
186 | return None | ||
187 | if modulepath.startswith("http:") or modulepath.startswith("https:"): | ||
188 | logger.warning("Import path should not start with %s %s" % | ||
189 | ("http", "https")) | ||
190 | return None | ||
191 | |||
192 | rootpath = None | ||
193 | vcstype = None | ||
194 | repourl = None | ||
195 | suffix = None | ||
196 | |||
197 | for srv in vcsPaths: | ||
198 | m = srv[vcs.regexp].match(modulepath) | ||
199 | if vcs.pathprefix in srv: | ||
200 | if host == srv[vcs.pathprefix]: | ||
201 | rootpath = m.group('root') | ||
202 | vcstype = srv[vcs.type] | ||
203 | repourl = m.expand(srv[vcs.repo]) | ||
204 | suffix = m.group('suffix') | ||
205 | break | ||
206 | elif m and srv[vcs.schemelessRepo]: | ||
207 | rootpath = m.group('root') | ||
208 | vcstype = m[vcs.type] | ||
209 | repourl = m[vcs.repo] | ||
210 | suffix = m.group('suffix') | ||
211 | break | ||
212 | |||
213 | return GoImport(rootpath, vcstype, repourl, suffix) | ||
214 | |||
215 | def __resolve_repository_dynamic(self, modulepath): | ||
216 | """Resolve the repository root in a dynamic manner. | ||
217 | |||
218 | The method is based on the go implementation of | ||
219 | `repoRootForImportDynamic` in | ||
220 | https://github.com/golang/go/blob/master/src/cmd/go/internal/vcs/vcs.go | ||
221 | """ | ||
222 | url = urllib.parse.urlparse("https://" + modulepath) | ||
223 | |||
224 | class GoImportHTMLParser(HTMLParser): | ||
225 | |||
226 | def __init__(self): | ||
227 | super().__init__() | ||
228 | self.__srv = {} | ||
229 | |||
230 | def handle_starttag(self, tag, attrs): | ||
231 | if tag == 'meta' and list( | ||
232 | filter(lambda a: (a[0] == 'name' and a[1] == 'go-import'), attrs)): | ||
233 | content = list( | ||
234 | filter(lambda a: (a[0] == 'content'), attrs)) | ||
235 | if content: | ||
236 | srv = content[0][1].split() | ||
237 | self.__srv[srv[0]] = srv | ||
238 | |||
239 | def go_import(self, modulepath): | ||
240 | if modulepath in self.__srv: | ||
241 | srv = self.__srv[modulepath] | ||
242 | return GoImport(srv[0], srv[1], srv[2], None) | ||
243 | return None | ||
244 | |||
245 | url = url.geturl() + "?go-get=1" | ||
246 | req = urllib.request.Request(url) | ||
247 | |||
248 | try: | ||
249 | body = urllib.request.urlopen(req).read() | ||
250 | except HTTPError as http_err: | ||
251 | logger.warning( | ||
252 | "Unclean status when fetching page from [%s]: %s", url, str(http_err)) | ||
253 | body = http_err.fp.read() | ||
254 | except URLError as url_err: | ||
255 | logger.warning( | ||
256 | "Failed to fetch page from [%s]: %s", url, str(url_err)) | ||
257 | return None | ||
258 | |||
259 | parser = GoImportHTMLParser() | ||
260 | parser.feed(body.decode('utf-8')) | ||
261 | parser.close() | ||
262 | |||
263 | return parser.go_import(modulepath) | ||
264 | |||
265 | def __resolve_from_golang_proxy(self, modulepath, version): | ||
266 | """ | ||
267 | Resolves repository data from golang proxy | ||
268 | """ | ||
269 | url = urllib.parse.urlparse("https://proxy.golang.org/" | ||
270 | + modulepath | ||
271 | + "/@v/" | ||
272 | + version | ||
273 | + ".info") | ||
274 | |||
275 | # Transform url to lower case, golang proxy doesn't like mixed case | ||
276 | req = urllib.request.Request(url.geturl().lower()) | ||
277 | |||
278 | try: | ||
279 | resp = urllib.request.urlopen(req) | ||
280 | except URLError as url_err: | ||
281 | logger.warning( | ||
282 | "Failed to fetch page from [%s]: %s", url, str(url_err)) | ||
283 | return None | ||
284 | |||
285 | golang_proxy_res = resp.read().decode('utf-8') | ||
286 | modinfo = json.loads(golang_proxy_res) | ||
287 | |||
288 | if modinfo and 'Origin' in modinfo: | ||
289 | origin = modinfo['Origin'] | ||
290 | _root_url = urllib.parse.urlparse(origin['URL']) | ||
291 | |||
292 | # We normalize the repo URL since we don't want the scheme in it | ||
293 | _subdir = origin['Subdir'] if 'Subdir' in origin else None | ||
294 | _root, _, _ = self.__split_path_version(modulepath) | ||
295 | if _subdir: | ||
296 | _root = _root[:-len(_subdir)].strip('/') | ||
297 | |||
298 | _commit = origin['Hash'] | ||
299 | _vcs = origin['VCS'] | ||
300 | return (GoImport(_root, _vcs, _root_url.geturl(), None), _commit) | ||
301 | |||
302 | return None | ||
303 | |||
304 | def __resolve_repository(self, modulepath): | ||
305 | """ | ||
306 | Resolves src uri from go module-path | ||
307 | """ | ||
308 | repodata = self.__resolve_repository_static(modulepath) | ||
309 | if not repodata or not repodata.url: | ||
310 | repodata = self.__resolve_repository_dynamic(modulepath) | ||
311 | if not repodata or not repodata.url: | ||
312 | logger.error( | ||
313 | "Could not resolve repository for module path '%s'" % modulepath) | ||
314 | # There is no way to recover from this | ||
315 | sys.exit(14) | ||
316 | if repodata: | ||
317 | logger.debug(1, "Resolved download path for import '%s' => %s" % ( | ||
318 | modulepath, repodata.url)) | ||
319 | return repodata | ||
320 | |||
321 | def __split_path_version(self, path): | ||
322 | i = len(path) | ||
323 | dot = False | ||
324 | for j in range(i, 0, -1): | ||
325 | if path[j - 1] < '0' or path[j - 1] > '9': | ||
326 | break | ||
327 | if path[j - 1] == '.': | ||
328 | dot = True | ||
329 | break | ||
330 | i = j - 1 | ||
331 | |||
332 | if i <= 1 or i == len( | ||
333 | path) or path[i - 1] != 'v' or path[i - 2] != '/': | ||
334 | return path, "", True | ||
335 | |||
336 | prefix, pathMajor = path[:i - 2], path[i - 2:] | ||
337 | if dot or len( | ||
338 | pathMajor) <= 2 or pathMajor[2] == '0' or pathMajor == "/v1": | ||
339 | return path, "", False | ||
340 | |||
341 | return prefix, pathMajor, True | ||
342 | |||
343 | def __get_path_major(self, pathMajor): | ||
344 | if not pathMajor: | ||
345 | return "" | ||
346 | |||
347 | if pathMajor[0] != '/' and pathMajor[0] != '.': | ||
348 | logger.error( | ||
349 | "pathMajor suffix %s passed to PathMajorPrefix lacks separator", pathMajor) | ||
350 | |||
351 | if pathMajor.startswith(".v") and pathMajor.endswith("-unstable"): | ||
352 | pathMajor = pathMajor[:len("-unstable") - 2] | ||
353 | |||
354 | return pathMajor[1:] | ||
355 | |||
356 | def __build_coderepo(self, repo, path): | ||
357 | codedir = "" | ||
358 | pathprefix, pathMajor, _ = self.__split_path_version(path) | ||
359 | if repo.root == path: | ||
360 | pathprefix = path | ||
361 | elif path.startswith(repo.root): | ||
362 | codedir = pathprefix[len(repo.root):].strip('/') | ||
363 | |||
364 | pseudoMajor = self.__get_path_major(pathMajor) | ||
365 | |||
366 | logger.debug("root='%s', codedir='%s', prefix='%s', pathMajor='%s', pseudoMajor='%s'", | ||
367 | repo.root, codedir, pathprefix, pathMajor, pseudoMajor) | ||
368 | |||
369 | return CodeRepo(path, repo.root, codedir, | ||
370 | pathMajor, pathprefix, pseudoMajor) | ||
371 | |||
372 | def __resolve_version(self, repo, path, version): | ||
373 | hash = None | ||
374 | coderoot = self.__build_coderepo(repo, path) | ||
375 | |||
376 | def vcs_fetch_all(): | ||
377 | tmpdir = tempfile.mkdtemp() | ||
378 | clone_cmd = "%s clone --bare %s %s" % ('git', repo.url, tmpdir) | ||
379 | bb.process.run(clone_cmd) | ||
380 | log_cmd = "git log --all --pretty='%H %d' --decorate=short" | ||
381 | output, _ = bb.process.run( | ||
382 | log_cmd, shell=True, stderr=subprocess.PIPE, cwd=tmpdir) | ||
383 | bb.utils.prunedir(tmpdir) | ||
384 | return output.strip().split('\n') | ||
385 | |||
386 | def vcs_fetch_remote(tag): | ||
387 | # add * to grab ^{} | ||
388 | refs = {} | ||
389 | ls_remote_cmd = "git ls-remote -q --tags {} {}*".format( | ||
390 | repo.url, tag) | ||
391 | output, _ = bb.process.run(ls_remote_cmd) | ||
392 | output = output.strip().split('\n') | ||
393 | for line in output: | ||
394 | f = line.split(maxsplit=1) | ||
395 | if len(f) != 2: | ||
396 | continue | ||
397 | |||
398 | for prefix in ["HEAD", "refs/heads/", "refs/tags/"]: | ||
399 | if f[1].startswith(prefix): | ||
400 | refs[f[1][len(prefix):]] = f[0] | ||
401 | |||
402 | for key, hash in refs.items(): | ||
403 | if key.endswith(r"^{}"): | ||
404 | refs[key.strip(r"^{}")] = hash | ||
405 | |||
406 | return refs[tag] | ||
407 | |||
408 | m_pseudo_semver = re_pseudo_semver.match(version) | ||
409 | |||
410 | if m_pseudo_semver: | ||
411 | remote_refs = vcs_fetch_all() | ||
412 | short_commit = m_pseudo_semver.group('commithash') | ||
413 | for l in remote_refs: | ||
414 | r = l.split(maxsplit=1) | ||
415 | sha1 = r[0] if len(r) else None | ||
416 | if not sha1: | ||
417 | logger.error( | ||
418 | "Ups: could not resolve abbref commit for %s" % short_commit) | ||
419 | |||
420 | elif sha1.startswith(short_commit): | ||
421 | hash = sha1 | ||
422 | break | ||
423 | else: | ||
424 | m_semver = re_semver.match(version) | ||
425 | if m_semver: | ||
426 | |||
427 | def get_sha1_remote(re): | ||
428 | rsha1 = None | ||
429 | for line in remote_refs: | ||
430 | # Split lines of the following format: | ||
431 | # 22e90d9b964610628c10f673ca5f85b8c2a2ca9a (tag: sometag) | ||
432 | lineparts = line.split(maxsplit=1) | ||
433 | sha1 = lineparts[0] if len(lineparts) else None | ||
434 | refstring = lineparts[1] if len( | ||
435 | lineparts) == 2 else None | ||
436 | if refstring: | ||
437 | # Normalize tag string and split in case of multiple | ||
438 | # regs e.g. (tag: speech/v1.10.0, tag: orchestration/v1.5.0 ...) | ||
439 | refs = refstring.strip('(), ').split(',') | ||
440 | for ref in refs: | ||
441 | if re.match(ref.strip()): | ||
442 | rsha1 = sha1 | ||
443 | return rsha1 | ||
444 | |||
445 | semver = "v" + m_semver.group('major') + "."\ | ||
446 | + m_semver.group('minor') + "."\ | ||
447 | + m_semver.group('patch') \ | ||
448 | + (("-" + m_semver.group('prerelease')) | ||
449 | if m_semver.group('prerelease') else "") | ||
450 | |||
451 | tag = os.path.join( | ||
452 | coderoot.codeDir, semver) if coderoot.codeDir else semver | ||
453 | |||
454 | # probe tag using 'ls-remote', which is faster than fetching | ||
455 | # complete history | ||
456 | hash = vcs_fetch_remote(tag) | ||
457 | if not hash: | ||
458 | # backup: fetch complete history | ||
459 | remote_refs = vcs_fetch_all() | ||
460 | hash = get_sha1_remote( | ||
461 | re.compile(fr"(tag:|HEAD ->) ({tag})")) | ||
462 | |||
463 | logger.debug( | ||
464 | "Resolving commit for tag '%s' -> '%s'", tag, hash) | ||
465 | return hash | ||
466 | |||
467 | def __generate_srcuri_inline_fcn(self, path, version, replaces=None): | ||
468 | """Generate SRC_URI functions for go imports""" | ||
469 | |||
470 | logger.info("Resolving repository for module %s", path) | ||
471 | # First try to resolve repo and commit from golang proxy | ||
472 | # Most info is already there and we don't have to go through the | ||
473 | # repository or even perform the version resolve magic | ||
474 | golang_proxy_info = self.__resolve_from_golang_proxy(path, version) | ||
475 | if golang_proxy_info: | ||
476 | repo = golang_proxy_info[0] | ||
477 | commit = golang_proxy_info[1] | ||
478 | else: | ||
479 | # Fallback | ||
480 | # Resolve repository by 'hand' | ||
481 | repo = self.__resolve_repository(path) | ||
482 | commit = self.__resolve_version(repo, path, version) | ||
483 | |||
484 | url = urllib.parse.urlparse(repo.url) | ||
485 | repo_url = url.netloc + url.path | ||
486 | |||
487 | coderoot = self.__build_coderepo(repo, path) | ||
488 | |||
489 | inline_fcn = "${@go_src_uri(" | ||
490 | inline_fcn += f"'{repo_url}','{version}'" | ||
491 | if repo_url != path: | ||
492 | inline_fcn += f",path='{path}'" | ||
493 | if coderoot.codeDir: | ||
494 | inline_fcn += f",subdir='{coderoot.codeDir}'" | ||
495 | if repo.vcs != 'git': | ||
496 | inline_fcn += f",vcs='{repo.vcs}'" | ||
497 | if replaces: | ||
498 | inline_fcn += f",replaces='{replaces}'" | ||
499 | if coderoot.pathMajor: | ||
500 | inline_fcn += f",pathmajor='{coderoot.pathMajor}'" | ||
501 | inline_fcn += ")}" | ||
502 | |||
503 | return inline_fcn, commit | ||
504 | |||
505 | def __go_handle_dependencies(self, go_mod, srctree, localfilesdir, extravalues, d): | ||
506 | |||
507 | import re | ||
508 | src_uris = [] | ||
509 | src_revs = [] | ||
510 | |||
511 | def generate_src_rev(path, version, commithash): | ||
512 | src_rev = f"# {path}@{version} => {commithash}\n" | ||
513 | # Ups...maybe someone manipulated the source repository and the | ||
514 | # version or commit could not be resolved. This is a sign of | ||
515 | # a) the supply chain was manipulated (bad) | ||
516 | # b) the implementation for the version resolving didn't work | ||
517 | # anymore (less bad) | ||
518 | if not commithash: | ||
519 | src_rev += f"#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n" | ||
520 | src_rev += f"#!!! Could not resolve version !!!\n" | ||
521 | src_rev += f"#!!! Possible supply chain attack !!!\n" | ||
522 | src_rev += f"#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n" | ||
523 | src_rev += f"SRCREV_{path.replace('/', '.')} = \"{commithash}\"" | ||
524 | |||
525 | return src_rev | ||
526 | |||
527 | # we first go over replacement list, because we are essentialy | ||
528 | # interested only in the replaced path | ||
529 | if go_mod['Replace']: | ||
530 | for replacement in go_mod['Replace']: | ||
531 | oldpath = replacement['Old']['Path'] | ||
532 | path = replacement['New']['Path'] | ||
533 | version = '' | ||
534 | if 'Version' in replacement['New']: | ||
535 | version = replacement['New']['Version'] | ||
536 | |||
537 | if os.path.exists(os.path.join(srctree, path)): | ||
538 | # the module refers to the local path, remove it from requirement list | ||
539 | # because it's a local module | ||
540 | go_mod['Require'][:] = [v for v in go_mod['Require'] if v.get('Path') != oldpath] | ||
541 | else: | ||
542 | # Replace the path and the version, so we don't iterate replacement list anymore | ||
543 | for require in go_mod['Require']: | ||
544 | if require['Path'] == oldpath: | ||
545 | require.update({'Path': path, 'Version': version}) | ||
546 | break | ||
547 | |||
548 | for require in go_mod['Require']: | ||
549 | path = require['Path'] | ||
550 | version = require['Version'] | ||
551 | |||
552 | inline_fcn, commithash = self.__generate_srcuri_inline_fcn( | ||
553 | path, version) | ||
554 | src_uris.append(inline_fcn) | ||
555 | src_revs.append(generate_src_rev(path, version, commithash)) | ||
556 | |||
557 | # strip version part from module URL /vXX | ||
558 | baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path']) | ||
559 | pn, _ = determine_from_url(baseurl) | ||
560 | go_mods_basename = "%s-modules.inc" % pn | ||
561 | |||
562 | go_mods_filename = os.path.join(localfilesdir, go_mods_basename) | ||
563 | with open(go_mods_filename, "w") as f: | ||
564 | # We introduce this indirection to make the tests a little easier | ||
565 | f.write("SRC_URI += \"${GO_DEPENDENCIES_SRC_URI}\"\n") | ||
566 | f.write("GO_DEPENDENCIES_SRC_URI = \"\\\n") | ||
567 | for uri in src_uris: | ||
568 | f.write(" " + uri + " \\\n") | ||
569 | f.write("\"\n\n") | ||
570 | for rev in src_revs: | ||
571 | f.write(rev + "\n") | ||
572 | |||
573 | extravalues['extrafiles'][go_mods_basename] = go_mods_filename | ||
574 | |||
575 | def __go_run_cmd(self, cmd, cwd, d): | ||
576 | return bb.process.run(cmd, env=dict(os.environ, PATH=d.getVar('PATH')), | ||
577 | shell=True, cwd=cwd) | ||
578 | |||
579 | def __go_native_version(self, d): | ||
580 | stdout, _ = self.__go_run_cmd("go version", None, d) | ||
581 | m = re.match(r".*\sgo((\d+).(\d+).(\d+))\s([\w\/]*)", stdout) | ||
582 | major = int(m.group(2)) | ||
583 | minor = int(m.group(3)) | ||
584 | patch = int(m.group(4)) | ||
585 | |||
586 | return major, minor, patch | ||
587 | |||
588 | def __go_mod_patch(self, srctree, localfilesdir, extravalues, d): | ||
589 | |||
590 | patchfilename = "go.mod.patch" | ||
591 | go_native_version_major, go_native_version_minor, _ = self.__go_native_version( | ||
592 | d) | ||
593 | self.__go_run_cmd("go mod tidy -go=%d.%d" % | ||
594 | (go_native_version_major, go_native_version_minor), srctree, d) | ||
595 | stdout, _ = self.__go_run_cmd("go mod edit -json", srctree, d) | ||
596 | |||
597 | # Create patch in order to upgrade go version | ||
598 | self.__go_run_cmd("git diff go.mod > %s" % (patchfilename), srctree, d) | ||
599 | # Restore original state | ||
600 | self.__go_run_cmd("git checkout HEAD go.mod go.sum", srctree, d) | ||
601 | |||
602 | go_mod = json.loads(stdout) | ||
603 | tmpfile = os.path.join(localfilesdir, patchfilename) | ||
604 | shutil.move(os.path.join(srctree, patchfilename), tmpfile) | ||
605 | |||
606 | extravalues['extrafiles'][patchfilename] = tmpfile | ||
607 | |||
608 | return go_mod, patchfilename | ||
609 | |||
610 | def __go_mod_vendor(self, go_mod, srctree, localfilesdir, extravalues, d): | ||
611 | # Perform vendoring to retrieve the correct modules.txt | ||
612 | tmp_vendor_dir = tempfile.mkdtemp() | ||
613 | |||
614 | # -v causes to go to print modules.txt to stderr | ||
615 | _, stderr = self.__go_run_cmd( | ||
616 | "go mod vendor -v -o %s" % (tmp_vendor_dir), srctree, d) | ||
617 | |||
618 | modules_txt_basename = "modules.txt" | ||
619 | modules_txt_filename = os.path.join(localfilesdir, modules_txt_basename) | ||
620 | with open(modules_txt_filename, "w") as f: | ||
621 | f.write(stderr) | ||
622 | |||
623 | extravalues['extrafiles'][modules_txt_basename] = modules_txt_filename | ||
624 | |||
625 | licenses = [] | ||
626 | lic_files_chksum = [] | ||
627 | licvalues = find_licenses(tmp_vendor_dir, d) | ||
628 | shutil.rmtree(tmp_vendor_dir) | ||
629 | |||
630 | if licvalues: | ||
631 | for licvalue in licvalues: | ||
632 | license = licvalue[0] | ||
633 | lics = tidy_licenses(fixup_license(license)) | ||
634 | lics = [lic for lic in lics if lic not in licenses] | ||
635 | if len(lics): | ||
636 | licenses.extend(lics) | ||
637 | lic_files_chksum.append( | ||
638 | 'file://src/${GO_IMPORT}/vendor/%s;md5=%s' % (licvalue[1], licvalue[2])) | ||
639 | |||
640 | # strip version part from module URL /vXX | ||
641 | baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path']) | ||
642 | pn, _ = determine_from_url(baseurl) | ||
643 | licenses_basename = "%s-licenses.inc" % pn | ||
644 | |||
645 | licenses_filename = os.path.join(localfilesdir, licenses_basename) | ||
646 | with open(licenses_filename, "w") as f: | ||
647 | f.write("GO_MOD_LICENSES = \"%s\"\n\n" % | ||
648 | ' & '.join(sorted(licenses, key=str.casefold))) | ||
649 | # We introduce this indirection to make the tests a little easier | ||
650 | f.write("LIC_FILES_CHKSUM += \"${VENDORED_LIC_FILES_CHKSUM}\"\n") | ||
651 | f.write("VENDORED_LIC_FILES_CHKSUM = \"\\\n") | ||
652 | for lic in lic_files_chksum: | ||
653 | f.write(" " + lic + " \\\n") | ||
654 | f.write("\"\n") | ||
655 | |||
656 | extravalues['extrafiles'][licenses_basename] = licenses_filename | ||
657 | |||
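To make the dedup-and-emit logic above concrete, here is the same aggregation run over two invented (license, path, md5) tuples of the kind find_licenses() returns, skipping the fixup_license()/tidy_licenses() normalisation step:

    licvalues = [
        ("MIT", "github.com/example/a/LICENSE", "38bacd..."),         # invented
        ("Apache-2.0", "github.com/example/b/LICENSE", "94fa0e..."),  # invented
    ]
    licenses = []
    for lic, path, md5 in licvalues:
        if lic not in licenses:
            licenses.append(lic)
    print('GO_MOD_LICENSES = "%s"' % " & ".join(sorted(licenses, key=str.casefold)))
    # -> GO_MOD_LICENSES = "Apache-2.0 & MIT"
    for _, path, md5 in licvalues:
        print("    file://src/${GO_IMPORT}/vendor/%s;md5=%s \\" % (path, md5))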
658 | def process(self, srctree, classes, lines_before, | 67 | def process(self, srctree, classes, lines_before, |
659 | lines_after, handled, extravalues): | 68 | lines_after, handled, extravalues): |
660 | 69 | ||
@@ -665,63 +74,52 @@ class GoRecipeHandler(RecipeHandler): | |||
665 | if not files: | 74 | if not files: |
666 | return False | 75 | return False |
667 | 76 | ||
668 | d = bb.data.createCopy(tinfoil.config_data) | ||
669 | go_bindir = self.__ensure_go() | 77 | go_bindir = self.__ensure_go() |
670 | if not go_bindir: | 78 | if not go_bindir: |
671 | sys.exit(14) | 79 | sys.exit(14) |
672 | 80 | ||
673 | d.prependVar('PATH', '%s:' % go_bindir) | ||
674 | handled.append('buildsystem') | 81 | handled.append('buildsystem') |
675 | classes.append("go-vendor") | 82 | classes.append("go-mod") |
676 | 83 | ||
677 | stdout, _ = self.__go_run_cmd("go mod edit -json", srctree, d) | 84 | # Use go-mod-update-modules to set the full SRC_URI and LICENSE |
85 | classes.append("go-mod-update-modules") | ||
86 | extravalues["run_tasks"] = "update_modules" | ||
678 | 87 | ||
679 | go_mod = json.loads(stdout) | 88 | with tempfile.TemporaryDirectory(prefix="go-mod-") as tmp_mod_dir: |
680 | go_import = go_mod['Module']['Path'] | 89 | env = dict(os.environ) |
681 | go_version_match = re.match("([0-9]+).([0-9]+)", go_mod['Go']) | 90 | env["PATH"] += f":{go_bindir}" |
682 | go_version_major = int(go_version_match.group(1)) | 91 | env['GOMODCACHE'] = tmp_mod_dir |
683 | go_version_minor = int(go_version_match.group(2)) | ||
684 | src_uris = [] | ||
685 | 92 | ||
686 | localfilesdir = tempfile.mkdtemp(prefix='recipetool-go-') | 93 | stdout = subprocess.check_output(["go", "mod", "edit", "-json"], cwd=srctree, env=env, text=True) |
687 | extravalues.setdefault('extrafiles', {}) | 94 | go_mod = json.loads(stdout) |
95 | go_import = re.sub(r'/v([0-9]+)$', '', go_mod['Module']['Path']) | ||
688 | 96 | ||
689 | # Use an explicit name determined from the module name because it | 97 | localfilesdir = tempfile.mkdtemp(prefix='recipetool-go-') |
690 | # might differ from the actual URL for replaced modules | 98 | extravalues.setdefault('extrafiles', {}) |
691 | # strip version part from module URL /vXX | ||
692 | baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path']) | ||
693 | pn, _ = determine_from_url(baseurl) | ||
694 | 99 | ||
695 | # go.mod files with version < 1.17 may not include all indirect | 100 | # Write the stub ${BPN}-licenses.inc and ${BPN}-go-mods.inc files |
696 | # dependencies. Thus, we have to upgrade the go version. | 101 | basename = "{pn}-licenses.inc" |
697 | if go_version_major == 1 and go_version_minor < 17: | 102 | filename = os.path.join(localfilesdir, basename) |
698 | logger.warning( | 103 | with open(filename, "w") as f: |
699 | "go.mod files generated by Go < 1.17 might have incomplete indirect dependencies.") | 104 | f.write("# FROM RECIPETOOL\n") |
700 | go_mod, patchfilename = self.__go_mod_patch(srctree, localfilesdir, | 105 | extravalues['extrafiles'][f"../{basename}"] = filename |
701 | extravalues, d) | ||
702 | src_uris.append( | ||
703 | "file://%s;patchdir=src/${GO_IMPORT}" % (patchfilename)) | ||
704 | 106 | ||
705 | # Check whether the module is vendored. If so, we have nothing to do. | 107 | basename = "{pn}-go-mods.inc" |
706 | # Otherwise we gather all dependencies and add them to the recipe | 108 | filename = os.path.join(localfilesdir, basename) |
707 | if not os.path.exists(os.path.join(srctree, "vendor")): | 109 | with open(filename, "w") as f: |
110 | f.write("# FROM RECIPETOOL\n") | ||
111 | extravalues['extrafiles'][f"../{basename}"] = filename | ||
708 | 112 | ||
709 | # Write additional $BPN-modules.inc file | 113 | # Do generic license handling |
710 | self.__go_mod_vendor(go_mod, srctree, localfilesdir, extravalues, d) | 114 | d = bb.data.createCopy(tinfoil.config_data) |
711 | lines_before.append("LICENSE += \" & ${GO_MOD_LICENSES}\"") | 115 | handle_license_vars(srctree, lines_before, handled, extravalues, d) |
712 | lines_before.append("require %s-licenses.inc" % (pn)) | 116 | self.__rewrite_lic_vars(lines_before) |
713 | 117 | ||
714 | self.__rewrite_src_uri(lines_before, ["file://modules.txt"]) | 118 | self.__rewrite_src_uri(lines_before) |
715 | 119 | ||
716 | self.__go_handle_dependencies(go_mod, srctree, localfilesdir, extravalues, d) | 120 | lines_before.append('require ${BPN}-licenses.inc') |
717 | lines_before.append("require %s-modules.inc" % (pn)) | 121 | lines_before.append('require ${BPN}-go-mods.inc') |
718 | 122 | lines_before.append(f'GO_IMPORT = "{go_import}"') | |
719 | # Do generic license handling | ||
720 | handle_license_vars(srctree, lines_before, handled, extravalues, d) | ||
721 | self.__rewrite_lic_uri(lines_before) | ||
722 | |||
723 | lines_before.append("GO_IMPORT = \"{}\"".format(baseurl)) | ||
724 | lines_before.append("SRCREV_FORMAT = \"${BPN}\"") | ||
725 | 123 | ||
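The module query in the rewritten process() reduces to one pattern: run `go mod edit -json` with an isolated GOMODCACHE so nothing leaks into the user's module cache, then derive GO_IMPORT from the module path. A condensed sketch (the helper name is mine):

    import json
    import os
    import re
    import subprocess
    import tempfile

    def read_go_import(srctree, go_bindir):
        with tempfile.TemporaryDirectory(prefix="go-mod-") as tmp_mod_dir:
            env = dict(os.environ, GOMODCACHE=tmp_mod_dir)
            env["PATH"] += f":{go_bindir}"
            stdout = subprocess.check_output(["go", "mod", "edit", "-json"],
                                             cwd=srctree, env=env, text=True)
        # GO_IMPORT is the module path minus any /vN major-version suffix
        return re.sub(r'/v([0-9]+)$', '', json.loads(stdout)["Module"]["Path"])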
726 | def __update_lines_before(self, updated, newlines, lines_before): | 124 | def __update_lines_before(self, updated, newlines, lines_before): |
727 | if updated: | 125 | if updated: |
@@ -733,9 +131,9 @@ class GoRecipeHandler(RecipeHandler): | |||
733 | lines_before.append(line) | 131 | lines_before.append(line) |
734 | return updated | 132 | return updated |
735 | 133 | ||
736 | def __rewrite_lic_uri(self, lines_before): | 134 | def __rewrite_lic_vars(self, lines_before): |
737 | |||
738 | def varfunc(varname, origvalue, op, newlines): | 135 | def varfunc(varname, origvalue, op, newlines): |
136 | import urllib.parse | ||
739 | if varname == 'LIC_FILES_CHKSUM': | 137 | if varname == 'LIC_FILES_CHKSUM': |
740 | new_licenses = [] | 138 | new_licenses = [] |
741 | licenses = origvalue.split('\\') | 139 | licenses = origvalue.split('\\') |
@@ -760,12 +158,11 @@ class GoRecipeHandler(RecipeHandler): | |||
760 | lines_before, ['LIC_FILES_CHKSUM'], varfunc) | 158 | lines_before, ['LIC_FILES_CHKSUM'], varfunc) |
761 | return self.__update_lines_before(updated, newlines, lines_before) | 159 | return self.__update_lines_before(updated, newlines, lines_before) |
762 | 160 | ||
763 | def __rewrite_src_uri(self, lines_before, additional_uris = []): | 161 | def __rewrite_src_uri(self, lines_before): |
764 | 162 | ||
765 | def varfunc(varname, origvalue, op, newlines): | 163 | def varfunc(varname, origvalue, op, newlines): |
766 | if varname == 'SRC_URI': | 164 | if varname == 'SRC_URI': |
767 | src_uri = ["git://${GO_IMPORT};destsuffix=git/src/${GO_IMPORT};nobranch=1;name=${BPN};protocol=https"] | 165 | src_uri = ['git://${GO_IMPORT};protocol=https;nobranch=1;destsuffix=${GO_SRCURI_DESTSUFFIX}'] |
768 | src_uri.extend(additional_uris) | ||
769 | return src_uri, None, -1, True | 166 | return src_uri, None, -1, True |
770 | return origvalue, None, 0, True | 167 | return origvalue, None, 0, True |
771 | 168 | ||
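Both rewrite helpers rely on bb.utils.edit_metadata(), whose callback receives (varname, origvalue, op, newlines) and returns a (new_value, new_op, indent, minbreak) tuple; as I read it, indent=-1 aligns continuation lines with the assignment and minbreak=True puts one list item per line, which is why SRC_URI is returned as a list above. A minimal sketch (requires a BitBake environment for bb.utils; the input line is invented):

    import bb.utils

    def varfunc(varname, origvalue, op, newlines):
        if varname == "SRC_URI":
            # Replace the whole value; list items become one URI per line
            return ["git://${GO_IMPORT};protocol=https;nobranch=1"], None, -1, True
        return origvalue, None, 0, True

    lines = ['SRC_URI = "https://example.com/src.tar.gz"\n']
    updated, newlines = bb.utils.edit_metadata(lines, ["SRC_URI"], varfunc)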
diff --git a/scripts/test-remote-image b/scripts/test-remote-image
index d209d22854..1d018992b0 100755
--- a/scripts/test-remote-image
+++ b/scripts/test-remote-image
@@ -152,8 +152,7 @@ class AutoTargetProfile(BaseTargetProfile): | |||
152 | return controller | 152 | return controller |
153 | 153 | ||
154 | def set_kernel_file(self): | 154 | def set_kernel_file(self): |
155 | postconfig = "QA_GET_MACHINE = \"${MACHINE}\"" | 155 | machine = get_bb_var('MACHINE') |
156 | machine = get_bb_var('QA_GET_MACHINE', postconfig=postconfig) | ||
157 | self.kernel_file = self.kernel_type + '-' + machine + '.bin' | 156 | self.kernel_file = self.kernel_type + '-' + machine + '.bin' |
158 | 157 | ||
159 | def set_rootfs_file(self): | 158 | def set_rootfs_file(self): |
@@ -215,13 +214,11 @@ class PublicAB(BaseRepoProfile): | |||
215 | def get_repo_path(self): | 214 | def get_repo_path(self): |
216 | path = '/machines/' | 215 | path = '/machines/' |
217 | 216 | ||
218 | postconfig = "QA_GET_MACHINE = \"${MACHINE}\"" | 217 | machine = get_bb_var('MACHINE') |
219 | machine = get_bb_var('QA_GET_MACHINE', postconfig=postconfig) | ||
220 | if 'qemu' in machine: | 218 | if 'qemu' in machine: |
221 | path += 'qemu/' | 219 | path += 'qemu/' |
222 | 220 | ||
223 | postconfig = "QA_GET_DISTRO = \"${DISTRO}\"" | 221 | distro = get_bb_var('DISTRO') |
224 | distro = get_bb_var('QA_GET_DISTRO', postconfig=postconfig) | ||
225 | path += distro.replace('poky', machine) + '/' | 222 | path += distro.replace('poky', machine) + '/' |
226 | return path | 223 | return path |
227 | 224 | ||
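The simplification here works because get_bb_var() (from oeqa.utils.commands) reads any variable straight out of `bitbake -e` output, making the QA_GET_* postconfig indirection unnecessary. A sketch of the direct calls (the values shown are typical, not guaranteed):

    from oeqa.utils.commands import get_bb_var

    machine = get_bb_var("MACHINE")   # e.g. "qemux86-64"
    distro = get_bb_var("DISTRO")     # e.g. "poky"
    path = "/machines/" + ("qemu/" if "qemu" in machine else "")
    path += distro.replace("poky", machine) + "/"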
diff --git a/scripts/wic b/scripts/wic
index 06e0b48db0..9137208f5e 100755
--- a/scripts/wic
+++ b/scripts/wic
@@ -237,6 +237,13 @@ def wic_ls_subcommand(args, usage_str): | |||
237 | Command-line handling for list content of images. | 237 | Command-line handling for list content of images. |
238 | The real work is done by engine.wic_ls() | 238 | The real work is done by engine.wic_ls() |
239 | """ | 239 | """ |
240 | |||
241 | if args.image_name: | ||
242 | BB_VARS.default_image = args.image_name | ||
243 | |||
244 | if args.vars_dir: | ||
245 | BB_VARS.vars_dir = args.vars_dir | ||
246 | |||
240 | engine.wic_ls(args, args.native_sysroot) | 247 | engine.wic_ls(args, args.native_sysroot) |
241 | 248 | ||
242 | def wic_cp_subcommand(args, usage_str): | 249 | def wic_cp_subcommand(args, usage_str): |
@@ -244,6 +251,12 @@ def wic_cp_subcommand(args, usage_str): | |||
244 | Command-line handling for copying files/dirs to images. | 251 | Command-line handling for copying files/dirs to images. |
245 | The real work is done by engine.wic_cp() | 252 | The real work is done by engine.wic_cp() |
246 | """ | 253 | """ |
254 | if args.image_name: | ||
255 | BB_VARS.default_image = args.image_name | ||
256 | |||
257 | if args.vars_dir: | ||
258 | BB_VARS.vars_dir = args.vars_dir | ||
259 | |||
247 | engine.wic_cp(args, args.native_sysroot) | 260 | engine.wic_cp(args, args.native_sysroot) |
248 | 261 | ||
249 | def wic_rm_subcommand(args, usage_str): | 262 | def wic_rm_subcommand(args, usage_str): |
@@ -251,6 +264,12 @@ def wic_rm_subcommand(args, usage_str): | |||
251 | Command-line handling for removing files/dirs from images. | 264 | Command-line handling for removing files/dirs from images. |
252 | The real work is done by engine.wic_rm() | 265 | The real work is done by engine.wic_rm() |
253 | """ | 266 | """ |
267 | if args.image_name: | ||
268 | BB_VARS.default_image = args.image_name | ||
269 | |||
270 | if args.vars_dir: | ||
271 | BB_VARS.vars_dir = args.vars_dir | ||
272 | |||
254 | engine.wic_rm(args, args.native_sysroot) | 273 | engine.wic_rm(args, args.native_sysroot) |
255 | 274 | ||
256 | def wic_write_subcommand(args, usage_str): | 275 | def wic_write_subcommand(args, usage_str): |
@@ -258,6 +277,12 @@ def wic_write_subcommand(args, usage_str): | |||
258 | Command-line handling for writing images. | 277 | Command-line handling for writing images. |
259 | The real work is done by engine.wic_write() | 278 | The real work is done by engine.wic_write() |
260 | """ | 279 | """ |
280 | if args.image_name: | ||
281 | BB_VARS.default_image = args.image_name | ||
282 | |||
283 | if args.vars_dir: | ||
284 | BB_VARS.vars_dir = args.vars_dir | ||
285 | |||
261 | engine.wic_write(args, args.native_sysroot) | 286 | engine.wic_write(args, args.native_sysroot) |
262 | 287 | ||
263 | def wic_help_subcommand(args, usage_str): | 288 | def wic_help_subcommand(args, usage_str): |
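Each subcommand repeats the same two assignments; they seed the BB_VARS singleton (defined in wic's misc module) so that later variable lookups can read cached <image>.env files instead of invoking bitbake. A hypothetical refactor of the repeated stanza (the helper name is mine, not in the patch):

    def seed_bb_vars(args):
        # Mirrors the four identical stanzas above
        if args.image_name:
            BB_VARS.default_image = args.image_name
        if args.vars_dir:
            BB_VARS.vars_dir = args.vars_dir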
@@ -390,6 +415,12 @@ def wic_init_parser_ls(subparser): | |||
390 | help="image spec: <image>[:<vfat partition>[<path>]]") | 415 | help="image spec: <image>[:<vfat partition>[<path>]]") |
391 | subparser.add_argument("-n", "--native-sysroot", | 416 | subparser.add_argument("-n", "--native-sysroot", |
392 | help="path to the native sysroot containing the tools") | 417 | help="path to the native sysroot containing the tools") |
418 | subparser.add_argument("-e", "--image-name", dest="image_name", | ||
419 | help="name of the image to use the artifacts from " | ||
420 | "e.g. core-image-sato") | ||
421 | subparser.add_argument("-v", "--vars", dest='vars_dir', | ||
422 | help="directory with <image>.env files that store " | ||
423 | "bitbake variables") | ||
393 | 424 | ||
394 | def imgpathtype(arg): | 425 | def imgpathtype(arg): |
395 | img = imgtype(arg) | 426 | img = imgtype(arg) |
@@ -404,6 +435,12 @@ def wic_init_parser_cp(subparser): | |||
404 | help="image spec: <image>:<vfat partition>[<path>] or <file>") | 435 | help="image spec: <image>:<vfat partition>[<path>] or <file>") |
405 | subparser.add_argument("-n", "--native-sysroot", | 436 | subparser.add_argument("-n", "--native-sysroot", |
406 | help="path to the native sysroot containing the tools") | 437 | help="path to the native sysroot containing the tools") |
438 | subparser.add_argument("-e", "--image-name", dest="image_name", | ||
439 | help="name of the image to use the artifacts from " | ||
440 | "e.g. core-image-sato") | ||
441 | subparser.add_argument("-v", "--vars", dest='vars_dir', | ||
442 | help="directory with <image>.env files that store " | ||
443 | "bitbake variables") | ||
407 | 444 | ||
408 | def wic_init_parser_rm(subparser): | 445 | def wic_init_parser_rm(subparser): |
409 | subparser.add_argument("path", type=imgpathtype, | 446 | subparser.add_argument("path", type=imgpathtype, |
@@ -413,6 +450,12 @@ def wic_init_parser_rm(subparser): | |||
413 | subparser.add_argument("-r", dest="recursive_delete", action="store_true", default=False, | 450 | subparser.add_argument("-r", dest="recursive_delete", action="store_true", default=False, |
414 | help="remove directories and their contents recursively, " | 451 | help="remove directories and their contents recursively, " |
415 | " this only applies to ext* partition") | 452 | " this only applies to ext* partition") |
453 | subparser.add_argument("-e", "--image-name", dest="image_name", | ||
454 | help="name of the image to use the artifacts from " | ||
455 | "e.g. core-image-sato") | ||
456 | subparser.add_argument("-v", "--vars", dest='vars_dir', | ||
457 | help="directory with <image>.env files that store " | ||
458 | "bitbake variables") | ||
416 | 459 | ||
417 | def expandtype(rules): | 460 | def expandtype(rules): |
418 | """ | 461 | """ |
@@ -454,6 +497,12 @@ def wic_init_parser_write(subparser): | |||
454 | help="expand rules: auto or <partition>:<size>[,<partition>:<size>]") | 497 | help="expand rules: auto or <partition>:<size>[,<partition>:<size>]") |
455 | subparser.add_argument("-n", "--native-sysroot", | 498 | subparser.add_argument("-n", "--native-sysroot", |
456 | help="path to the native sysroot containing the tools") | 499 | help="path to the native sysroot containing the tools") |
500 | subparser.add_argument("--image-name", dest="image_name", | ||
501 | help="name of the image to use the artifacts from " | ||
502 | "e.g. core-image-sato") | ||
503 | subparser.add_argument("-v", "--vars", dest='vars_dir', | ||
504 | help="directory with <image>.env files that store " | ||
505 | "bitbake variables") | ||
457 | 506 | ||
458 | def wic_init_parser_help(subparser): | 507 | def wic_init_parser_help(subparser): |
459 | helpparsers = subparser.add_subparsers(dest='help_topic', help=hlp.wic_usage) | 508 | helpparsers = subparser.add_subparsers(dest='help_topic', help=hlp.wic_usage) |
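With these parser additions, the image-content subcommands accept the same artifact hints as `wic create`: for example, `wic ls ./out.wic:1 -e core-image-sato` would read partition 1 using artifacts from the core-image-sato build, and `-v <dir>` points at a directory of pre-dumped <image>.env files (invocation and image name are illustrative). Note that `wic write` registers only the long `--image-name` form, presumably because `-e` is already taken by its `--expand` flag.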