diff options
author | Lukas Funke <lukas.funke@weidmueller.com> | 2023-11-02 16:53:13 +0100 |
---|---|---|
committer | Richard Purdie <richard.purdie@linuxfoundation.org> | 2023-11-06 16:41:11 +0000 |
commit | 7de36b25d1df97b105fb52b5dc3c62ab1800fb3a (patch) | |
tree | ecc8598d094ce00f0fa50dd53be4f0ce34fc0b13 | |
parent | f7f304fa3eac4ac02b39497e6aa89217a55bd51d (diff) | |
download | poky-7de36b25d1df97b105fb52b5dc3c62ab1800fb3a.tar.gz |
recipetool: Add handler to create go recipes
[RP: Replace python 3.9 use of removeprefix()]
(From OE-Core rev: 049afb933f42851d56d2e6522150a32bcccf2a6f)
Signed-off-by: Lukas Funke <lukas.funke@weidmueller.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
-rw-r--r-- | scripts/lib/recipetool/create_go.py | 751 |
1 file changed, 751 insertions, 0 deletions
diff --git a/scripts/lib/recipetool/create_go.py b/scripts/lib/recipetool/create_go.py new file mode 100644 index 0000000000..21dcb41271 --- /dev/null +++ b/scripts/lib/recipetool/create_go.py | |||
@@ -0,0 +1,751 @@ | |||
1 | # Recipe creation tool - go support plugin | ||
2 | # | ||
# The code is based on golang internals. See the affected
4 | # methods for further reference and information. | ||
5 | # | ||
6 | # Copyright (C) 2023 Weidmueller GmbH & Co KG | ||
7 | # Author: Lukas Funke <lukas.funke@weidmueller.com> | ||
8 | # | ||
9 | # SPDX-License-Identifier: GPL-2.0-only | ||
10 | # | ||
11 | |||
12 | |||
13 | from collections import namedtuple | ||
14 | from enum import Enum | ||
15 | from html.parser import HTMLParser | ||
16 | from recipetool.create import RecipeHandler, handle_license_vars | ||
17 | from recipetool.create import guess_license, tidy_licenses, fixup_license | ||
18 | from recipetool.create import determine_from_url | ||
19 | from urllib.error import URLError | ||
20 | |||
21 | import bb.utils | ||
22 | import json | ||
23 | import logging | ||
24 | import os | ||
25 | import re | ||
26 | import subprocess | ||
27 | import sys | ||
28 | import shutil | ||
29 | import tempfile | ||
30 | import urllib.parse | ||
31 | import urllib.request | ||
32 | |||
33 | |||
# Resolved VCS information for a single module path:
# root = repository root path, vcs = vcs type (e.g. 'git'),
# url = repository url, suffix = path below the repository root
GoImport = namedtuple('GoImport', 'root vcs url suffix')
logger = logging.getLogger('recipetool')
# Decomposition of a module path relative to its repository
# (mirrors go's module/coderepo terminology)
CodeRepo = namedtuple(
    'CodeRepo', 'path codeRoot codeDir pathMajor pathPrefix pseudoMajor')

# bitbake tinfoil handle; populated via tinfoil_init() before use
tinfoil = None

# Regular expression to parse pseudo semantic version
# see https://go.dev/ref/mod#pseudo-versions
re_pseudo_semver = re.compile(
    r"^v[0-9]+\.(0\.0-|\d+\.\d+-([^+]*\.)?0\.)(?P<utc>\d{14})-(?P<commithash>[A-Za-z0-9]+)(\+[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*)?$")
# Regular expression to parse semantic version
re_semver = re.compile(
    r"^v(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$")
48 | |||
49 | |||
def tinfoil_init(instance):
    """Store the tinfoil instance in the module global for later use."""
    global tinfoil
    tinfoil = instance
53 | |||
54 | |||
class GoRecipeHandler(RecipeHandler):
    """Class to handle the go recipe creation"""

    @staticmethod
    def __ensure_go():
        """Check if the 'go' command is available in the recipes

        Parses the go-native recipe and populates its sysroot on demand.
        Returns STAGING_BINDIR_NATIVE (the directory expected to contain
        the 'go' binary), or None if it could not be provided.
        """
        recipe = "go-native"
        if not tinfoil.recipes_parsed:
            tinfoil.parse_recipes()
        try:
            rd = tinfoil.parse_recipe(recipe)
        except bb.providers.NoProvider:
            bb.error(
                "Nothing provides '%s' which is required for the build" % (recipe))
            bb.note(
                "You will likely need to add a layer that provides '%s'" % (recipe))
            return None

        bindir = rd.getVar('STAGING_BINDIR_NATIVE')
        gopath = os.path.join(bindir, 'go')

        if not os.path.exists(gopath):
            # Populate the native sysroot on demand, then re-check
            tinfoil.build_targets(recipe, 'addto_recipe_sysroot')

            if not os.path.exists(gopath):
                # Fix: the original applied '%' to the lone string 'go' and
                # left 'recipe' outside the format expression, raising
                # TypeError at this logging site instead of reporting the
                # actual problem.
                logger.error(
                    '%s required to process specified source, but %s did not seem to populate it' % ('go', recipe))
                return None

        return bindir
85 | |||
86 | def __resolve_repository_static(self, modulepath): | ||
87 | """Resolve the repository in a static manner | ||
88 | |||
89 | The method is based on the go implementation of | ||
90 | `repoRootFromVCSPaths` in | ||
91 | https://github.com/golang/go/blob/master/src/cmd/go/internal/vcs/vcs.go | ||
92 | """ | ||
93 | |||
94 | url = urllib.parse.urlparse("https://" + modulepath) | ||
95 | req = urllib.request.Request(url.geturl()) | ||
96 | |||
97 | try: | ||
98 | resp = urllib.request.urlopen(req) | ||
99 | # Some modulepath are just redirects to github (or some other vcs | ||
100 | # hoster). Therefore, we check if this modulepath redirects to | ||
101 | # somewhere else | ||
102 | if resp.geturl() != url.geturl(): | ||
103 | bb.debug(1, "%s is redirectred to %s" % | ||
104 | (url.geturl(), resp.geturl())) | ||
105 | url = urllib.parse.urlparse(resp.geturl()) | ||
106 | modulepath = url.netloc + url.path | ||
107 | |||
108 | except URLError as url_err: | ||
109 | # This is probably because the module path | ||
110 | # contains the subdir and major path. Thus, | ||
111 | # we ignore this error for now | ||
112 | logger.debug( | ||
113 | 1, "Failed to fetch page from [%s]: %s" % (url, str(url_err))) | ||
114 | |||
115 | host, _, _ = modulepath.partition('/') | ||
116 | |||
117 | class vcs(Enum): | ||
118 | pathprefix = "pathprefix" | ||
119 | regexp = "regexp" | ||
120 | type = "type" | ||
121 | repo = "repo" | ||
122 | check = "check" | ||
123 | schemelessRepo = "schemelessRepo" | ||
124 | |||
125 | # GitHub | ||
126 | vcsGitHub = {} | ||
127 | vcsGitHub[vcs.pathprefix] = "github.com" | ||
128 | vcsGitHub[vcs.regexp] = re.compile( | ||
129 | r'^(?P<root>github\.com/[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$') | ||
130 | vcsGitHub[vcs.type] = "git" | ||
131 | vcsGitHub[vcs.repo] = "https://\\g<root>" | ||
132 | |||
133 | # Bitbucket | ||
134 | vcsBitbucket = {} | ||
135 | vcsBitbucket[vcs.pathprefix] = "bitbucket.org" | ||
136 | vcsBitbucket[vcs.regexp] = re.compile( | ||
137 | r'^(?P<root>bitbucket\.org/(?P<bitname>[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+))(/(?P<suffix>[A-Za-z0-9_.\-]+))*$') | ||
138 | vcsBitbucket[vcs.type] = "git" | ||
139 | vcsBitbucket[vcs.repo] = "https://\\g<root>" | ||
140 | |||
141 | # IBM DevOps Services (JazzHub) | ||
142 | vcsIBMDevOps = {} | ||
143 | vcsIBMDevOps[vcs.pathprefix] = "hub.jazz.net/git" | ||
144 | vcsIBMDevOps[vcs.regexp] = re.compile( | ||
145 | r'^(?P<root>hub\.jazz\.net/git/[a-z0-9]+/[A-Za-z0-9_.\-]+)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$') | ||
146 | vcsIBMDevOps[vcs.type] = "git" | ||
147 | vcsIBMDevOps[vcs.repo] = "https://\\g<root>" | ||
148 | |||
149 | # Git at Apache | ||
150 | vcsApacheGit = {} | ||
151 | vcsApacheGit[vcs.pathprefix] = "git.apache.org" | ||
152 | vcsApacheGit[vcs.regexp] = re.compile( | ||
153 | r'^(?P<root>git\.apache\.org/[a-z0-9_.\-]+\.git)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$') | ||
154 | vcsApacheGit[vcs.type] = "git" | ||
155 | vcsApacheGit[vcs.repo] = "https://\\g<root>" | ||
156 | |||
157 | # Git at OpenStack | ||
158 | vcsOpenStackGit = {} | ||
159 | vcsOpenStackGit[vcs.pathprefix] = "git.openstack.org" | ||
160 | vcsOpenStackGit[vcs.regexp] = re.compile( | ||
161 | r'^(?P<root>git\.openstack\.org/[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+)(\.git)?(/(?P<suffix>[A-Za-z0-9_.\-]+))*$') | ||
162 | vcsOpenStackGit[vcs.type] = "git" | ||
163 | vcsOpenStackGit[vcs.repo] = "https://\\g<root>" | ||
164 | |||
165 | # chiselapp.com for fossil | ||
166 | vcsChiselapp = {} | ||
167 | vcsChiselapp[vcs.pathprefix] = "chiselapp.com" | ||
168 | vcsChiselapp[vcs.regexp] = re.compile( | ||
169 | r'^(?P<root>chiselapp\.com/user/[A-Za-z0-9]+/repository/[A-Za-z0-9_.\-]+)$') | ||
170 | vcsChiselapp[vcs.type] = "fossil" | ||
171 | vcsChiselapp[vcs.repo] = "https://\\g<root>" | ||
172 | |||
173 | # General syntax for any server. | ||
174 | # Must be last. | ||
175 | vcsGeneralServer = {} | ||
176 | vcsGeneralServer[vcs.regexp] = re.compile( | ||
177 | "(?P<root>(?P<repo>([a-z0-9.\\-]+\\.)+[a-z0-9.\\-]+(:[0-9]+)?(/~?[A-Za-z0-9_.\\-]+)+?)\\.(?P<vcs>bzr|fossil|git|hg|svn))(/~?(?P<suffix>[A-Za-z0-9_.\\-]+))*$") | ||
178 | vcsGeneralServer[vcs.schemelessRepo] = True | ||
179 | |||
180 | vcsPaths = [vcsGitHub, vcsBitbucket, vcsIBMDevOps, | ||
181 | vcsApacheGit, vcsOpenStackGit, vcsChiselapp, | ||
182 | vcsGeneralServer] | ||
183 | |||
184 | if modulepath.startswith("example.net") or modulepath == "rsc.io": | ||
185 | logger.warning("Suspicious module path %s" % modulepath) | ||
186 | return None | ||
187 | if modulepath.startswith("http:") or modulepath.startswith("https:"): | ||
188 | logger.warning("Import path should not start with %s %s" % | ||
189 | ("http", "https")) | ||
190 | return None | ||
191 | |||
192 | rootpath = None | ||
193 | vcstype = None | ||
194 | repourl = None | ||
195 | suffix = None | ||
196 | |||
197 | for srv in vcsPaths: | ||
198 | m = srv[vcs.regexp].match(modulepath) | ||
199 | if vcs.pathprefix in srv: | ||
200 | if host == srv[vcs.pathprefix]: | ||
201 | rootpath = m.group('root') | ||
202 | vcstype = srv[vcs.type] | ||
203 | repourl = m.expand(srv[vcs.repo]) | ||
204 | suffix = m.group('suffix') | ||
205 | break | ||
206 | elif m and srv[vcs.schemelessRepo]: | ||
207 | rootpath = m.group('root') | ||
208 | vcstype = m[vcs.type] | ||
209 | repourl = m[vcs.repo] | ||
210 | suffix = m.group('suffix') | ||
211 | break | ||
212 | |||
213 | return GoImport(rootpath, vcstype, repourl, suffix) | ||
214 | |||
215 | def __resolve_repository_dynamic(self, modulepath): | ||
216 | """Resolve the repository root in a dynamic manner. | ||
217 | |||
218 | The method is based on the go implementation of | ||
219 | `repoRootForImportDynamic` in | ||
220 | https://github.com/golang/go/blob/master/src/cmd/go/internal/vcs/vcs.go | ||
221 | """ | ||
222 | url = urllib.parse.urlparse("https://" + modulepath) | ||
223 | |||
224 | class GoImportHTMLParser(HTMLParser): | ||
225 | |||
226 | def __init__(self): | ||
227 | super().__init__() | ||
228 | self.__srv = [] | ||
229 | |||
230 | def handle_starttag(self, tag, attrs): | ||
231 | if tag == 'meta' and list( | ||
232 | filter(lambda a: (a[0] == 'name' and a[1] == 'go-import'), attrs)): | ||
233 | content = list( | ||
234 | filter(lambda a: (a[0] == 'content'), attrs)) | ||
235 | if content: | ||
236 | self.__srv = content[0][1].split() | ||
237 | |||
238 | @property | ||
239 | def import_prefix(self): | ||
240 | return self.__srv[0] if len(self.__srv) else None | ||
241 | |||
242 | @property | ||
243 | def vcs(self): | ||
244 | return self.__srv[1] if len(self.__srv) else None | ||
245 | |||
246 | @property | ||
247 | def repourl(self): | ||
248 | return self.__srv[2] if len(self.__srv) else None | ||
249 | |||
250 | url = url.geturl() + "?go-get=1" | ||
251 | req = urllib.request.Request(url) | ||
252 | |||
253 | try: | ||
254 | resp = urllib.request.urlopen(req) | ||
255 | |||
256 | except URLError as url_err: | ||
257 | logger.warning( | ||
258 | "Failed to fetch page from [%s]: %s", url, str(url_err)) | ||
259 | return None | ||
260 | |||
261 | parser = GoImportHTMLParser() | ||
262 | parser.feed(resp.read().decode('utf-8')) | ||
263 | parser.close() | ||
264 | |||
265 | return GoImport(parser.import_prefix, parser.vcs, parser.repourl, None) | ||
266 | |||
267 | def __resolve_from_golang_proxy(self, modulepath, version): | ||
268 | """ | ||
269 | Resolves repository data from golang proxy | ||
270 | """ | ||
271 | url = urllib.parse.urlparse("https://proxy.golang.org/" | ||
272 | + modulepath | ||
273 | + "/@v/" | ||
274 | + version | ||
275 | + ".info") | ||
276 | |||
277 | # Transform url to lower case, golang proxy doesn't like mixed case | ||
278 | req = urllib.request.Request(url.geturl().lower()) | ||
279 | |||
280 | try: | ||
281 | resp = urllib.request.urlopen(req) | ||
282 | except URLError as url_err: | ||
283 | logger.warning( | ||
284 | "Failed to fetch page from [%s]: %s", url, str(url_err)) | ||
285 | return None | ||
286 | |||
287 | golang_proxy_res = resp.read().decode('utf-8') | ||
288 | modinfo = json.loads(golang_proxy_res) | ||
289 | |||
290 | if modinfo and 'Origin' in modinfo: | ||
291 | origin = modinfo['Origin'] | ||
292 | _root_url = urllib.parse.urlparse(origin['URL']) | ||
293 | |||
294 | # We normalize the repo URL since we don't want the scheme in it | ||
295 | _subdir = origin['Subdir'] if 'Subdir' in origin else None | ||
296 | _root, _, _ = self.__split_path_version(modulepath) | ||
297 | if _subdir: | ||
298 | _root = _root[:-len(_subdir)].strip('/') | ||
299 | |||
300 | _commit = origin['Hash'] | ||
301 | _vcs = origin['VCS'] | ||
302 | return (GoImport(_root, _vcs, _root_url.geturl(), None), _commit) | ||
303 | |||
304 | return None | ||
305 | |||
306 | def __resolve_repository(self, modulepath): | ||
307 | """ | ||
308 | Resolves src uri from go module-path | ||
309 | """ | ||
310 | repodata = self.__resolve_repository_static(modulepath) | ||
311 | if not repodata or not repodata.url: | ||
312 | repodata = self.__resolve_repository_dynamic(modulepath) | ||
313 | if not repodata or not repodata.url: | ||
314 | logger.error( | ||
315 | "Could not resolve repository for module path '%s'" % modulepath) | ||
316 | # There is no way to recover from this | ||
317 | sys.exit(14) | ||
318 | if repodata: | ||
319 | logger.debug(1, "Resolved download path for import '%s' => %s" % ( | ||
320 | modulepath, repodata.url)) | ||
321 | return repodata | ||
322 | |||
323 | def __split_path_version(self, path): | ||
324 | i = len(path) | ||
325 | dot = False | ||
326 | for j in range(i, 0, -1): | ||
327 | if path[j - 1] < '0' or path[j - 1] > '9': | ||
328 | break | ||
329 | if path[j - 1] == '.': | ||
330 | dot = True | ||
331 | break | ||
332 | i = j - 1 | ||
333 | |||
334 | if i <= 1 or i == len( | ||
335 | path) or path[i - 1] != 'v' or path[i - 2] != '/': | ||
336 | return path, "", True | ||
337 | |||
338 | prefix, pathMajor = path[:i - 2], path[i - 2:] | ||
339 | if dot or len( | ||
340 | pathMajor) <= 2 or pathMajor[2] == '0' or pathMajor == "/v1": | ||
341 | return path, "", False | ||
342 | |||
343 | return prefix, pathMajor, True | ||
344 | |||
345 | def __get_path_major(self, pathMajor): | ||
346 | if not pathMajor: | ||
347 | return "" | ||
348 | |||
349 | if pathMajor[0] != '/' and pathMajor[0] != '.': | ||
350 | logger.error( | ||
351 | "pathMajor suffix %s passed to PathMajorPrefix lacks separator", pathMajor) | ||
352 | |||
353 | if pathMajor.startswith(".v") and pathMajor.endswith("-unstable"): | ||
354 | pathMajor = pathMajor[:len("-unstable") - 2] | ||
355 | |||
356 | return pathMajor[1:] | ||
357 | |||
358 | def __build_coderepo(self, repo, path): | ||
359 | codedir = "" | ||
360 | pathprefix, pathMajor, _ = self.__split_path_version(path) | ||
361 | if repo.root == path: | ||
362 | pathprefix = path | ||
363 | elif path.startswith(repo.root): | ||
364 | codedir = pathprefix[len(repo.root):].strip('/') | ||
365 | |||
366 | pseudoMajor = self.__get_path_major(pathMajor) | ||
367 | |||
368 | logger.debug("root='%s', codedir='%s', prefix='%s', pathMajor='%s', pseudoMajor='%s'", | ||
369 | repo.root, codedir, pathprefix, pathMajor, pseudoMajor) | ||
370 | |||
371 | return CodeRepo(path, repo.root, codedir, | ||
372 | pathMajor, pathprefix, pseudoMajor) | ||
373 | |||
374 | def __resolve_version(self, repo, path, version): | ||
375 | hash = None | ||
376 | coderoot = self.__build_coderepo(repo, path) | ||
377 | |||
378 | def vcs_fetch_all(): | ||
379 | tmpdir = tempfile.mkdtemp() | ||
380 | clone_cmd = "%s clone --bare %s %s" % ('git', repo.url, tmpdir) | ||
381 | bb.process.run(clone_cmd) | ||
382 | log_cmd = "git log --all --pretty='%H %d' --decorate=short" | ||
383 | output, _ = bb.process.run( | ||
384 | log_cmd, shell=True, stderr=subprocess.PIPE, cwd=tmpdir) | ||
385 | bb.utils.prunedir(tmpdir) | ||
386 | return output.strip().split('\n') | ||
387 | |||
388 | def vcs_fetch_remote(tag): | ||
389 | # add * to grab ^{} | ||
390 | refs = {} | ||
391 | ls_remote_cmd = "git ls-remote -q --tags {} {}*".format( | ||
392 | repo.url, tag) | ||
393 | output, _ = bb.process.run(ls_remote_cmd) | ||
394 | output = output.strip().split('\n') | ||
395 | for line in output: | ||
396 | f = line.split(maxsplit=1) | ||
397 | if len(f) != 2: | ||
398 | continue | ||
399 | |||
400 | for prefix in ["HEAD", "refs/heads/", "refs/tags/"]: | ||
401 | if f[1].startswith(prefix): | ||
402 | refs[f[1][len(prefix):]] = f[0] | ||
403 | |||
404 | for key, hash in refs.items(): | ||
405 | if key.endswith(r"^{}"): | ||
406 | refs[key.strip(r"^{}")] = hash | ||
407 | |||
408 | return refs[tag] | ||
409 | |||
410 | m_pseudo_semver = re_pseudo_semver.match(version) | ||
411 | |||
412 | if m_pseudo_semver: | ||
413 | remote_refs = vcs_fetch_all() | ||
414 | short_commit = m_pseudo_semver.group('commithash') | ||
415 | for l in remote_refs: | ||
416 | r = l.split(maxsplit=1) | ||
417 | sha1 = r[0] if len(r) else None | ||
418 | if not sha1: | ||
419 | logger.error( | ||
420 | "Ups: could not resolve abbref commit for %s" % short_commit) | ||
421 | |||
422 | elif sha1.startswith(short_commit): | ||
423 | hash = sha1 | ||
424 | break | ||
425 | else: | ||
426 | m_semver = re_semver.match(version) | ||
427 | if m_semver: | ||
428 | |||
429 | def get_sha1_remote(re): | ||
430 | rsha1 = None | ||
431 | for line in remote_refs: | ||
432 | # Split lines of the following format: | ||
433 | # 22e90d9b964610628c10f673ca5f85b8c2a2ca9a (tag: sometag) | ||
434 | lineparts = line.split(maxsplit=1) | ||
435 | sha1 = lineparts[0] if len(lineparts) else None | ||
436 | refstring = lineparts[1] if len( | ||
437 | lineparts) == 2 else None | ||
438 | if refstring: | ||
439 | # Normalize tag string and split in case of multiple | ||
440 | # regs e.g. (tag: speech/v1.10.0, tag: orchestration/v1.5.0 ...) | ||
441 | refs = refstring.strip('(), ').split(',') | ||
442 | for ref in refs: | ||
443 | if re.match(ref.strip()): | ||
444 | rsha1 = sha1 | ||
445 | return rsha1 | ||
446 | |||
447 | semver = "v" + m_semver.group('major') + "."\ | ||
448 | + m_semver.group('minor') + "."\ | ||
449 | + m_semver.group('patch') \ | ||
450 | + (("-" + m_semver.group('prerelease')) | ||
451 | if m_semver.group('prerelease') else "") | ||
452 | |||
453 | tag = os.path.join( | ||
454 | coderoot.codeDir, semver) if coderoot.codeDir else semver | ||
455 | |||
456 | # probe tag using 'ls-remote', which is faster than fetching | ||
457 | # complete history | ||
458 | hash = vcs_fetch_remote(tag) | ||
459 | if not hash: | ||
460 | # backup: fetch complete history | ||
461 | remote_refs = vcs_fetch_all() | ||
462 | hash = get_sha1_remote( | ||
463 | re.compile(fr"(tag:|HEAD ->) ({tag})")) | ||
464 | |||
465 | logger.debug( | ||
466 | "Resolving commit for tag '%s' -> '%s'", tag, hash) | ||
467 | return hash | ||
468 | |||
469 | def __generate_srcuri_inline_fcn(self, path, version, replaces=None): | ||
470 | """Generate SRC_URI functions for go imports""" | ||
471 | |||
472 | logger.info("Resolving repository for module %s", path) | ||
473 | # First try to resolve repo and commit from golang proxy | ||
474 | # Most info is already there and we don't have to go through the | ||
475 | # repository or even perform the version resolve magic | ||
476 | golang_proxy_info = self.__resolve_from_golang_proxy(path, version) | ||
477 | if golang_proxy_info: | ||
478 | repo = golang_proxy_info[0] | ||
479 | commit = golang_proxy_info[1] | ||
480 | else: | ||
481 | # Fallback | ||
482 | # Resolve repository by 'hand' | ||
483 | repo = self.__resolve_repository(path) | ||
484 | commit = self.__resolve_version(repo, path, version) | ||
485 | |||
486 | url = urllib.parse.urlparse(repo.url) | ||
487 | repo_url = url.netloc + url.path | ||
488 | |||
489 | coderoot = self.__build_coderepo(repo, path) | ||
490 | |||
491 | inline_fcn = "${@go_src_uri(" | ||
492 | inline_fcn += f"'{repo_url}','{version}'" | ||
493 | if repo_url != path: | ||
494 | inline_fcn += f",path='{path}'" | ||
495 | if coderoot.codeDir: | ||
496 | inline_fcn += f",subdir='{coderoot.codeDir}'" | ||
497 | if repo.vcs != 'git': | ||
498 | inline_fcn += f",vcs='{repo.vcs}'" | ||
499 | if replaces: | ||
500 | inline_fcn += f",replaces='{replaces}'" | ||
501 | if coderoot.pathMajor: | ||
502 | inline_fcn += f",pathmajor='{coderoot.pathMajor}'" | ||
503 | inline_fcn += ")}" | ||
504 | |||
505 | return inline_fcn, commit | ||
506 | |||
507 | def __go_handle_dependencies(self, go_mod, localfilesdir, extravalues, d): | ||
508 | |||
509 | src_uris = [] | ||
510 | src_revs = [] | ||
511 | |||
512 | def generate_src_rev(path, version, commithash): | ||
513 | src_rev = f"# {path}@{version} => {commithash}\n" | ||
514 | # Ups...maybe someone manipulated the source repository and the | ||
515 | # version or commit could not be resolved. This is a sign of | ||
516 | # a) the supply chain was manipulated (bad) | ||
517 | # b) the implementation for the version resolving didn't work | ||
518 | # anymore (less bad) | ||
519 | if not commithash: | ||
520 | src_rev += f"#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n" | ||
521 | src_rev += f"#!!! Could not resolve version !!!\n" | ||
522 | src_rev += f"#!!! Possible supply chain attack !!!\n" | ||
523 | src_rev += f"#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n" | ||
524 | src_rev += f"SRCREV_{path.replace('/', '.')} = \"{commithash}\"" | ||
525 | |||
526 | return src_rev | ||
527 | |||
528 | for require in go_mod['Require']: | ||
529 | path = require['Path'] | ||
530 | version = require['Version'] | ||
531 | |||
532 | inline_fcn, commithash = self.__generate_srcuri_inline_fcn( | ||
533 | path, version) | ||
534 | src_uris.append(inline_fcn) | ||
535 | src_revs.append(generate_src_rev(path, version, commithash)) | ||
536 | |||
537 | if go_mod['Replace']: | ||
538 | for replacement in go_mod['Replace']: | ||
539 | oldpath = replacement['Old']['Path'] | ||
540 | path = replacement['New']['Path'] | ||
541 | version = replacement['New']['Version'] | ||
542 | |||
543 | inline_fcn, commithash = self.__generate_srcuri_inline_fcn( | ||
544 | path, version, oldpath) | ||
545 | src_uris.append(inline_fcn) | ||
546 | src_revs.append(generate_src_rev(path, version, commithash)) | ||
547 | |||
548 | pn, _ = determine_from_url(go_mod['Module']['Path']) | ||
549 | go_mods_basename = "%s-modules.inc" % pn | ||
550 | |||
551 | go_mods_filename = os.path.join(localfilesdir, go_mods_basename) | ||
552 | with open(go_mods_filename, "w") as f: | ||
553 | # We introduce this indirection to make the tests a little easier | ||
554 | f.write("SRC_URI += \"${GO_DEPENDENCIES_SRC_URI}\"\n") | ||
555 | f.write("GO_DEPENDENCIES_SRC_URI = \"\\\n") | ||
556 | for uri in src_uris: | ||
557 | f.write(" " + uri + " \\\n") | ||
558 | f.write("\"\n\n") | ||
559 | for rev in src_revs: | ||
560 | f.write(rev + "\n") | ||
561 | |||
562 | extravalues['extrafiles'][go_mods_basename] = go_mods_filename | ||
563 | |||
564 | def __go_run_cmd(self, cmd, cwd, d): | ||
565 | return bb.process.run(cmd, env=dict(os.environ, PATH=d.getVar('PATH')), | ||
566 | shell=True, cwd=cwd) | ||
567 | |||
568 | def __go_native_version(self, d): | ||
569 | stdout, _ = self.__go_run_cmd("go version", None, d) | ||
570 | m = re.match(r".*\sgo((\d+).(\d+).(\d+))\s([\w\/]*)", stdout) | ||
571 | major = int(m.group(2)) | ||
572 | minor = int(m.group(3)) | ||
573 | patch = int(m.group(4)) | ||
574 | |||
575 | return major, minor, patch | ||
576 | |||
    def __go_mod_patch(self, srctree, localfilesdir, extravalues, d):
        """Upgrade the go directive in go.mod and capture the change.

        Runs 'go mod tidy -go=<native major.minor>' in *srctree*, stores
        the resulting go.mod diff as go.mod.patch in *localfilesdir*
        (registered in extravalues['extrafiles']) and restores the
        original working tree. Returns the upgraded go.mod parsed as a
        dict together with the patch file name.
        """

        patchfilename = "go.mod.patch"
        go_native_version_major, go_native_version_minor, _ = self.__go_native_version(
            d)
        self.__go_run_cmd("go mod tidy -go=%d.%d" %
                          (go_native_version_major, go_native_version_minor), srctree, d)
        # Read back the upgraded go.mod before reverting the tree
        stdout, _ = self.__go_run_cmd("go mod edit -json", srctree, d)

        # Create patch in order to upgrade go version
        self.__go_run_cmd("git diff go.mod > %s" % (patchfilename), srctree, d)
        # Restore original state
        self.__go_run_cmd("git checkout HEAD go.mod go.sum", srctree, d)

        go_mod = json.loads(stdout)
        tmpfile = os.path.join(localfilesdir, patchfilename)
        # The diff was written inside the source tree; move it next to
        # the other generated recipe files
        shutil.move(os.path.join(srctree, patchfilename), tmpfile)

        extravalues['extrafiles'][patchfilename] = tmpfile

        return go_mod, patchfilename
598 | |||
    def __go_mod_vendor(self, go_mod, srctree, localfilesdir, extravalues, d):
        """Vendor the module and derive modules.txt and license data.

        Vendors the dependencies into a temporary directory to obtain
        modules.txt and to scan the vendored sources for licenses, then
        writes modules.txt and ${PN}-licenses.inc into *localfilesdir*
        and registers both in extravalues['extrafiles'].
        """
        # Perform vendoring to retrieve the correct modules.txt
        tmp_vendor_dir = tempfile.mkdtemp()

        # -v causes to go to print modules.txt to stderr
        _, stderr = self.__go_run_cmd(
            "go mod vendor -v -o %s" % (tmp_vendor_dir), srctree, d)

        modules_txt_basename = "modules.txt"
        modules_txt_filename = os.path.join(localfilesdir, modules_txt_basename)
        with open(modules_txt_filename, "w") as f:
            f.write(stderr)

        extravalues['extrafiles'][modules_txt_basename] = modules_txt_filename

        licenses = []
        lic_files_chksum = []
        licvalues = guess_license(tmp_vendor_dir, d)
        shutil.rmtree(tmp_vendor_dir)

        if licvalues:
            for licvalue in licvalues:
                license = licvalue[0]
                lics = tidy_licenses(fixup_license(license))
                # Only add license names not collected yet ...
                lics = [lic for lic in lics if lic not in licenses]
                if len(lics):
                    licenses.extend(lics)
                # ... but record a checksum entry for every license file.
                # NOTE(review): the rendering lost indentation here; this
                # assumes the append is per-file (loop level), not inside
                # the 'if' — confirm against upstream.
                lic_files_chksum.append(
                    'file://src/${GO_IMPORT}/vendor/%s;md5=%s' % (licvalue[1], licvalue[2]))

        pn, _ = determine_from_url(go_mod['Module']['Path'])
        licenses_basename = "%s-licenses.inc" % pn

        licenses_filename = os.path.join(localfilesdir, licenses_basename)
        with open(licenses_filename, "w") as f:
            f.write("GO_MOD_LICENSES = \"%s\"\n\n" %
                    ' & '.join(sorted(licenses, key=str.casefold)))
            # We introduce this indirection to make the tests a little easier
            f.write("LIC_FILES_CHKSUM += \"${VENDORED_LIC_FILES_CHKSUM}\"\n")
            f.write("VENDORED_LIC_FILES_CHKSUM = \"\\\n")
            for lic in lic_files_chksum:
                f.write(" " + lic + " \\\n")
            f.write("\"\n")

        extravalues['extrafiles'][licenses_basename] = licenses_filename
644 | |||
    def process(self, srctree, classes, lines_before,
                lines_after, handled, extravalues):
        """Handler entry point: create recipe content for a go module.

        Returns False when the source tree is not a go-mod project or
        the buildsystem was already handled; otherwise marks
        'buildsystem' as handled, inherits go-vendor and appends the
        SRC_URI/LICENSE/SRCREV metadata derived from go.mod.
        """

        if 'buildsystem' in handled:
            return False

        # Only act on source trees that carry a go.mod file
        files = RecipeHandler.checkfiles(srctree, ['go.mod'])
        if not files:
            return False

        d = bb.data.createCopy(tinfoil.config_data)
        go_bindir = self.__ensure_go()
        if not go_bindir:
            sys.exit(14)

        # Make the native 'go' binary available to all commands below
        d.prependVar('PATH', '%s:' % go_bindir)
        handled.append('buildsystem')
        classes.append("go-vendor")

        stdout, _ = self.__go_run_cmd("go mod edit -json", srctree, d)

        go_mod = json.loads(stdout)
        go_import = go_mod['Module']['Path']
        go_version_match = re.match("([0-9]+).([0-9]+)", go_mod['Go'])
        go_version_major = int(go_version_match.group(1))
        go_version_minor = int(go_version_match.group(2))
        src_uris = []

        localfilesdir = tempfile.mkdtemp(prefix='recipetool-go-')
        extravalues.setdefault('extrafiles', {})
        # go.mod files with version < 1.17 may not include all indirect
        # dependencies. Thus, we have to upgrade the go version.
        if go_version_major == 1 and go_version_minor < 17:
            logger.warning(
                "go.mod files generated by Go < 1.17 might have incomplete indirect dependencies.")
            go_mod, patchfilename = self.__go_mod_patch(srctree, localfilesdir,
                                                        extravalues, d)
            # NOTE(review): this patch URI is collected in src_uris, but
            # src_uris is never passed to __rewrite_src_uri below, so it
            # may never reach SRC_URI — confirm intent upstream.
            src_uris.append(
                "file://%s;patchdir=src/${GO_IMPORT}" % (patchfilename))

        # Check whether the module is vendored. If so, we have nothing to do.
        # Otherwise we gather all dependencies and add them to the recipe
        if not os.path.exists(os.path.join(srctree, "vendor")):

            # Write additional $BPN-modules.inc file
            self.__go_mod_vendor(go_mod, srctree, localfilesdir, extravalues, d)
            lines_before.append("LICENSE += \" & ${GO_MOD_LICENSES}\"")
            lines_before.append("require ${BPN}-licenses.inc")

            self.__rewrite_src_uri(lines_before, ["file://modules.txt"])

            self.__go_handle_dependencies(go_mod, localfilesdir, extravalues, d)
            lines_before.append("require ${BPN}-modules.inc")

        # Do generic license handling
        handle_license_vars(srctree, lines_before, handled, extravalues, d)
        self.__rewrite_lic_uri(lines_before)

        lines_before.append("GO_IMPORT = \"{}\"".format(go_import))
        lines_before.append("SRCREV_FORMAT = \"${BPN}\"")
705 | |||
706 | def __update_lines_before(self, updated, newlines, lines_before): | ||
707 | if updated: | ||
708 | del lines_before[:] | ||
709 | for line in newlines: | ||
710 | # Hack to avoid newlines that edit_metadata inserts | ||
711 | if line.endswith('\n'): | ||
712 | line = line[:-1] | ||
713 | lines_before.append(line) | ||
714 | return updated | ||
715 | |||
716 | def __rewrite_lic_uri(self, lines_before): | ||
717 | |||
718 | def varfunc(varname, origvalue, op, newlines): | ||
719 | if varname == 'LIC_FILES_CHKSUM': | ||
720 | new_licenses = [] | ||
721 | licenses = origvalue.split('\\') | ||
722 | for license in licenses: | ||
723 | license = license.strip() | ||
724 | uri, chksum = license.split(';', 1) | ||
725 | url = urllib.parse.urlparse(uri) | ||
726 | new_uri = os.path.join( | ||
727 | url.scheme + "://", "src", "${GO_IMPORT}", url.netloc + url.path) + ";" + chksum | ||
728 | new_licenses.append(new_uri) | ||
729 | |||
730 | return new_licenses, None, -1, True | ||
731 | return origvalue, None, 0, True | ||
732 | |||
733 | updated, newlines = bb.utils.edit_metadata( | ||
734 | lines_before, ['LIC_FILES_CHKSUM'], varfunc) | ||
735 | return self.__update_lines_before(updated, newlines, lines_before) | ||
736 | |||
737 | def __rewrite_src_uri(self, lines_before, additional_uris = []): | ||
738 | |||
739 | def varfunc(varname, origvalue, op, newlines): | ||
740 | if varname == 'SRC_URI': | ||
741 | src_uri = ["git://${GO_IMPORT};destsuffix=git/src/${GO_IMPORT};nobranch=1;name=${BPN};protocol=https"] | ||
742 | src_uri.extend(additional_uris) | ||
743 | return src_uri, None, -1, True | ||
744 | return origvalue, None, 0, True | ||
745 | |||
746 | updated, newlines = bb.utils.edit_metadata(lines_before, ['SRC_URI'], varfunc) | ||
747 | return self.__update_lines_before(updated, newlines, lines_before) | ||
748 | |||
749 | |||
def register_recipe_handlers(handlers):
    """Register the go recipe handler with priority 60."""
    handlers.append((GoRecipeHandler(), 60))