-rwxr-xr-x  bitbake/bin/bitbake | 2
-rwxr-xr-x  bitbake/bin/bitbake-worker | 7
-rw-r--r--  bitbake/lib/bb/__init__.py | 2
-rw-r--r--  bitbake/lib/bb/tests/utils.py | 11
-rw-r--r--  bitbake/lib/bb/tinfoil.py | 135
-rw-r--r--  bitbake/lib/bb/utils.py | 17
-rw-r--r--  documentation/dev-manual/new-recipe.rst | 9
-rw-r--r--  meta/classes-global/base.bbclass | 1
-rw-r--r--  meta/classes-global/sstate.bbclass | 12
-rw-r--r--  meta/classes-recipe/go-mod-update-modules.bbclass | 152
-rw-r--r--  meta/classes-recipe/go-mod.bbclass | 2
-rw-r--r--  meta/classes-recipe/testsdk.bbclass | 1
-rw-r--r--  meta/classes/create-spdx-2.2.bbclass | 8
-rw-r--r--  meta/conf/bitbake.conf | 2
-rw-r--r--  meta/conf/distro/include/maintainers.inc | 2
-rw-r--r--  meta/conf/distro/include/ptest-packagelists.inc | 1
-rw-r--r--  meta/conf/sanity.conf | 2
-rw-r--r--  meta/lib/oe/license.py | 15
-rw-r--r--  meta/lib/oeqa/buildtools-docs/cases/README (renamed from meta/lib/oeqa/sdk/buildtools-docs-cases/README) | 0
-rw-r--r--  meta/lib/oeqa/buildtools-docs/cases/build.py (renamed from meta/lib/oeqa/sdk/buildtools-docs-cases/build.py) | 0
-rw-r--r--  meta/lib/oeqa/buildtools/cases/README (renamed from meta/lib/oeqa/sdk/buildtools-cases/README) | 0
-rw-r--r--  meta/lib/oeqa/buildtools/cases/build.py (renamed from meta/lib/oeqa/sdk/buildtools-cases/build.py) | 0
-rw-r--r--  meta/lib/oeqa/buildtools/cases/gcc.py (renamed from meta/lib/oeqa/sdk/buildtools-cases/gcc.py) | 0
-rw-r--r--  meta/lib/oeqa/buildtools/cases/https.py (renamed from meta/lib/oeqa/sdk/buildtools-cases/https.py) | 0
-rw-r--r--  meta/lib/oeqa/buildtools/cases/sanity.py (renamed from meta/lib/oeqa/sdk/buildtools-cases/sanity.py) | 0
-rw-r--r--  meta/lib/oeqa/sdk/testsdk.py | 24
-rw-r--r--  meta/lib/oeqa/selftest/cases/devtool.py | 2
-rw-r--r--  meta/lib/oeqa/selftest/cases/recipetool.py | 33
-rw-r--r--  meta/recipes-core/meta/buildtools-docs-tarball.bb | 5
-rw-r--r--  meta/recipes-core/meta/buildtools-tarball.bb | 21
-rw-r--r--  meta/recipes-devtools/dosfstools/dosfstools/0001-fsck.fat-Adhere-to-the-fsck-exit-codes.patch | 214
-rw-r--r--  meta/recipes-devtools/dosfstools/dosfstools/0002-manpages-Document-fsck.fat-new-exit-codes.patch | 46
-rw-r--r--  meta/recipes-devtools/dosfstools/dosfstools_4.2.bb | 7
-rw-r--r--  meta/recipes-devtools/json-c/json-c_0.18.bb | 3
-rw-r--r--  meta/recipes-devtools/mtools/mtools_4.0.49.bb (renamed from meta/recipes-devtools/mtools/mtools_4.0.48.bb) | 2
-rw-r--r--  meta/recipes-devtools/ninja/ninja_1.13.0.bb (renamed from meta/recipes-devtools/ninja/ninja_1.12.1.bb) | 10
-rw-r--r--  meta/recipes-devtools/python/python3-sphinx-argparse_0.5.2.bb | 13
-rw-r--r--  meta/recipes-devtools/python/python3-sphinx-copybutton_0.5.2.bb | 10
-rw-r--r--  meta/recipes-devtools/python/python3-urllib3_2.5.0.bb (renamed from meta/recipes-devtools/python/python3-urllib3_2.4.0.bb) | 2
-rw-r--r--  meta/recipes-devtools/python/python3-wheel_0.46.1.bb (renamed from meta/recipes-devtools/python/python3-wheel_0.45.1.bb) | 9
-rw-r--r--  meta/recipes-devtools/tcf-agent/tcf-agent_1.8.0.bb | 6
-rw-r--r--  scripts/lib/recipetool/create.py | 19
-rw-r--r--  scripts/lib/recipetool/create_go.py | 677
43 files changed, 755 insertions(+), 729 deletions(-)
diff --git a/bitbake/bin/bitbake b/bitbake/bin/bitbake
index c2d0ca7613..40b5d895c1 100755
--- a/bitbake/bin/bitbake
+++ b/bitbake/bin/bitbake
@@ -27,7 +27,7 @@ from bb.main import bitbake_main, BitBakeConfigParameters, BBMainException
 
 bb.utils.check_system_locale()
 
-__version__ = "2.15.0"
+__version__ = "2.15.1"
 
 if __name__ == "__main__":
     if __version__ != bb.__version__:
diff --git a/bitbake/bin/bitbake-worker b/bitbake/bin/bitbake-worker
index 35fa1ab62b..d2b146a6a9 100755
--- a/bitbake/bin/bitbake-worker
+++ b/bitbake/bin/bitbake-worker
@@ -182,11 +182,8 @@ def fork_off_task(cfg, data, databuilder, workerdata, extraconfigdata, runtask):
     elif workerdata["umask"]:
         umask = workerdata["umask"]
     if umask:
-        # umask might come in as a number or text string..
-        try:
-            umask = int(umask, 8)
-        except TypeError:
-            pass
+        # Convert to a python numeric value as it could be a string
+        umask = bb.utils.to_filemode(umask)
 
     dry_run = cfg.dry_run or runtask['dry_run']
 
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py
index 62ceaaef6e..bf4c54d829 100644
--- a/bitbake/lib/bb/__init__.py
+++ b/bitbake/lib/bb/__init__.py
@@ -9,7 +9,7 @@
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
-__version__ = "2.15.0"
+__version__ = "2.15.1"
 
 import sys
 if sys.version_info < (3, 9, 0):
diff --git a/bitbake/lib/bb/tests/utils.py b/bitbake/lib/bb/tests/utils.py
index 48e61dfcea..52b7bf85bf 100644
--- a/bitbake/lib/bb/tests/utils.py
+++ b/bitbake/lib/bb/tests/utils.py
@@ -692,3 +692,14 @@ class EnvironmentTests(unittest.TestCase):
         self.assertIn("A", os.environ)
         self.assertEqual(os.environ["A"], "this is A")
         self.assertNotIn("B", os.environ)
+
+class FilemodeTests(unittest.TestCase):
+    def test_filemode_convert(self):
+        self.assertEqual(0o775, bb.utils.to_filemode("0o775"))
+        self.assertEqual(0o775, bb.utils.to_filemode(0o775))
+        self.assertEqual(0o775, bb.utils.to_filemode("775"))
+        with self.assertRaises(ValueError):
+            bb.utils.to_filemode("xyz")
+        with self.assertRaises(ValueError):
+            bb.utils.to_filemode("999")
+
diff --git a/bitbake/lib/bb/tinfoil.py b/bitbake/lib/bb/tinfoil.py
index f48baeb334..e7fbcbca0a 100644
--- a/bitbake/lib/bb/tinfoil.py
+++ b/bitbake/lib/bb/tinfoil.py
@@ -14,7 +14,7 @@ import time
 import atexit
 import re
 from collections import OrderedDict, defaultdict
-from functools import partial
+from functools import partial, wraps
 from contextlib import contextmanager
 
 import bb.cache
@@ -27,6 +27,135 @@ import bb.remotedata
 from bb.main import setup_bitbake, BitBakeConfigParameters
 import bb.fetch2
 
+def wait_for(f):
+    """
+    Wrap a function that makes an asynchronous tinfoil call using
+    self.run_command() and wait for events to say that the call has been
+    successful, or an error has occurred.
+    """
+    @wraps(f)
+    def wrapper(self, *args, handle_events=True, extra_events=None, event_callback=None, **kwargs):
+        if handle_events:
+            # A reasonable set of default events matching up with those we handle below
+            eventmask = [
+                'bb.event.BuildStarted',
+                'bb.event.BuildCompleted',
+                'logging.LogRecord',
+                'bb.event.NoProvider',
+                'bb.command.CommandCompleted',
+                'bb.command.CommandFailed',
+                'bb.build.TaskStarted',
+                'bb.build.TaskFailed',
+                'bb.build.TaskSucceeded',
+                'bb.build.TaskFailedSilent',
+                'bb.build.TaskProgress',
+                'bb.runqueue.runQueueTaskStarted',
+                'bb.runqueue.sceneQueueTaskStarted',
+                'bb.event.ProcessStarted',
+                'bb.event.ProcessProgress',
+                'bb.event.ProcessFinished',
+            ]
+            if extra_events:
+                eventmask.extend(extra_events)
+            ret = self.set_event_mask(eventmask)
+
+        includelogs = self.config_data.getVar('BBINCLUDELOGS')
+        loglines = self.config_data.getVar('BBINCLUDELOGS_LINES')
+
+        # Call actual function
+        ret = f(self, *args, **kwargs)
+
+        if handle_events:
+            lastevent = time.time()
+            result = False
+            # Borrowed from knotty, instead somewhat hackily we use the helper
+            # as the object to store "shutdown" on
+            helper = bb.ui.uihelper.BBUIHelper()
+            helper.shutdown = 0
+            parseprogress = None
+            termfilter = bb.ui.knotty.TerminalFilter(helper, helper, self.logger.handlers, quiet=self.quiet)
+            try:
+                while True:
+                    try:
+                        event = self.wait_event(0.25)
+                        if event:
+                            lastevent = time.time()
+                            if event_callback and event_callback(event):
+                                continue
+                            if helper.eventHandler(event):
+                                if isinstance(event, bb.build.TaskFailedSilent):
+                                    self.logger.warning("Logfile for failed setscene task is %s" % event.logfile)
+                                elif isinstance(event, bb.build.TaskFailed):
+                                    bb.ui.knotty.print_event_log(event, includelogs, loglines, termfilter)
+                                continue
+                            if isinstance(event, bb.event.ProcessStarted):
+                                if self.quiet > 1:
+                                    continue
+                                parseprogress = bb.ui.knotty.new_progress(event.processname, event.total)
+                                parseprogress.start(False)
+                                continue
+                            if isinstance(event, bb.event.ProcessProgress):
+                                if self.quiet > 1:
+                                    continue
+                                if parseprogress:
+                                    parseprogress.update(event.progress)
+                                else:
+                                    bb.warn("Got ProcessProgress event for something that never started?")
+                                continue
+                            if isinstance(event, bb.event.ProcessFinished):
+                                if self.quiet > 1:
+                                    continue
+                                if parseprogress:
+                                    parseprogress.finish()
+                                parseprogress = None
+                                continue
+                            if isinstance(event, bb.command.CommandCompleted):
+                                result = True
+                                break
+                            if isinstance(event, (bb.command.CommandFailed, bb.command.CommandExit)):
+                                self.logger.error(str(event))
+                                result = False
+                                break
+                            if isinstance(event, logging.LogRecord):
+                                if event.taskpid == 0 or event.levelno > logging.INFO:
+                                    self.logger.handle(event)
+                                continue
+                            if isinstance(event, bb.event.NoProvider):
+                                self.logger.error(str(event))
+                                result = False
+                                break
+                        elif helper.shutdown > 1:
+                            break
+                        termfilter.updateFooter()
+                        if time.time() > (lastevent + (3*60)):
+                            if not self.run_command('ping', handle_events=False):
+                                print("\nUnable to ping server and no events, closing down...\n")
+                                return False
+                    except KeyboardInterrupt:
+                        termfilter.clearFooter()
+                        if helper.shutdown == 1:
+                            print("\nSecond Keyboard Interrupt, stopping...\n")
+                            ret = self.run_command("stateForceShutdown")
+                            if ret and ret[2]:
+                                self.logger.error("Unable to cleanly stop: %s" % ret[2])
+                        elif helper.shutdown == 0:
+                            print("\nKeyboard Interrupt, closing down...\n")
+                            interrupted = True
+                            ret = self.run_command("stateShutdown")
+                            if ret and ret[2]:
+                                self.logger.error("Unable to cleanly shutdown: %s" % ret[2])
+                        helper.shutdown = helper.shutdown + 1
+                    termfilter.clearFooter()
+            finally:
+                termfilter.finish()
+            if helper.failed_tasks:
+                result = False
+            return result
+        else:
+            return ret
+
+    return wrapper
+
 
 # We need this in order to shut down the connection to the bitbake server,
 # otherwise the process will never properly exit
@@ -700,6 +829,10 @@ class Tinfoil:
         """
         return self.run_command('buildFile', buildfile, task, internal)
 
+    @wait_for
+    def build_file_sync(self, *args):
+        self.build_file(*args)
+
     def build_targets(self, targets, task=None, handle_events=True, extra_events=None, event_callback=None):
         """
         Builds the specified targets. This is equivalent to a normal invocation
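For context, here is a minimal sketch of how the new synchronous wrapper could be driven from a script. The recipe path and task name are placeholders, and this usage is illustrative rather than part of the patch:

```python
# Hypothetical use of the new @wait_for-wrapped call in a tinfoil script.
import bb.tinfoil

with bb.tinfoil.Tinfoil() as tinfoil:
    tinfoil.prepare(config_only=False)
    # build_file_sync() forwards to build_file() and, via wait_for(), pumps UI
    # events until CommandCompleted/CommandFailed, returning True on success.
    ok = tinfoil.build_file_sync("/path/to/example_1.0.bb", "fetch")
    print("build_file succeeded:", ok)
```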
diff --git a/bitbake/lib/bb/utils.py b/bitbake/lib/bb/utils.py
index a2806fd360..1cc74ed546 100644
--- a/bitbake/lib/bb/utils.py
+++ b/bitbake/lib/bb/utils.py
@@ -1211,6 +1211,23 @@ def which(path, item, direction = 0, history = False, executable=False):
             return "", hist
     return ""
 
+def to_filemode(input):
+    """
+    Take a bitbake variable contents defining a file mode and return
+    the proper python representation of the number
+
+    Arguments:
+
+    - ``input``: a string or number to convert, e.g. a bitbake variable
+      string, assumed to be an octal representation
+
+    Returns the python file mode as a number
+    """
+    # umask might come in as a number or text string..
+    if type(input) is int:
+        return input
+    return int(input, 8)
+
 @contextmanager
 def umask(new_mask):
     """
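A small sketch of the conversion behaviour of the helper added above; the values are illustrative and mirror the new FilemodeTests:

```python
# to_filemode() accepts octal strings (with or without the 0o prefix) or ints.
import bb.utils

assert bb.utils.to_filemode("022") == 0o22      # plain octal string
assert bb.utils.to_filemode("0o775") == 0o775   # prefixed octal string
assert bb.utils.to_filemode(0o775) == 0o775     # ints are returned unchanged
# Non-octal input such as "999" or "xyz" raises ValueError.
```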
diff --git a/documentation/dev-manual/new-recipe.rst b/documentation/dev-manual/new-recipe.rst
index 6ee94fd6e5..832aa300e1 100644
--- a/documentation/dev-manual/new-recipe.rst
+++ b/documentation/dev-manual/new-recipe.rst
@@ -188,13 +188,14 @@ the recipe.
    Use lower-cased characters and do not include the reserved suffixes
    ``-native``, ``-cross``, ``-initial``, or ``-dev`` casually (i.e. do not use
    them as part of your recipe name unless the string applies). Here are some
-   examples:
+   examples (which includes the use of the string "git" as a special case of a
+   version identifier):
 
    .. code-block:: none
 
-      cups_1.7.0.bb
-      gawk_4.0.2.bb
-      irssi_0.8.16-rc1.bb
+      cups_2.4.12.bb
+      gawk_5.3.2.bb
+      psplash_git.bb
 
 Running a Build on the Recipe
 =============================
diff --git a/meta/classes-global/base.bbclass b/meta/classes-global/base.bbclass
index b86f50e283..ac145d9fd6 100644
--- a/meta/classes-global/base.bbclass
+++ b/meta/classes-global/base.bbclass
@@ -154,6 +154,7 @@ do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}"
 do_fetch[file-checksums] += " ${@get_lic_checksum_file_list(d)}"
 do_fetch[prefuncs] += "fetcher_hashes_dummyfunc"
 do_fetch[network] = "1"
+do_fetch[umask] = "${OE_SHARED_UMASK}"
 python base_do_fetch() {
 
     src_uri = (d.getVar('SRC_URI') or "").split()
diff --git a/meta/classes-global/sstate.bbclass b/meta/classes-global/sstate.bbclass
index 2968cc4c2e..53bc2e3940 100644
--- a/meta/classes-global/sstate.bbclass
+++ b/meta/classes-global/sstate.bbclass
@@ -745,7 +745,7 @@ def pstaging_fetch(sstatefetch, d):
     if bb.utils.to_boolean(d.getVar("SSTATE_VERIFY_SIG"), False):
         uris += ['file://{0}.sig;downloadfilename={0}.sig'.format(sstatefetch)]
 
-    with bb.utils.umask(0o002):
+    with bb.utils.umask(bb.utils.to_filemode(d.getVar("OE_SHARED_UMASK"))):
         bb.utils.mkdirhier(dldir)
 
     for srcuri in uris:
@@ -776,9 +776,10 @@ sstate_task_prefunc[dirs] = "${WORKDIR}"
 python sstate_task_postfunc () {
     shared_state = sstate_state_fromvars(d)
 
-    omask = os.umask(0o002)
-    if omask != 0o002:
-        bb.note("Using umask 0o002 (not %0o) for sstate packaging" % omask)
+    shared_umask = bb.utils.to_filemode(d.getVar("OE_SHARED_UMASK"))
+    omask = os.umask(shared_umask)
+    if omask != shared_umask:
+        bb.note("Using umask %0o (not %0o) for sstate packaging" % (shared_umask, omask))
     sstate_package(shared_state, d)
     os.umask(omask)
 
@@ -843,7 +844,8 @@ python sstate_create_and_sign_package () {
 
     # Create the required sstate directory if it is not present.
     if not sstate_pkg.parent.is_dir():
-        with bb.utils.umask(0o002):
+        shared_umask = bb.utils.to_filemode(d.getVar("OE_SHARED_UMASK"))
+        with bb.utils.umask(shared_umask):
             bb.utils.mkdirhier(str(sstate_pkg.parent))
 
     if sign_pkg:
diff --git a/meta/classes-recipe/go-mod-update-modules.bbclass b/meta/classes-recipe/go-mod-update-modules.bbclass
new file mode 100644
index 0000000000..5fccd0bb0d
--- /dev/null
+++ b/meta/classes-recipe/go-mod-update-modules.bbclass
@@ -0,0 +1,152 @@
1addtask do_update_modules after do_configure
2do_update_modules[nostamp] = "1"
3do_update_modules[network] = "1"
4
5# This class maintains two files, BPN-go-mods.inc and BPN-licenses.inc.
6#
7# -go-mods.inc will append SRC_URI with all of the Go modules that are
8# dependencies of this recipe.
9#
10# -licenses.inc will append LICENSE and LIC_FILES_CHKSUM with the found licenses
11# in the modules.
12#
13# These files are machine-generated and should not be modified.
14
15python do_update_modules() {
16 import subprocess, tempfile, json, re, urllib.parse
17 from oe.license import tidy_licenses
18 from oe.license_finder import find_licenses
19
20 def unescape_path(path):
21 """Unescape capital letters using exclamation points."""
22 return re.sub(r'!([a-z])', lambda m: m.group(1).upper(), path)
23
24 def fold_uri(uri):
25 """Fold URI for sorting shorter module paths before longer."""
26 return uri.replace(';', ' ').replace('/', '!')
27
28 def parse_existing_licenses():
29 hashes = {}
30 for url in d.getVar("LIC_FILES_CHKSUM").split():
31 (method, host, path, user, pswd, parm) = bb.fetch.decodeurl(url)
32 if "spdx" in parm and parm["spdx"] != "Unknown":
33 hashes[parm["md5"]] = urllib.parse.unquote_plus(parm["spdx"])
34 return hashes
35
36 bpn = d.getVar("BPN")
37 thisdir = d.getVar("THISDIR")
38 s_dir = d.getVar("S")
39
40 with tempfile.TemporaryDirectory(prefix='go-mod-') as mod_cache_dir:
41 notice = """
42# This file has been generated by go-mod-update-modules.bbclass
43#
44# Do not modify it by hand, as the contents will be replaced when
45# running the update-modules task.
46
47"""
48
49 env = dict(os.environ, GOMODCACHE=mod_cache_dir)
50
51 source = d.expand("${UNPACKDIR}/${GO_SRCURI_DESTSUFFIX}")
52 output = subprocess.check_output(("go", "mod", "edit", "-json"), cwd=source, env=env, text=True)
53 go_mod = json.loads(output)
54
55 output = subprocess.check_output(("go", "list", "-json=Dir,Module", "-deps", f"{go_mod['Module']['Path']}/..."), cwd=source, env=env, text=True)
56
57 #
58 # Licenses
59 #
60
61 # load hashes from the existing licenses.inc
62 extra_hashes = parse_existing_licenses()
63
64 # The output of this isn't actually valid JSON, but a series of dicts.
65 # Wrap in [] and join the dicts with ,
66 # Very frustrating that the json parser in python can't repeatedly
67 # parse from a stream.
68 pkgs = json.loads('[' + output.replace('}\n{', '},\n{') + ']')
69 # Collect licenses for the dependencies.
70 licenses = set()
71 lic_files_chksum = []
72 lic_files = {}
73
74 for pkg in pkgs:
75 mod = pkg.get('Module', None)
76 if not mod or mod.get('Main', False):
77 continue
78
79 mod_dir = mod['Dir']
80
81 if not mod_dir.startswith(mod_cache_dir):
82 continue
83
84 path = os.path.relpath(mod_dir, mod_cache_dir)
85
86 for license_name, license_file, license_md5 in find_licenses(mod['Dir'], d, first_only=True, extra_hashes=extra_hashes):
87 lic_files[os.path.join(path, license_file)] = (license_name, license_md5)
88
89 for lic_file in lic_files:
90 license_name, license_md5 = lic_files[lic_file]
91 if license_name == "Unknown":
92 bb.warn(f"Unknown license: {lic_file} {license_md5}")
93
94 licenses.add(lic_files[lic_file][0])
95 lic_files_chksum.append(
96 f'file://pkg/mod/{lic_file};md5={license_md5};spdx={urllib.parse.quote_plus(license_name)}')
97
98 licenses_filename = os.path.join(thisdir, f"{bpn}-licenses.inc")
99 with open(licenses_filename, "w") as f:
100 f.write(notice)
101 f.write(f'LICENSE += "& {" & ".join(tidy_licenses(licenses))}"\n\n')
102 f.write('LIC_FILES_CHKSUM += "\\\n')
103 for lic in sorted(lic_files_chksum, key=fold_uri):
104 f.write(' ' + lic + ' \\\n')
105 f.write('"\n')
106
107 #
108 # Sources
109 #
110
111 # Collect the module cache files downloaded by the go list command as
112 # the go list command knows best what the go list command needs and it
113 # needs more files in the module cache than the go install command as
114 # it doesn't do the dependency pruning mentioned in the Go module
115 # reference, https://go.dev/ref/mod, for go 1.17 or higher.
116 src_uris = []
117 downloaddir = os.path.join(mod_cache_dir, 'cache', 'download')
118 for dirpath, _, filenames in os.walk(downloaddir):
119 # We want to process files under @v directories
120 path, base = os.path.split(os.path.relpath(dirpath, downloaddir))
121 if base != '@v':
122 continue
123
124 path = unescape_path(path)
125 zipver = None
126 for name in filenames:
127 ver, ext = os.path.splitext(name)
128 if ext == '.zip':
129 chksum = bb.utils.sha256_file(os.path.join(dirpath, name))
130 src_uris.append(f'gomod://{path};version={ver};sha256sum={chksum}')
131 zipver = ver
132 break
133 for name in filenames:
134 ver, ext = os.path.splitext(name)
135 if ext == '.mod' and ver != zipver:
136 chksum = bb.utils.sha256_file(os.path.join(dirpath, name))
137 src_uris.append(f'gomod://{path};version={ver};mod=1;sha256sum={chksum}')
138
139
140 go_mods_filename = os.path.join(thisdir, f"{bpn}-go-mods.inc")
141 with open(go_mods_filename, "w") as f:
142 f.write(notice)
143 f.write('SRC_URI += "\\\n')
144 for uri in sorted(src_uris, key=fold_uri):
145 f.write(' ' + uri + ' \\\n')
146 f.write('"\n')
147
148 subprocess.check_output(("go", "clean", "-modcache"), cwd=source, env=env, text=True)
149}
150
151# This doesn't work as we need to wipe the inc files first so we don't try looking for LICENSE files that don't yet exist
152# RECIPE_UPGRADE_EXTRA_TASKS += "do_update_modules"
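As a side note, here is a small standalone sketch of the two path helpers defined in this class, showing the transforms they perform; the module names below are only examples:

```python
# unescape_path() undoes the Go module cache escaping of capital letters, and
# fold_uri() is used purely as a sort key so shorter module paths sort first.
import re

def unescape_path(path):
    return re.sub(r'!([a-z])', lambda m: m.group(1).upper(), path)

def fold_uri(uri):
    return uri.replace(';', ' ').replace('/', '!')

print(unescape_path("github.com/!burnt!sushi/toml"))   # -> github.com/BurntSushi/toml

uris = [
    "gomod://github.com/godbus/dbus/v5;version=v5.1.0",
    "gomod://github.com/godbus/dbus;version=v5.0.6",
]
print(sorted(uris, key=fold_uri))  # the shorter module path comes first
```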
diff --git a/meta/classes-recipe/go-mod.bbclass b/meta/classes-recipe/go-mod.bbclass
index 93ae72235f..a15dda8f0e 100644
--- a/meta/classes-recipe/go-mod.bbclass
+++ b/meta/classes-recipe/go-mod.bbclass
@@ -23,7 +23,7 @@ GOBUILDFLAGS:append = " -modcacherw"
 inherit go
 
 export GOMODCACHE = "${S}/pkg/mod"
-GO_MOD_CACHE_DIR = "${@os.path.relpath(d.getVar('GOMODCACHE'), d.getVar('WORKDIR'))}"
+GO_MOD_CACHE_DIR = "${@os.path.relpath(d.getVar('GOMODCACHE'), d.getVar('UNPACKDIR'))}"
 do_unpack[cleandirs] += "${GOMODCACHE}"
 
 GO_WORKDIR ?= "${GO_IMPORT}"
diff --git a/meta/classes-recipe/testsdk.bbclass b/meta/classes-recipe/testsdk.bbclass
index 59d2834c99..b1c4fa67e6 100644
--- a/meta/classes-recipe/testsdk.bbclass
+++ b/meta/classes-recipe/testsdk.bbclass
@@ -19,6 +19,7 @@ TESTSDK_SUITES ?= ""
 
 TESTSDK_CLASS_NAME ?= "oeqa.sdk.testsdk.TestSDK"
 TESTSDKEXT_CLASS_NAME ?= "oeqa.sdkext.testsdk.TestSDKExt"
+TESTSDK_CASE_DIRS ?= "sdk"
 
 def import_and_run(name, d):
     import importlib
diff --git a/meta/classes/create-spdx-2.2.bbclass b/meta/classes/create-spdx-2.2.bbclass
index 6fc60a1d97..94e0108815 100644
--- a/meta/classes/create-spdx-2.2.bbclass
+++ b/meta/classes/create-spdx-2.2.bbclass
@@ -23,6 +23,8 @@ def get_namespace(d, name):
     namespace_uuid = uuid.uuid5(uuid.NAMESPACE_DNS, d.getVar("SPDX_UUID_NAMESPACE"))
     return "%s/%s-%s" % (d.getVar("SPDX_NAMESPACE_PREFIX"), name, str(uuid.uuid5(namespace_uuid, name)))
 
+SPDX_PACKAGE_VERSION ??= "${PV}"
+SPDX_PACKAGE_VERSION[doc] = "The version of a package, versionInfo in recipe, package and image"
 
 def create_annotation(d, comment):
     from datetime import datetime, timezone
@@ -447,7 +449,7 @@ python do_create_spdx() {
 
     recipe = oe.spdx.SPDXPackage()
     recipe.name = d.getVar("PN")
-    recipe.versionInfo = d.getVar("PV")
+    recipe.versionInfo = d.getVar("SPDX_PACKAGE_VERSION")
     recipe.SPDXID = oe.sbom.get_recipe_spdxid(d)
     recipe.supplier = d.getVar("SPDX_SUPPLIER")
     if bb.data.inherits_class("native", d) or bb.data.inherits_class("cross", d):
@@ -556,7 +558,7 @@ python do_create_spdx() {
 
         spdx_package.SPDXID = oe.sbom.get_package_spdxid(pkg_name)
         spdx_package.name = pkg_name
-        spdx_package.versionInfo = d.getVar("PV")
+        spdx_package.versionInfo = d.getVar("SPDX_PACKAGE_VERSION")
         spdx_package.licenseDeclared = convert_license_to_spdx(package_license, license_data, package_doc, d, found_licenses)
         spdx_package.supplier = d.getVar("SPDX_SUPPLIER")
 
@@ -832,7 +834,7 @@ def combine_spdx(d, rootfs_name, rootfs_deploydir, rootfs_spdxid, packages, spdx
 
     image = oe.spdx.SPDXPackage()
     image.name = d.getVar("PN")
-    image.versionInfo = d.getVar("PV")
+    image.versionInfo = d.getVar("SPDX_PACKAGE_VERSION")
     image.SPDXID = rootfs_spdxid
     image.supplier = d.getVar("SPDX_SUPPLIER")
 
diff --git a/meta/conf/bitbake.conf b/meta/conf/bitbake.conf
index a3300fc172..b1f8ac5b11 100644
--- a/meta/conf/bitbake.conf
+++ b/meta/conf/bitbake.conf
@@ -944,6 +944,8 @@ TRANSLATED_TARGET_ARCH ??= "${@d.getVar('TARGET_ARCH').replace("_", "-")}"
 
 # Set a default umask to use for tasks for determinism
 BB_DEFAULT_UMASK ??= "022"
+# The umask to use for shared files (e.g. DL_DIR and SSTATE_DIR)
+OE_SHARED_UMASK ??= "002"
 
 # Complete output from bitbake
 BB_CONSOLELOG ?= "${LOG_DIR}/cooker/${MACHINE}/${DATETIME}.log"
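For illustration, a minimal sketch of the pattern the sstate.bbclass hunks above use to honour this variable when creating shared directories; it assumes a BitBake python task context where `d` is available:

```python
# Convert the shared umask (an octal string) and apply it while creating a
# shared directory such as DL_DIR; mirrors the pstaging_fetch() change above.
import bb.utils

shared_umask = bb.utils.to_filemode(d.getVar("OE_SHARED_UMASK"))
with bb.utils.umask(shared_umask):
    bb.utils.mkdirhier(d.getVar("DL_DIR"))
```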
diff --git a/meta/conf/distro/include/maintainers.inc b/meta/conf/distro/include/maintainers.inc
index d94fb693e3..4d51219c94 100644
--- a/meta/conf/distro/include/maintainers.inc
+++ b/meta/conf/distro/include/maintainers.inc
@@ -718,6 +718,8 @@ RECIPE_MAINTAINER:pn-python3-snowballstemmer = "Tim Orling <tim.orling@konsulko.
 RECIPE_MAINTAINER:pn-python3-sortedcontainers = "Tim Orling <tim.orling@konsulko.com>"
 RECIPE_MAINTAINER:pn-python3-spdx-tools = "Marta Rybczynska <marta.rybczynska@ygreky.com>"
 RECIPE_MAINTAINER:pn-python3-sphinx = "Trevor Gamblin <tgamblin@baylibre.com>"
+RECIPE_MAINTAINER:pn-python3-sphinx-argparse = "Antonin Godard <antonin.godard@bootlin.com>"
+RECIPE_MAINTAINER:pn-python3-sphinx-copybutton = "Antonin Godard <antonin.godard@bootlin.com>"
 RECIPE_MAINTAINER:pn-python3-sphinx-rtd-theme = "Tim Orling <tim.orling@konsulko.com>"
 RECIPE_MAINTAINER:pn-python3-sphinxcontrib-applehelp = "Tim Orling <tim.orling@konsulko.com>"
 RECIPE_MAINTAINER:pn-python3-sphinxcontrib-devhelp = "Tim Orling <tim.orling@konsulko.com>"
diff --git a/meta/conf/distro/include/ptest-packagelists.inc b/meta/conf/distro/include/ptest-packagelists.inc
index e06731ece7..4253c7b062 100644
--- a/meta/conf/distro/include/ptest-packagelists.inc
+++ b/meta/conf/distro/include/ptest-packagelists.inc
@@ -78,6 +78,7 @@ PTESTS_FAST = "\
     python3-uritools \
     python3-wcwidth \
     python3-webcolors \
+    python3-wheel \
     qemu \
     quilt \
     rpm-sequoia \
diff --git a/meta/conf/sanity.conf b/meta/conf/sanity.conf
index 3692007e96..474816797a 100644
--- a/meta/conf/sanity.conf
+++ b/meta/conf/sanity.conf
@@ -3,7 +3,7 @@
 # See sanity.bbclass
 #
 # Expert users can confirm their sanity with "touch conf/sanity.conf"
-BB_MIN_VERSION = "2.15.0"
+BB_MIN_VERSION = "2.15.1"
 
 SANITY_ABIFILE = "${TMPDIR}/abi_version"
 
diff --git a/meta/lib/oe/license.py b/meta/lib/oe/license.py
index 6f882c3812..6e55fa1e7f 100644
--- a/meta/lib/oe/license.py
+++ b/meta/lib/oe/license.py
@@ -462,3 +462,18 @@ def skip_incompatible_package_licenses(d, pkgs):
             skipped_pkgs[pkg] = incompatible_lic
 
     return skipped_pkgs
+
+def tidy_licenses(value):
+    """
+    Flat, split and sort licenses.
+    """
+    from oe.license import flattened_licenses
+
+    def _choose(a, b):
+        str_a, str_b = sorted((" & ".join(a), " & ".join(b)), key=str.casefold)
+        return ["(%s | %s)" % (str_a, str_b)]
+
+    if not isinstance(value, str):
+        value = " & ".join(value)
+
+    return sorted(list(set(flattened_licenses(value, _choose))), key=str.casefold)
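A brief sketch of what the new helper produces for a few typical LICENSE expressions; the expected output in the comments follows from the implementation above:

```python
# tidy_licenses() flattens a license expression (or iterable), deduplicates it
# and returns a case-insensitively sorted list; alternatives stay grouped.
from oe.license import tidy_licenses

print(tidy_licenses("MIT & Apache-2.0 & MIT"))
# ['Apache-2.0', 'MIT']
print(tidy_licenses(["GPL-2.0-only", "MIT"]))   # non-strings are joined with " & "
# ['GPL-2.0-only', 'MIT']
print(tidy_licenses("BSD-3-Clause | MIT"))
# ['(BSD-3-Clause | MIT)']
```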
diff --git a/meta/lib/oeqa/sdk/buildtools-docs-cases/README b/meta/lib/oeqa/buildtools-docs/cases/README
index f8edbc7dad..f8edbc7dad 100644
--- a/meta/lib/oeqa/sdk/buildtools-docs-cases/README
+++ b/meta/lib/oeqa/buildtools-docs/cases/README
diff --git a/meta/lib/oeqa/sdk/buildtools-docs-cases/build.py b/meta/lib/oeqa/buildtools-docs/cases/build.py
index 6e3ee94292..6e3ee94292 100644
--- a/meta/lib/oeqa/sdk/buildtools-docs-cases/build.py
+++ b/meta/lib/oeqa/buildtools-docs/cases/build.py
diff --git a/meta/lib/oeqa/sdk/buildtools-cases/README b/meta/lib/oeqa/buildtools/cases/README
index d4f20faa9f..d4f20faa9f 100644
--- a/meta/lib/oeqa/sdk/buildtools-cases/README
+++ b/meta/lib/oeqa/buildtools/cases/README
diff --git a/meta/lib/oeqa/sdk/buildtools-cases/build.py b/meta/lib/oeqa/buildtools/cases/build.py
index c85c32496b..c85c32496b 100644
--- a/meta/lib/oeqa/sdk/buildtools-cases/build.py
+++ b/meta/lib/oeqa/buildtools/cases/build.py
diff --git a/meta/lib/oeqa/sdk/buildtools-cases/gcc.py b/meta/lib/oeqa/buildtools/cases/gcc.py
index a62c4d0bc4..a62c4d0bc4 100644
--- a/meta/lib/oeqa/sdk/buildtools-cases/gcc.py
+++ b/meta/lib/oeqa/buildtools/cases/gcc.py
diff --git a/meta/lib/oeqa/sdk/buildtools-cases/https.py b/meta/lib/oeqa/buildtools/cases/https.py
index 4525e3d758..4525e3d758 100644
--- a/meta/lib/oeqa/sdk/buildtools-cases/https.py
+++ b/meta/lib/oeqa/buildtools/cases/https.py
diff --git a/meta/lib/oeqa/sdk/buildtools-cases/sanity.py b/meta/lib/oeqa/buildtools/cases/sanity.py
index a55d456656..a55d456656 100644
--- a/meta/lib/oeqa/sdk/buildtools-cases/sanity.py
+++ b/meta/lib/oeqa/buildtools/cases/sanity.py
diff --git a/meta/lib/oeqa/sdk/testsdk.py b/meta/lib/oeqa/sdk/testsdk.py
index 52b702b6a2..cffcf9f49a 100644
--- a/meta/lib/oeqa/sdk/testsdk.py
+++ b/meta/lib/oeqa/sdk/testsdk.py
@@ -31,6 +31,28 @@ class TestSDK(TestSDKBase):
     context_class = OESDKTestContext
     test_type = 'sdk'
 
+    def sdk_dir_names(self, d):
+        """Return list from TESTSDK_CASE_DIRS."""
+        testdirs = d.getVar("TESTSDK_CASE_DIRS")
+        if testdirs:
+            return testdirs.split()
+
+        bb.fatal("TESTSDK_CASE_DIRS unset, can't find SDK test directories.")
+
+    def get_sdk_paths(self, d):
+        """
+        Return a list of paths where SDK test cases reside.
+
+        SDK tests are expected in <LAYER_DIR>/lib/oeqa/<dirname>/cases
+        """
+        paths = []
+        for layer in d.getVar("BBLAYERS").split():
+            for dirname in self.sdk_dir_names(d):
+                case_path = os.path.join(layer, "lib", "oeqa", dirname, "cases")
+                if os.path.isdir(case_path):
+                    paths.append(case_path)
+        return paths
+
     def get_tcname(self, d):
         """
         Get the name of the SDK file
@@ -115,7 +137,7 @@ class TestSDK(TestSDKBase):
 
         try:
             modules = (d.getVar("TESTSDK_SUITES") or "").split()
-            tc.loadTests(self.context_executor_class.default_cases, modules)
+            tc.loadTests(self.get_sdk_paths(d), modules)
         except Exception as e:
             import traceback
             bb.fatal("Loading tests failed:\n%s" % traceback.format_exc())
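To make the lookup concrete, here is a small sketch of how `get_sdk_paths()` assembles candidate case directories; the layer paths and variable values below are hypothetical:

```python
# Reproduces only the path construction: for each layer in BBLAYERS and each
# name in TESTSDK_CASE_DIRS, look for <layer>/lib/oeqa/<name>/cases.
import os

bblayers = ["/srv/build/poky/meta", "/srv/build/meta-example"]
case_dirs = ["buildtools"]   # e.g. TESTSDK_CASE_DIRS from buildtools-tarball.bb

candidates = [os.path.join(layer, "lib", "oeqa", name, "cases")
              for layer in bblayers
              for name in case_dirs]
print(candidates)
# Only the candidates that exist on disk end up in the list passed to tc.loadTests().
```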
diff --git a/meta/lib/oeqa/selftest/cases/devtool.py b/meta/lib/oeqa/selftest/cases/devtool.py
index 74a7727cc0..05f228f03e 100644
--- a/meta/lib/oeqa/selftest/cases/devtool.py
+++ b/meta/lib/oeqa/selftest/cases/devtool.py
@@ -154,7 +154,7 @@ class DevtoolTestCase(OESelftestTestCase):
                 value = invalue
                 invar = None
             elif '=' in line:
-                splitline = line.split('=', 1)
+                splitline = re.split(r"[?+:]*=[+]?", line, 1)
                 var = splitline[0].rstrip()
                 value = splitline[1].strip().strip('"')
                 if value.endswith('\\'):
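For reference, a quick sketch of what the widened split accepts compared to the old plain `'='` split; the sample lines are made up:

```python
# The pattern from the hunk above also handles ?=, ??=, :=, += and =+ operators,
# so the parsed variable name no longer keeps a trailing operator character.
import re

for line in ['FOO = "1"', 'BAR ?= "2"', 'BAZ += "3"', 'QUX:append = " extra"']:
    var, value = re.split(r"[?+:]*=[+]?", line, maxsplit=1)
    print(var.rstrip(), "->", value.strip().strip('"'))
```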
diff --git a/meta/lib/oeqa/selftest/cases/recipetool.py b/meta/lib/oeqa/selftest/cases/recipetool.py
index 2a91f6c7ae..0bd724c8ee 100644
--- a/meta/lib/oeqa/selftest/cases/recipetool.py
+++ b/meta/lib/oeqa/selftest/cases/recipetool.py
@@ -757,13 +757,12 @@ class RecipetoolCreateTests(RecipetoolBase):
 
     def test_recipetool_create_go(self):
         # Basic test to check go recipe generation
+        self.maxDiff = None
+
         temprecipe = os.path.join(self.tempdir, 'recipe')
         os.makedirs(temprecipe)
 
         recipefile = os.path.join(temprecipe, 'recipetool-go-test_git.bb')
-        deps_require_file = os.path.join(temprecipe, 'recipetool-go-test', 'recipetool-go-test-modules.inc')
-        lics_require_file = os.path.join(temprecipe, 'recipetool-go-test', 'recipetool-go-test-licenses.inc')
-        modules_txt_file = os.path.join(temprecipe, 'recipetool-go-test', 'modules.txt')
 
         srcuri = 'https://git.yoctoproject.org/recipetool-go-test.git'
         srcrev = "c3e213c01b6c1406b430df03ef0d1ae77de5d2f7"
@@ -771,13 +770,11 @@ class RecipetoolCreateTests(RecipetoolBase):
 
         result = runCmd('recipetool create -o %s %s -S %s -B %s' % (temprecipe, srcuri, srcrev, srcbranch))
 
-        self.maxDiff = None
-        inherits = ['go-vendor']
+        inherits = ['go-mod', 'go-mod-update-modules']
 
         checkvars = {}
         checkvars['GO_IMPORT'] = "git.yoctoproject.org/recipetool-go-test"
-        checkvars['SRC_URI'] = {'git://${GO_IMPORT};destsuffix=git/src/${GO_IMPORT};nobranch=1;name=${BPN};protocol=https',
-                                'file://modules.txt'}
+        checkvars['SRC_URI'] = {'git://${GO_IMPORT};protocol=https;nobranch=1;destsuffix=${GO_SRCURI_DESTSUFFIX}'}
         checkvars['LIC_FILES_CHKSUM'] = {
             'file://src/${GO_IMPORT}/LICENSE;md5=4e3933dd47afbf115e484d11385fb3bd',
             'file://src/${GO_IMPORT}/is/LICENSE;md5=62beaee5a116dd1e80161667b1df39ab'
@@ -786,26 +783,16 @@ class RecipetoolCreateTests(RecipetoolBase):
         self._test_recipe_contents(recipefile, checkvars, inherits)
         self.assertNotIn('Traceback', result.output)
 
+        lics_require_file = os.path.join(temprecipe, 'recipetool-go-test-licenses.inc')
+        self.assertFileExists(lics_require_file)
         checkvars = {}
-        checkvars['VENDORED_LIC_FILES_CHKSUM'] = set(
-            ['file://src/${GO_IMPORT}/vendor/github.com/godbus/dbus/v5/LICENSE;md5=09042bd5c6c96a2b9e45ddf1bc517eed',
-             'file://src/${GO_IMPORT}/vendor/github.com/matryer/is/LICENSE;md5=62beaee5a116dd1e80161667b1df39ab'])
-        self.assertTrue(os.path.isfile(lics_require_file))
+        checkvars['LIC_FILES_CHKSUM'] = {'file://pkg/mod/github.com/godbus/dbus/v5@v5.1.0/LICENSE;md5=09042bd5c6c96a2b9e45ddf1bc517eed;spdx=BSD-2-Clause'}
         self._test_recipe_contents(lics_require_file, checkvars, [])
 
-        # make sure that dependencies don't mention local directory ./matryer/is
-        dependencies = \
-            [ ('github.com/godbus/dbus','v5.1.0', 'github.com/godbus/dbus/v5', '/v5', ''),
-            ]
-
-        src_uri = set()
-        for d in dependencies:
-            src_uri.add(self._go_urifiy(*d))
-
+        deps_require_file = os.path.join(temprecipe, 'recipetool-go-test-go-mods.inc')
+        self.assertFileExists(deps_require_file)
         checkvars = {}
-        checkvars['GO_DEPENDENCIES_SRC_URI'] = src_uri
-
-        self.assertTrue(os.path.isfile(deps_require_file))
+        checkvars['SRC_URI'] = {'gomod://github.com/godbus/dbus/v5;version=v5.1.0;sha256sum=03dfa8e71089a6f477310d15c4d3a036d82d028532881b50fee254358e782ad9'}
         self._test_recipe_contents(deps_require_file, checkvars, [])
 
 class RecipetoolTests(RecipetoolBase):
diff --git a/meta/recipes-core/meta/buildtools-docs-tarball.bb b/meta/recipes-core/meta/buildtools-docs-tarball.bb
index b9ef68eb6d..98d47f7b71 100644
--- a/meta/recipes-core/meta/buildtools-docs-tarball.bb
+++ b/meta/recipes-core/meta/buildtools-docs-tarball.bb
@@ -7,6 +7,8 @@ LICENSE = "MIT"
 # Add nativesdk equivalent of build-essentials
 TOOLCHAIN_HOST_TASK += "\
     nativesdk-python3-sphinx \
+    nativesdk-python3-sphinx-argparse \
+    nativesdk-python3-sphinx-copybutton \
     nativesdk-python3-sphinx-rtd-theme \
     nativesdk-python3-pyyaml \
     nativesdk-rsvg \
@@ -16,4 +18,5 @@ TOOLCHAIN_OUTPUTNAME = "${SDK_ARCH}-buildtools-docs-nativesdk-standalone-${DISTR
 
 SDK_TITLE = "Docs Build tools tarball"
 
-TESTSDK_CASES = "buildtools-docs-cases"
+# Directory that contains testcases
+TESTSDK_CASE_DIRS = "buildtools-docs"
\ No newline at end of file
diff --git a/meta/recipes-core/meta/buildtools-tarball.bb b/meta/recipes-core/meta/buildtools-tarball.bb
index 6fa6d93a3d..02117ab84d 100644
--- a/meta/recipes-core/meta/buildtools-tarball.bb
+++ b/meta/recipes-core/meta/buildtools-tarball.bb
@@ -124,22 +124,7 @@ TOOLCHAIN_NEED_CONFIGSITE_CACHE = ""
 # The recipe doesn't need any default deps
 INHIBIT_DEFAULT_DEPS = "1"
 
-# Directory in testsdk that contains testcases
-TESTSDK_CASES = "buildtools-cases"
+inherit testsdk
 
-# We have our own code, avoid deferred inherit
-SDK_CLASSES:remove = "testsdk"
-
-python do_testsdk() {
-    import oeqa.sdk.testsdk
-    testsdk = oeqa.sdk.testsdk.TestSDK()
-
-    cases_path = os.path.join(os.path.abspath(os.path.dirname(oeqa.sdk.testsdk.__file__)), d.getVar("TESTSDK_CASES"))
-    testsdk.context_executor_class.default_cases = [cases_path,]
-
-    testsdk.run(d)
-}
-addtask testsdk
-do_testsdk[nostamp] = "1"
-do_testsdk[network] = "1"
-do_testsdk[depends] += "xz-native:do_populate_sysroot"
+# Directory that contains testcases
+TESTSDK_CASE_DIRS = "buildtools"
\ No newline at end of file
diff --git a/meta/recipes-devtools/dosfstools/dosfstools/0001-fsck.fat-Adhere-to-the-fsck-exit-codes.patch b/meta/recipes-devtools/dosfstools/dosfstools/0001-fsck.fat-Adhere-to-the-fsck-exit-codes.patch
new file mode 100644
index 0000000000..3d2ce48723
--- /dev/null
+++ b/meta/recipes-devtools/dosfstools/dosfstools/0001-fsck.fat-Adhere-to-the-fsck-exit-codes.patch
@@ -0,0 +1,214 @@
1From 9d165145b9f9c20a56e111360fbc2003c2b28cba Mon Sep 17 00:00:00 2001
2From: Ricardo Simoes <ricardo.simoes@pt.bosch.com>
3Date: Thu, 26 Jun 2025 08:14:29 +0100
4Subject: [PATCH] fsck.fat: Adhere to the fsck exit codes
5
6fsck.fat is used as a filesystem-specific checker for the `fsck`. This
7also causes `fsck` to return the same exit-codes given by `fsck.fat`.
8
9In most cases this is already the case. One exception to that comes when
10checking a read-only filesystem. In that case `fsck.fat` will return 6,
11which for `fsck` means "Filesystem errors left uncorrected" and "System
12should reboot". When a more proper response would be to return 8,
13"Operational Error".
14
15This commit solves that problem by introducing a new header file which
16standardizes the exit codes used by `fsck.fat`.
17
18Signed-off-by: Ricardo Ungerer <ungerer.ricardo@gmail.com>
19
20Upstream-Status: Inactive-Upstream [lastcommit: 2023, lastrelease: 2021]
21Upstream-Status: Submitted [https://github.com/dosfstools/dosfstools/pull/217]
22---
23 src/Makefile.am | 4 ++--
24 src/common.c | 8 ++++----
25 src/exit_codes.h | 15 +++++++++++++++
26 src/fsck.fat.c | 23 ++++++++++++-----------
27 src/io.c | 3 ++-
28 5 files changed, 35 insertions(+), 18 deletions(-)
29 create mode 100644 src/exit_codes.h
30
31diff --git a/src/Makefile.am b/src/Makefile.am
32index a389046..48f00dd 100644
33--- a/src/Makefile.am
34+++ b/src/Makefile.am
35@@ -23,7 +23,7 @@ EXTRA_DIST = blkdev/README
36
37 charconv_common_sources = charconv.c charconv.h
38 charconv_common_ldadd = $(LIBICONV)
39-fscklabel_common_sources = boot.c boot.h common.c common.h \
40+fscklabel_common_sources = boot.c boot.h common.c common.h exit_codes.h \
41 fat.c fat.h io.c io.h msdos_fs.h \
42 $(charconv_common_sources) \
43 fsck.fat.h endian_compat.h
44@@ -38,7 +38,7 @@ devinfo_common_sources = device_info.c device_info.h \
45 blkdev/blkdev.c blkdev/blkdev.h \
46 blkdev/linux_version.c blkdev/linux_version.h
47 mkfs_fat_SOURCES = mkfs.fat.c msdos_fs.h common.c common.h endian_compat.h \
48- $(charconv_common_sources) $(devinfo_common_sources)
49+ exit_codes.h $(charconv_common_sources) $(devinfo_common_sources)
50 mkfs_fat_CPPFLAGS = -I$(srcdir)/blkdev
51 mkfs_fat_CFLAGS = $(AM_CFLAGS)
52 mkfs_fat_LDADD = $(charconv_common_ldadd)
53diff --git a/src/common.c b/src/common.c
54index 4f1afcb..089d4b3 100644
55--- a/src/common.c
56+++ b/src/common.c
57@@ -38,7 +38,7 @@
58
59 #include "common.h"
60 #include "charconv.h"
61-
62+#include "exit_codes.h"
63
64 int interactive;
65 int write_immed;
66@@ -62,7 +62,7 @@ void die(const char *msg, ...)
67 vfprintf(stderr, msg, args);
68 va_end(args);
69 fprintf(stderr, "\n");
70- exit(1);
71+ exit(OPERATIONAL_ERROR);
72 }
73
74 void pdie(const char *msg, ...)
75@@ -205,7 +205,7 @@ int get_choice(int noninteractive_result, const char *noninteractive_msg,
76 } while (choice == '\n'); /* filter out enter presses */
77
78 if (choice == EOF)
79- exit(1);
80+ exit(USAGE_OR_SYNTAX_ERROR);
81
82 printf("%c\n", choice);
83
84@@ -235,7 +235,7 @@ int get_choice(int noninteractive_result, const char *noninteractive_msg,
85 inhibit_quit_choice = 0;
86
87 if (quit_choice == 1)
88- exit(0);
89+ exit(NO_ERRORS);
90 }
91 }
92
93diff --git a/src/exit_codes.h b/src/exit_codes.h
94new file mode 100644
95index 0000000..f67d22e
96--- /dev/null
97+++ b/src/exit_codes.h
98@@ -0,0 +1,15 @@
99+#ifndef _EXIT_CODES_H
100+#define _EXIT_CODES_H
101+
102+/* Codes as defined by fsck.
103+ For more information, see fsck manpage. */
104+#define NO_ERRORS 0
105+#define FS_ERRORS_CORRECTED 1
106+#define SYSTEM_SHOULD_BE_REBOOTED 2
107+#define FS_ERRORS_LEFT_UNCORRECTED 4
108+#define OPERATIONAL_ERROR 8
109+#define USAGE_OR_SYNTAX_ERROR 16
110+#define CHECKING_CANCELED_BY_USER 32
111+#define SHARED_LIB_ERROR 128
112+
113+#endif
114diff --git a/src/fsck.fat.c b/src/fsck.fat.c
115index 8b02b57..42e3ab4 100644
116--- a/src/fsck.fat.c
117+++ b/src/fsck.fat.c
118@@ -46,6 +46,7 @@
119 #include "file.h"
120 #include "check.h"
121 #include "charconv.h"
122+#include "exit_codes.h"
123
124 int rw = 0, list = 0, test = 0, verbose = 0;
125 long fat_table = 0;
126@@ -147,10 +148,10 @@ int main(int argc, char **argv)
127 codepage = strtol(optarg, &tmp, 10);
128 if (!*optarg || isspace(*optarg) || *tmp || errno || codepage < 0 || codepage > INT_MAX) {
129 fprintf(stderr, "Invalid codepage : %s\n", optarg);
130- usage(argv[0], 2);
131+ usage(argv[0], USAGE_OR_SYNTAX_ERROR);
132 }
133 if (!set_dos_codepage(codepage))
134- usage(argv[0], 2);
135+ usage(argv[0], USAGE_OR_SYNTAX_ERROR);
136 break;
137 case 'd':
138 file_add(optarg, fdt_drop);
139@@ -163,7 +164,7 @@ int main(int argc, char **argv)
140 fat_table = strtol(optarg, &tmp, 10);
141 if (!*optarg || isspace(*optarg) || *tmp || errno || fat_table < 0 || fat_table > 255) {
142 fprintf(stderr, "Invalid FAT table : %s\n", optarg);
143- usage(argv[0], 2);
144+ usage(argv[0], USAGE_OR_SYNTAX_ERROR);
145 }
146 break;
147 case 'l':
148@@ -202,31 +203,31 @@ int main(int argc, char **argv)
149 atari_format = 1;
150 } else {
151 fprintf(stderr, "Unknown variant: %s\n", optarg);
152- usage(argv[0], 2);
153+ usage(argv[0], USAGE_OR_SYNTAX_ERROR);
154 }
155 break;
156 case 'w':
157 write_immed = 1;
158 break;
159 case OPT_HELP:
160- usage(argv[0], 0);
161+ usage(argv[0], EXIT_SUCCESS);
162 break;
163 case '?':
164- usage(argv[0], 2);
165+ usage(argv[0], USAGE_OR_SYNTAX_ERROR);
166 break;
167 default:
168 fprintf(stderr,
169 "Internal error: getopt_long() returned unexpected value %d\n", c);
170- exit(3);
171+ exit(OPERATIONAL_ERROR);
172 }
173 if (!set_dos_codepage(-1)) /* set default codepage if none was given in command line */
174- exit(2);
175+ exit(OPERATIONAL_ERROR);
176 if ((test || write_immed) && !rw) {
177 fprintf(stderr, "-t and -w can not be used in read only mode\n");
178- exit(2);
179+ exit(USAGE_OR_SYNTAX_ERROR);
180 }
181 if (optind != argc - 1)
182- usage(argv[0], 2);
183+ usage(argv[0], USAGE_OR_SYNTAX_ERROR);
184
185 printf("fsck.fat " VERSION " (" VERSION_DATE ")\n");
186 fs_open(argv[optind], rw);
187@@ -285,5 +286,5 @@ exit:
188 n_files, (unsigned long)fs.data_clusters - free_clusters,
189 (unsigned long)fs.data_clusters);
190
191- return fs_close(rw) ? 1 : 0;
192+ return fs_close(rw) ? FS_ERRORS_CORRECTED : NO_ERRORS;
193 }
194diff --git a/src/io.c b/src/io.c
195index 8c0c3b2..8bd1ae5 100644
196--- a/src/io.c
197+++ b/src/io.c
198@@ -44,6 +44,7 @@
199 #include "fsck.fat.h"
200 #include "common.h"
201 #include "io.h"
202+#include "exit_codes.h"
203
204 typedef struct _change {
205 void *data;
206@@ -60,7 +61,7 @@ void fs_open(const char *path, int rw)
207 {
208 if ((fd = open(path, rw ? O_RDWR : O_RDONLY)) < 0) {
209 perror("open");
210- exit(6);
211+ exit(OPERATIONAL_ERROR);
212 }
213 changes = last = NULL;
214 did_change = 0;
diff --git a/meta/recipes-devtools/dosfstools/dosfstools/0002-manpages-Document-fsck.fat-new-exit-codes.patch b/meta/recipes-devtools/dosfstools/dosfstools/0002-manpages-Document-fsck.fat-new-exit-codes.patch
new file mode 100644
index 0000000000..29bba7b093
--- /dev/null
+++ b/meta/recipes-devtools/dosfstools/dosfstools/0002-manpages-Document-fsck.fat-new-exit-codes.patch
@@ -0,0 +1,46 @@
1From 8d703216d2ea3247092a08adb0c37b38eb77ccc7 Mon Sep 17 00:00:00 2001
2From: Ricardo Ungerer <ungerer.ricardo@gmail.com>
3Date: Wed, 21 May 2025 07:18:15 +0100
4Subject: [PATCH 2/3] manpages: Document fsck.fat new exit codes
5
6Signed-off-by: Ricardo Ungerer <ungerer.ricardo@gmail.com>
7
8Upstream-Status: Inactive-Upstream [lastcommit: 2023, lastrelease: 2021]
9Upstream-Status: Submitted [https://github.com/dosfstools/dosfstools/pull/217]
10---
11 manpages/fsck.fat.8.in | 18 +++++++++++++-----
12 1 file changed, 13 insertions(+), 5 deletions(-)
13
14diff --git a/manpages/fsck.fat.8.in b/manpages/fsck.fat.8.in
15index 824a83d..557aa4c 100644
16--- a/manpages/fsck.fat.8.in
17+++ b/manpages/fsck.fat.8.in
18@@ -222,13 +222,21 @@ Display help message describing usage and options then exit.
19 .\" ----------------------------------------------------------------------------
20 .SH "EXIT STATUS"
21 .IP "0" 4
22-No recoverable errors have been detected.
23+No errors
24 .IP "1" 4
25-Recoverable errors have been detected or \fBfsck.fat\fP has discovered an
26-internal inconsistency.
27+Filesystem errors corrected
28 .IP "2" 4
29-Usage error.
30-\fBfsck.fat\fP did not access the filesystem.
31+System should be rebooted
32+.IP "4" 4
33+Filesystem errors left uncorrected
34+.IP "8" 4
35+Operational error
36+.IP "16" 4
37+Usage or syntax error
38+.IP "32" 4
39+Checking canceled by user request
40+.IP "128" 4
41+Shared-library error
42 .\" ----------------------------------------------------------------------------
43 .SH FILES
44 .IP "\fIfsck0000.rec\fP, \fIfsck0001.rec\fP, ..." 4
45--
462.25.1
diff --git a/meta/recipes-devtools/dosfstools/dosfstools_4.2.bb b/meta/recipes-devtools/dosfstools/dosfstools_4.2.bb
index 175fa265ef..86fb68f664 100644
--- a/meta/recipes-devtools/dosfstools/dosfstools_4.2.bb
+++ b/meta/recipes-devtools/dosfstools/dosfstools_4.2.bb
@@ -10,11 +10,12 @@ LICENSE = "GPL-3.0-only"
 LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504"
 
 SRC_URI = "${GITHUB_BASE_URI}/download/v${PV}/${BP}.tar.gz \
-           "
+           file://source-date-epoch.patch \
+           file://0001-fsck.fat-Adhere-to-the-fsck-exit-codes.patch \
+           file://0002-manpages-Document-fsck.fat-new-exit-codes.patch \
+           "
 SRC_URI[sha256sum] = "64926eebf90092dca21b14259a5301b7b98e7b1943e8a201c7d726084809b527"
 
-SRC_URI += "file://source-date-epoch.patch"
-
 inherit autotools gettext pkgconfig update-alternatives github-releases
 
 EXTRA_OECONF = "--enable-compat-symlinks --without-iconv"
diff --git a/meta/recipes-devtools/json-c/json-c_0.18.bb b/meta/recipes-devtools/json-c/json-c_0.18.bb
index ece320d66c..c112aacf4b 100644
--- a/meta/recipes-devtools/json-c/json-c_0.18.bb
+++ b/meta/recipes-devtools/json-c/json-c_0.18.bb
@@ -19,8 +19,7 @@ UPSTREAM_CHECK_REGEX = "json-c-(?P<pver>\d+(\.\d+)+)-\d+"
 
 RPROVIDES:${PN} = "libjson"
 
-# - '-Werror' must be disabled for ICECC builds
-# - Apps aren't needed/packaged and their CMakeLists.txt is incompatible with CMake 4+.
+# Apps aren't needed/packaged and their CMakeLists.txt is incompatible with CMake 4+.
 EXTRA_OECMAKE = "-DDISABLE_WERROR=ON \
                  -DBUILD_APPS=OFF \
 "
diff --git a/meta/recipes-devtools/mtools/mtools_4.0.48.bb b/meta/recipes-devtools/mtools/mtools_4.0.49.bb
index 646735f3b3..294b2f37b2 100644
--- a/meta/recipes-devtools/mtools/mtools_4.0.48.bb
+++ b/meta/recipes-devtools/mtools/mtools_4.0.49.bb
@@ -24,7 +24,7 @@ RRECOMMENDS:${PN}:libc-glibc = "\
     glibc-gconv-ibm866 \
     glibc-gconv-ibm869 \
     "
-SRC_URI[sha256sum] = "03c29aac8735dd7154a989fbc29eaf2b506121ae1c3a35cd0bf2a02e94d271a9"
+SRC_URI[sha256sum] = "6fe5193583d6e7c59da75e63d7234f76c0b07caf33b103894f46f66a871ffc9f"
 
 SRC_URI = "${GNU_MIRROR}/mtools/mtools-${PV}.tar.bz2 \
            file://mtools-makeinfo.patch \
diff --git a/meta/recipes-devtools/ninja/ninja_1.12.1.bb b/meta/recipes-devtools/ninja/ninja_1.13.0.bb
index 5aff82edec..a5fa8f1c9e 100644
--- a/meta/recipes-devtools/ninja/ninja_1.12.1.bb
+++ b/meta/recipes-devtools/ninja/ninja_1.13.0.bb
@@ -1,14 +1,18 @@
 SUMMARY = "Ninja is a small build system with a focus on speed."
 HOMEPAGE = "https://ninja-build.org/"
-DESCRIPTION = "Ninja is a small build system with a focus on speed. It differs from other build systems in two major respects: it is designed to have its input files generated by a higher-level build system, and it is designed to run builds as fast as possible."
+DESCRIPTION = "Ninja is a small build system with a focus on speed. \
+It differs from other build systems in two major respects: \
+it is designed to have its input files generated by a higher-level build system, \
+and it is designed to run builds as fast as possible."
+
 LICENSE = "Apache-2.0"
 LIC_FILES_CHKSUM = "file://COPYING;md5=a81586a64ad4e476c791cda7e2f2c52e"
 
 DEPENDS = "re2c-native ninja-native"
 
-SRCREV = "2daa09ba270b0a43e1929d29b073348aa985dfaa"
+SRCREV = "b4d51f6ed5bed09dd2b70324df0d9cb4ecad2638"
 
-SRC_URI = "git://github.com/ninja-build/ninja.git;branch=release;protocol=https"
+SRC_URI = "git://github.com/ninja-build/ninja.git;branch=release;protocol=https;tag=v${PV}"
 UPSTREAM_CHECK_GITTAGREGEX = "v(?P<pver>.*)"
 
 do_configure[noexec] = "1"
diff --git a/meta/recipes-devtools/python/python3-sphinx-argparse_0.5.2.bb b/meta/recipes-devtools/python/python3-sphinx-argparse_0.5.2.bb
new file mode 100644
index 0000000000..554fb3eb51
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-sphinx-argparse_0.5.2.bb
@@ -0,0 +1,13 @@
1SUMMARY = "A sphinx extension that automatically documents argparse commands and options"
2HOMEPAGE = "https://sphinx-argparse.readthedocs.io/"
3LICENSE = "MIT"
4LIC_FILES_CHKSUM = "file://LICENCE.rst;md5=5c1cd8f13774629fee215681e66a1056"
5
6SRC_URI[sha256sum] = "e5352f8fa894b6fb6fda0498ba28a9f8d435971ef4bbc1a6c9c6414e7644f032"
7
8PYPI_PACKAGE = "sphinx_argparse"
9UPSTREAM_CHECK_PYPI_PACKAGE = "${PYPI_PACKAGE}"
10
11inherit pypi python_flit_core
12
13BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-sphinx-copybutton_0.5.2.bb b/meta/recipes-devtools/python/python3-sphinx-copybutton_0.5.2.bb
new file mode 100644
index 0000000000..0441804661
--- /dev/null
+++ b/meta/recipes-devtools/python/python3-sphinx-copybutton_0.5.2.bb
@@ -0,0 +1,10 @@
1SUMMARY = "Add a copy button to code blocks in Sphinx"
2HOMEPAGE = "https://sphinx-copybutton.readthedocs.io"
3LICENSE = "MIT"
4LIC_FILES_CHKSUM = "file://LICENSE;md5=c60e920848b6d2ecec51ea44a1a33bf0"
5
6SRC_URI[sha256sum] = "4cf17c82fb9646d1bc9ca92ac280813a3b605d8c421225fd9913154103ee1fbd"
7
8inherit setuptools3 pypi
9
10BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/python/python3-urllib3_2.4.0.bb b/meta/recipes-devtools/python/python3-urllib3_2.5.0.bb
index 7a4bffc05e..a4f3995730 100644
--- a/meta/recipes-devtools/python/python3-urllib3_2.4.0.bb
+++ b/meta/recipes-devtools/python/python3-urllib3_2.5.0.bb
@@ -3,7 +3,7 @@ HOMEPAGE = "https://github.com/urllib3/urllib3"
3LICENSE = "MIT" 3LICENSE = "MIT"
4LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=52d273a3054ced561275d4d15260ecda" 4LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=52d273a3054ced561275d4d15260ecda"
5 5
6SRC_URI[sha256sum] = "414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466" 6SRC_URI[sha256sum] = "3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760"
7 7
8inherit pypi python_hatchling 8inherit pypi python_hatchling
9 9
diff --git a/meta/recipes-devtools/python/python3-wheel_0.45.1.bb b/meta/recipes-devtools/python/python3-wheel_0.46.1.bb
index 8274e83747..058af2f0e7 100644
--- a/meta/recipes-devtools/python/python3-wheel_0.45.1.bb
+++ b/meta/recipes-devtools/python/python3-wheel_0.46.1.bb
@@ -4,9 +4,14 @@ SECTION = "devel/python"
4LICENSE = "MIT" 4LICENSE = "MIT"
5LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=7ffb0db04527cfe380e4f2726bd05ebf" 5LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=7ffb0db04527cfe380e4f2726bd05ebf"
6 6
7SRC_URI[sha256sum] = "661e1abd9198507b1409a20c02106d9670b2576e916d58f520316666abca6729" 7SRC_URI[sha256sum] = "fd477efb5da0f7df1d3c76c73c14394002c844451bd63229d8570f376f5e6a38"
8 8
9inherit python_flit_core pypi 9inherit python_flit_core pypi ptest-python-pytest
10
11RDEPENDS:${PN} += "python3-packaging"
12
 13# One test is skipped because it requires the "full" python3-flit, not just python3-flit-core
14RDEPENDS:${PN}-ptest += "python3-setuptools"
10 15
11BBCLASSEXTEND = "native nativesdk" 16BBCLASSEXTEND = "native nativesdk"
12 17
diff --git a/meta/recipes-devtools/tcf-agent/tcf-agent_1.8.0.bb b/meta/recipes-devtools/tcf-agent/tcf-agent_1.8.0.bb
index 1639ae84e9..f008c0c6de 100644
--- a/meta/recipes-devtools/tcf-agent/tcf-agent_1.8.0.bb
+++ b/meta/recipes-devtools/tcf-agent/tcf-agent_1.8.0.bb
@@ -49,6 +49,12 @@ CFLAGS:append:riscv64 = " ${LCL_STOP_SERVICES}"
49CFLAGS:append:riscv32 = " ${LCL_STOP_SERVICES}" 49CFLAGS:append:riscv32 = " ${LCL_STOP_SERVICES}"
50CFLAGS:append:loongarch64 = " ${LCL_STOP_SERVICES}" 50CFLAGS:append:loongarch64 = " ${LCL_STOP_SERVICES}"
51 51
 52# This works with the gcc-ranlib wrapper only because it exits without error when nothing
 53# is passed as an argument, whereas binutils ranlib and llvm ranlib do not and expect an input.
 54# Passing $@ ensures that the Makefile default target, which is the archive name in the tcf makefiles,
 55# is passed to RANLIB, so whichever ranlib is used the behaviour is identical.
56RANLIB:append = " $@"
57
52do_install() { 58do_install() {
53 oe_runmake install INSTALLROOT=${D} 59 oe_runmake install INSTALLROOT=${D}
54 install -d ${D}${sysconfdir}/init.d/ 60 install -d ${D}${sysconfdir}/init.d/
diff --git a/scripts/lib/recipetool/create.py b/scripts/lib/recipetool/create.py
index edb6467103..ef0ba974a9 100644
--- a/scripts/lib/recipetool/create.py
+++ b/scripts/lib/recipetool/create.py
@@ -18,6 +18,7 @@ from urllib.parse import urlparse, urldefrag, urlsplit
18import hashlib 18import hashlib
19import bb.fetch2 19import bb.fetch2
20logger = logging.getLogger('recipetool') 20logger = logging.getLogger('recipetool')
21from oe.license import tidy_licenses
21from oe.license_finder import find_licenses 22from oe.license_finder import find_licenses
22 23
23tinfoil = None 24tinfoil = None
@@ -764,6 +765,7 @@ def create_recipe(args):
764 extrafiles = extravalues.pop('extrafiles', {}) 765 extrafiles = extravalues.pop('extrafiles', {})
765 extra_pn = extravalues.pop('PN', None) 766 extra_pn = extravalues.pop('PN', None)
766 extra_pv = extravalues.pop('PV', None) 767 extra_pv = extravalues.pop('PV', None)
768 run_tasks = extravalues.pop('run_tasks', "").split()
767 769
768 if extra_pv and not realpv: 770 if extra_pv and not realpv:
769 realpv = extra_pv 771 realpv = extra_pv
@@ -824,7 +826,8 @@ def create_recipe(args):
824 extraoutdir = os.path.join(os.path.dirname(outfile), pn) 826 extraoutdir = os.path.join(os.path.dirname(outfile), pn)
825 bb.utils.mkdirhier(extraoutdir) 827 bb.utils.mkdirhier(extraoutdir)
826 for destfn, extrafile in extrafiles.items(): 828 for destfn, extrafile in extrafiles.items():
827 shutil.move(extrafile, os.path.join(extraoutdir, destfn)) 829 fn = destfn.format(pn=pn, pv=realpv)
830 shutil.move(extrafile, os.path.join(extraoutdir, fn))
828 831
829 lines = lines_before 832 lines = lines_before
830 lines_before = [] 833 lines_before = []
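Note on the hunk above: the keys of the extrafiles dictionary may now contain str.format() placeholders, and create_recipe() expands {pn} and {pv} before moving each file into place. A minimal Python sketch of that expansion, with hypothetical names and paths:

    import os

    # Hypothetical values standing in for what create_recipe() computes.
    pn, realpv = "hello-go", "1.2.3"
    extraoutdir = os.path.join("recipes-example/hello-go", pn)
    extrafiles = {"{pn}-licenses.inc": "/tmp/recipetool-abc/licenses.inc"}

    for destfn, extrafile in extrafiles.items():
        fn = destfn.format(pn=pn, pv=realpv)        # -> "hello-go-licenses.inc"
        target = os.path.join(extraoutdir, fn)
        print(target)                               # where shutil.move() would put it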
@@ -917,6 +920,10 @@ def create_recipe(args):
917 log_info_cond('Recipe %s has been created; further editing may be required to make it fully functional' % outfile, args.devtool) 920 log_info_cond('Recipe %s has been created; further editing may be required to make it fully functional' % outfile, args.devtool)
918 tinfoil.modified_files() 921 tinfoil.modified_files()
919 922
923 for task in run_tasks:
924 logger.info("Running task %s" % task)
925 tinfoil.build_file_sync(outfile, task)
926
920 if tempsrc: 927 if tempsrc:
921 if args.keep_temp: 928 if args.keep_temp:
922 logger.info('Preserving temporary directory %s' % tempsrc) 929 logger.info('Preserving temporary directory %s' % tempsrc)
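The two hunks above add a simple "run_tasks" hand-off: a handler can ask for tasks to be run on the freshly written recipe, and create_recipe() executes them once the file is on disk. A hedged sketch of that contract (tinfoil.build_file_sync() is the Tinfoil helper added elsewhere in this series; the tinfoil, outfile and logger objects come from recipetool itself):

    # Sketch only; mirrors the logic added to create_recipe() above.
    def run_requested_tasks(tinfoil, outfile, extravalues, logger):
        run_tasks = extravalues.pop('run_tasks', "").split()
        for task in run_tasks:
            logger.info("Running task %s", task)
            tinfoil.build_file_sync(outfile, task)

    # A handler requests it the way the Go handler does further down:
    extravalues = {"run_tasks": "update_modules"}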
@@ -944,16 +951,6 @@ def fixup_license(value):
944 return '(' + value + ')' 951 return '(' + value + ')'
945 return value 952 return value
946 953
947def tidy_licenses(value):
948 """Flat, split and sort licenses"""
949 from oe.license import flattened_licenses
950 def _choose(a, b):
951 str_a, str_b = sorted((" & ".join(a), " & ".join(b)), key=str.casefold)
952 return ["(%s | %s)" % (str_a, str_b)]
953 if not isinstance(value, str):
954 value = " & ".join(value)
955 return sorted(list(set(flattened_licenses(value, _choose))), key=str.casefold)
956
957def handle_license_vars(srctree, lines_before, handled, extravalues, d): 954def handle_license_vars(srctree, lines_before, handled, extravalues, d):
958 lichandled = [x for x in handled if x[0] == 'license'] 955 lichandled = [x for x in handled if x[0] == 'license']
959 if lichandled: 956 if lichandled:
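tidy_licenses() is no longer a local helper; it is imported from oe.license at the top of the file, and the moved implementation should behave like the one removed above (flatten, de-duplicate and sort case-insensitively, keeping "|" alternatives as a single parenthesised choice). A small usage sketch with expected results based on the removed code:

    from oe.license import tidy_licenses

    print(tidy_licenses("MIT & Apache-2.0 & MIT"))
    # expected: ['Apache-2.0', 'MIT']

    print(tidy_licenses("GPL-2.0-only | BSD-3-Clause"))
    # expected: ['(BSD-3-Clause | GPL-2.0-only)']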
diff --git a/scripts/lib/recipetool/create_go.py b/scripts/lib/recipetool/create_go.py
index 5cc53931f0..4b1fa39d13 100644
--- a/scripts/lib/recipetool/create_go.py
+++ b/scripts/lib/recipetool/create_go.py
@@ -10,13 +10,7 @@
10# 10#
11 11
12 12
13from collections import namedtuple
14from enum import Enum
15from html.parser import HTMLParser
16from recipetool.create import RecipeHandler, handle_license_vars 13from recipetool.create import RecipeHandler, handle_license_vars
17from recipetool.create import find_licenses, tidy_licenses, fixup_license
18from recipetool.create import determine_from_url
19from urllib.error import URLError, HTTPError
20 14
21import bb.utils 15import bb.utils
22import json 16import json
@@ -25,33 +19,20 @@ import os
25import re 19import re
26import subprocess 20import subprocess
27import sys 21import sys
28import shutil
29import tempfile 22import tempfile
30import urllib.parse
31import urllib.request
32 23
33 24
34GoImport = namedtuple('GoImport', 'root vcs url suffix')
35logger = logging.getLogger('recipetool') 25logger = logging.getLogger('recipetool')
36CodeRepo = namedtuple(
37 'CodeRepo', 'path codeRoot codeDir pathMajor pathPrefix pseudoMajor')
38 26
39tinfoil = None 27tinfoil = None
40 28
41# Regular expression to parse pseudo semantic version
42# see https://go.dev/ref/mod#pseudo-versions
43re_pseudo_semver = re.compile(
44 r"^v[0-9]+\.(0\.0-|\d+\.\d+-([^+]*\.)?0\.)(?P<utc>\d{14})-(?P<commithash>[A-Za-z0-9]+)(\+[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*)?$")
45# Regular expression to parse semantic version
46re_semver = re.compile(
47 r"^v(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$")
48
49 29
50def tinfoil_init(instance): 30def tinfoil_init(instance):
51 global tinfoil 31 global tinfoil
52 tinfoil = instance 32 tinfoil = instance
53 33
54 34
35
55class GoRecipeHandler(RecipeHandler): 36class GoRecipeHandler(RecipeHandler):
56 """Class to handle the go recipe creation""" 37 """Class to handle the go recipe creation"""
57 38
@@ -83,578 +64,6 @@ class GoRecipeHandler(RecipeHandler):
83 64
84 return bindir 65 return bindir
85 66
86 def __resolve_repository_static(self, modulepath):
87 """Resolve the repository in a static manner
88
89 The method is based on the go implementation of
90 `repoRootFromVCSPaths` in
91 https://github.com/golang/go/blob/master/src/cmd/go/internal/vcs/vcs.go
92 """
93
94 url = urllib.parse.urlparse("https://" + modulepath)
95 req = urllib.request.Request(url.geturl())
96
97 try:
98 resp = urllib.request.urlopen(req)
99 # Some modulepath are just redirects to github (or some other vcs
100 # hoster). Therefore, we check if this modulepath redirects to
101 # somewhere else
102 if resp.geturl() != url.geturl():
103 bb.debug(1, "%s is redirectred to %s" %
104 (url.geturl(), resp.geturl()))
105 url = urllib.parse.urlparse(resp.geturl())
106 modulepath = url.netloc + url.path
107
108 except URLError as url_err:
109 # This is probably because the module path
110 # contains the subdir and major path. Thus,
111 # we ignore this error for now
112 logger.debug(
113 1, "Failed to fetch page from [%s]: %s" % (url, str(url_err)))
114
115 host, _, _ = modulepath.partition('/')
116
117 class vcs(Enum):
118 pathprefix = "pathprefix"
119 regexp = "regexp"
120 type = "type"
121 repo = "repo"
122 check = "check"
123 schemelessRepo = "schemelessRepo"
124
125 # GitHub
126 vcsGitHub = {}
127 vcsGitHub[vcs.pathprefix] = "github.com"
128 vcsGitHub[vcs.regexp] = re.compile(
129 r'^(?P<root>github\.com/[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
130 vcsGitHub[vcs.type] = "git"
131 vcsGitHub[vcs.repo] = "https://\\g<root>"
132
133 # Bitbucket
134 vcsBitbucket = {}
135 vcsBitbucket[vcs.pathprefix] = "bitbucket.org"
136 vcsBitbucket[vcs.regexp] = re.compile(
137 r'^(?P<root>bitbucket\.org/(?P<bitname>[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+))(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
138 vcsBitbucket[vcs.type] = "git"
139 vcsBitbucket[vcs.repo] = "https://\\g<root>"
140
141 # IBM DevOps Services (JazzHub)
142 vcsIBMDevOps = {}
143 vcsIBMDevOps[vcs.pathprefix] = "hub.jazz.net/git"
144 vcsIBMDevOps[vcs.regexp] = re.compile(
145 r'^(?P<root>hub\.jazz\.net/git/[a-z0-9]+/[A-Za-z0-9_.\-]+)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
146 vcsIBMDevOps[vcs.type] = "git"
147 vcsIBMDevOps[vcs.repo] = "https://\\g<root>"
148
149 # Git at Apache
150 vcsApacheGit = {}
151 vcsApacheGit[vcs.pathprefix] = "git.apache.org"
152 vcsApacheGit[vcs.regexp] = re.compile(
153 r'^(?P<root>git\.apache\.org/[a-z0-9_.\-]+\.git)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
154 vcsApacheGit[vcs.type] = "git"
155 vcsApacheGit[vcs.repo] = "https://\\g<root>"
156
157 # Git at OpenStack
158 vcsOpenStackGit = {}
159 vcsOpenStackGit[vcs.pathprefix] = "git.openstack.org"
160 vcsOpenStackGit[vcs.regexp] = re.compile(
161 r'^(?P<root>git\.openstack\.org/[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+)(\.git)?(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
162 vcsOpenStackGit[vcs.type] = "git"
163 vcsOpenStackGit[vcs.repo] = "https://\\g<root>"
164
165 # chiselapp.com for fossil
166 vcsChiselapp = {}
167 vcsChiselapp[vcs.pathprefix] = "chiselapp.com"
168 vcsChiselapp[vcs.regexp] = re.compile(
169 r'^(?P<root>chiselapp\.com/user/[A-Za-z0-9]+/repository/[A-Za-z0-9_.\-]+)$')
170 vcsChiselapp[vcs.type] = "fossil"
171 vcsChiselapp[vcs.repo] = "https://\\g<root>"
172
173 # General syntax for any server.
174 # Must be last.
175 vcsGeneralServer = {}
176 vcsGeneralServer[vcs.regexp] = re.compile(
177 "(?P<root>(?P<repo>([a-z0-9.\\-]+\\.)+[a-z0-9.\\-]+(:[0-9]+)?(/~?[A-Za-z0-9_.\\-]+)+?)\\.(?P<vcs>bzr|fossil|git|hg|svn))(/~?(?P<suffix>[A-Za-z0-9_.\\-]+))*$")
178 vcsGeneralServer[vcs.schemelessRepo] = True
179
180 vcsPaths = [vcsGitHub, vcsBitbucket, vcsIBMDevOps,
181 vcsApacheGit, vcsOpenStackGit, vcsChiselapp,
182 vcsGeneralServer]
183
184 if modulepath.startswith("example.net") or modulepath == "rsc.io":
185 logger.warning("Suspicious module path %s" % modulepath)
186 return None
187 if modulepath.startswith("http:") or modulepath.startswith("https:"):
188 logger.warning("Import path should not start with %s %s" %
189 ("http", "https"))
190 return None
191
192 rootpath = None
193 vcstype = None
194 repourl = None
195 suffix = None
196
197 for srv in vcsPaths:
198 m = srv[vcs.regexp].match(modulepath)
199 if vcs.pathprefix in srv:
200 if host == srv[vcs.pathprefix]:
201 rootpath = m.group('root')
202 vcstype = srv[vcs.type]
203 repourl = m.expand(srv[vcs.repo])
204 suffix = m.group('suffix')
205 break
206 elif m and srv[vcs.schemelessRepo]:
207 rootpath = m.group('root')
208 vcstype = m[vcs.type]
209 repourl = m[vcs.repo]
210 suffix = m.group('suffix')
211 break
212
213 return GoImport(rootpath, vcstype, repourl, suffix)
214
215 def __resolve_repository_dynamic(self, modulepath):
216 """Resolve the repository root in a dynamic manner.
217
218 The method is based on the go implementation of
219 `repoRootForImportDynamic` in
220 https://github.com/golang/go/blob/master/src/cmd/go/internal/vcs/vcs.go
221 """
222 url = urllib.parse.urlparse("https://" + modulepath)
223
224 class GoImportHTMLParser(HTMLParser):
225
226 def __init__(self):
227 super().__init__()
228 self.__srv = {}
229
230 def handle_starttag(self, tag, attrs):
231 if tag == 'meta' and list(
232 filter(lambda a: (a[0] == 'name' and a[1] == 'go-import'), attrs)):
233 content = list(
234 filter(lambda a: (a[0] == 'content'), attrs))
235 if content:
236 srv = content[0][1].split()
237 self.__srv[srv[0]] = srv
238
239 def go_import(self, modulepath):
240 if modulepath in self.__srv:
241 srv = self.__srv[modulepath]
242 return GoImport(srv[0], srv[1], srv[2], None)
243 return None
244
245 url = url.geturl() + "?go-get=1"
246 req = urllib.request.Request(url)
247
248 try:
249 body = urllib.request.urlopen(req).read()
250 except HTTPError as http_err:
251 logger.warning(
252 "Unclean status when fetching page from [%s]: %s", url, str(http_err))
253 body = http_err.fp.read()
254 except URLError as url_err:
255 logger.warning(
256 "Failed to fetch page from [%s]: %s", url, str(url_err))
257 return None
258
259 parser = GoImportHTMLParser()
260 parser.feed(body.decode('utf-8'))
261 parser.close()
262
263 return parser.go_import(modulepath)
264
265 def __resolve_from_golang_proxy(self, modulepath, version):
266 """
267 Resolves repository data from golang proxy
268 """
269 url = urllib.parse.urlparse("https://proxy.golang.org/"
270 + modulepath
271 + "/@v/"
272 + version
273 + ".info")
274
275 # Transform url to lower case, golang proxy doesn't like mixed case
276 req = urllib.request.Request(url.geturl().lower())
277
278 try:
279 resp = urllib.request.urlopen(req)
280 except URLError as url_err:
281 logger.warning(
282 "Failed to fetch page from [%s]: %s", url, str(url_err))
283 return None
284
285 golang_proxy_res = resp.read().decode('utf-8')
286 modinfo = json.loads(golang_proxy_res)
287
288 if modinfo and 'Origin' in modinfo:
289 origin = modinfo['Origin']
290 _root_url = urllib.parse.urlparse(origin['URL'])
291
292 # We normalize the repo URL since we don't want the scheme in it
293 _subdir = origin['Subdir'] if 'Subdir' in origin else None
294 _root, _, _ = self.__split_path_version(modulepath)
295 if _subdir:
296 _root = _root[:-len(_subdir)].strip('/')
297
298 _commit = origin['Hash']
299 _vcs = origin['VCS']
300 return (GoImport(_root, _vcs, _root_url.geturl(), None), _commit)
301
302 return None
303
304 def __resolve_repository(self, modulepath):
305 """
306 Resolves src uri from go module-path
307 """
308 repodata = self.__resolve_repository_static(modulepath)
309 if not repodata or not repodata.url:
310 repodata = self.__resolve_repository_dynamic(modulepath)
311 if not repodata or not repodata.url:
312 logger.error(
313 "Could not resolve repository for module path '%s'" % modulepath)
314 # There is no way to recover from this
315 sys.exit(14)
316 if repodata:
317 logger.debug(1, "Resolved download path for import '%s' => %s" % (
318 modulepath, repodata.url))
319 return repodata
320
321 def __split_path_version(self, path):
322 i = len(path)
323 dot = False
324 for j in range(i, 0, -1):
325 if path[j - 1] < '0' or path[j - 1] > '9':
326 break
327 if path[j - 1] == '.':
328 dot = True
329 break
330 i = j - 1
331
332 if i <= 1 or i == len(
333 path) or path[i - 1] != 'v' or path[i - 2] != '/':
334 return path, "", True
335
336 prefix, pathMajor = path[:i - 2], path[i - 2:]
337 if dot or len(
338 pathMajor) <= 2 or pathMajor[2] == '0' or pathMajor == "/v1":
339 return path, "", False
340
341 return prefix, pathMajor, True
342
343 def __get_path_major(self, pathMajor):
344 if not pathMajor:
345 return ""
346
347 if pathMajor[0] != '/' and pathMajor[0] != '.':
348 logger.error(
349 "pathMajor suffix %s passed to PathMajorPrefix lacks separator", pathMajor)
350
351 if pathMajor.startswith(".v") and pathMajor.endswith("-unstable"):
352 pathMajor = pathMajor[:len("-unstable") - 2]
353
354 return pathMajor[1:]
355
356 def __build_coderepo(self, repo, path):
357 codedir = ""
358 pathprefix, pathMajor, _ = self.__split_path_version(path)
359 if repo.root == path:
360 pathprefix = path
361 elif path.startswith(repo.root):
362 codedir = pathprefix[len(repo.root):].strip('/')
363
364 pseudoMajor = self.__get_path_major(pathMajor)
365
366 logger.debug("root='%s', codedir='%s', prefix='%s', pathMajor='%s', pseudoMajor='%s'",
367 repo.root, codedir, pathprefix, pathMajor, pseudoMajor)
368
369 return CodeRepo(path, repo.root, codedir,
370 pathMajor, pathprefix, pseudoMajor)
371
372 def __resolve_version(self, repo, path, version):
373 hash = None
374 coderoot = self.__build_coderepo(repo, path)
375
376 def vcs_fetch_all():
377 tmpdir = tempfile.mkdtemp()
378 clone_cmd = "%s clone --bare %s %s" % ('git', repo.url, tmpdir)
379 bb.process.run(clone_cmd)
380 log_cmd = "git log --all --pretty='%H %d' --decorate=short"
381 output, _ = bb.process.run(
382 log_cmd, shell=True, stderr=subprocess.PIPE, cwd=tmpdir)
383 bb.utils.prunedir(tmpdir)
384 return output.strip().split('\n')
385
386 def vcs_fetch_remote(tag):
387 # add * to grab ^{}
388 refs = {}
389 ls_remote_cmd = "git ls-remote -q --tags {} {}*".format(
390 repo.url, tag)
391 output, _ = bb.process.run(ls_remote_cmd)
392 output = output.strip().split('\n')
393 for line in output:
394 f = line.split(maxsplit=1)
395 if len(f) != 2:
396 continue
397
398 for prefix in ["HEAD", "refs/heads/", "refs/tags/"]:
399 if f[1].startswith(prefix):
400 refs[f[1][len(prefix):]] = f[0]
401
402 for key, hash in refs.items():
403 if key.endswith(r"^{}"):
404 refs[key.strip(r"^{}")] = hash
405
406 return refs[tag]
407
408 m_pseudo_semver = re_pseudo_semver.match(version)
409
410 if m_pseudo_semver:
411 remote_refs = vcs_fetch_all()
412 short_commit = m_pseudo_semver.group('commithash')
413 for l in remote_refs:
414 r = l.split(maxsplit=1)
415 sha1 = r[0] if len(r) else None
416 if not sha1:
417 logger.error(
418 "Ups: could not resolve abbref commit for %s" % short_commit)
419
420 elif sha1.startswith(short_commit):
421 hash = sha1
422 break
423 else:
424 m_semver = re_semver.match(version)
425 if m_semver:
426
427 def get_sha1_remote(re):
428 rsha1 = None
429 for line in remote_refs:
430 # Split lines of the following format:
431 # 22e90d9b964610628c10f673ca5f85b8c2a2ca9a (tag: sometag)
432 lineparts = line.split(maxsplit=1)
433 sha1 = lineparts[0] if len(lineparts) else None
434 refstring = lineparts[1] if len(
435 lineparts) == 2 else None
436 if refstring:
437 # Normalize tag string and split in case of multiple
438 # regs e.g. (tag: speech/v1.10.0, tag: orchestration/v1.5.0 ...)
439 refs = refstring.strip('(), ').split(',')
440 for ref in refs:
441 if re.match(ref.strip()):
442 rsha1 = sha1
443 return rsha1
444
445 semver = "v" + m_semver.group('major') + "."\
446 + m_semver.group('minor') + "."\
447 + m_semver.group('patch') \
448 + (("-" + m_semver.group('prerelease'))
449 if m_semver.group('prerelease') else "")
450
451 tag = os.path.join(
452 coderoot.codeDir, semver) if coderoot.codeDir else semver
453
454 # probe tag using 'ls-remote', which is faster than fetching
455 # complete history
456 hash = vcs_fetch_remote(tag)
457 if not hash:
458 # backup: fetch complete history
459 remote_refs = vcs_fetch_all()
460 hash = get_sha1_remote(
461 re.compile(fr"(tag:|HEAD ->) ({tag})"))
462
463 logger.debug(
464 "Resolving commit for tag '%s' -> '%s'", tag, hash)
465 return hash
466
467 def __generate_srcuri_inline_fcn(self, path, version, replaces=None):
468 """Generate SRC_URI functions for go imports"""
469
470 logger.info("Resolving repository for module %s", path)
471 # First try to resolve repo and commit from golang proxy
472 # Most info is already there and we don't have to go through the
473 # repository or even perform the version resolve magic
474 golang_proxy_info = self.__resolve_from_golang_proxy(path, version)
475 if golang_proxy_info:
476 repo = golang_proxy_info[0]
477 commit = golang_proxy_info[1]
478 else:
479 # Fallback
480 # Resolve repository by 'hand'
481 repo = self.__resolve_repository(path)
482 commit = self.__resolve_version(repo, path, version)
483
484 url = urllib.parse.urlparse(repo.url)
485 repo_url = url.netloc + url.path
486
487 coderoot = self.__build_coderepo(repo, path)
488
489 inline_fcn = "${@go_src_uri("
490 inline_fcn += f"'{repo_url}','{version}'"
491 if repo_url != path:
492 inline_fcn += f",path='{path}'"
493 if coderoot.codeDir:
494 inline_fcn += f",subdir='{coderoot.codeDir}'"
495 if repo.vcs != 'git':
496 inline_fcn += f",vcs='{repo.vcs}'"
497 if replaces:
498 inline_fcn += f",replaces='{replaces}'"
499 if coderoot.pathMajor:
500 inline_fcn += f",pathmajor='{coderoot.pathMajor}'"
501 inline_fcn += ")}"
502
503 return inline_fcn, commit
504
505 def __go_handle_dependencies(self, go_mod, srctree, localfilesdir, extravalues, d):
506
507 import re
508 src_uris = []
509 src_revs = []
510
511 def generate_src_rev(path, version, commithash):
512 src_rev = f"# {path}@{version} => {commithash}\n"
513 # Ups...maybe someone manipulated the source repository and the
514 # version or commit could not be resolved. This is a sign of
515 # a) the supply chain was manipulated (bad)
516 # b) the implementation for the version resolving didn't work
517 # anymore (less bad)
518 if not commithash:
519 src_rev += f"#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n"
520 src_rev += f"#!!! Could not resolve version !!!\n"
521 src_rev += f"#!!! Possible supply chain attack !!!\n"
522 src_rev += f"#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n"
523 src_rev += f"SRCREV_{path.replace('/', '.')} = \"{commithash}\""
524
525 return src_rev
526
527 # we first go over replacement list, because we are essentialy
528 # interested only in the replaced path
529 if go_mod['Replace']:
530 for replacement in go_mod['Replace']:
531 oldpath = replacement['Old']['Path']
532 path = replacement['New']['Path']
533 version = ''
534 if 'Version' in replacement['New']:
535 version = replacement['New']['Version']
536
537 if os.path.exists(os.path.join(srctree, path)):
538 # the module refers to the local path, remove it from requirement list
539 # because it's a local module
540 go_mod['Require'][:] = [v for v in go_mod['Require'] if v.get('Path') != oldpath]
541 else:
542 # Replace the path and the version, so we don't iterate replacement list anymore
543 for require in go_mod['Require']:
544 if require['Path'] == oldpath:
545 require.update({'Path': path, 'Version': version})
546 break
547
548 for require in go_mod['Require']:
549 path = require['Path']
550 version = require['Version']
551
552 inline_fcn, commithash = self.__generate_srcuri_inline_fcn(
553 path, version)
554 src_uris.append(inline_fcn)
555 src_revs.append(generate_src_rev(path, version, commithash))
556
557 # strip version part from module URL /vXX
558 baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path'])
559 pn, _ = determine_from_url(baseurl)
560 go_mods_basename = "%s-modules.inc" % pn
561
562 go_mods_filename = os.path.join(localfilesdir, go_mods_basename)
563 with open(go_mods_filename, "w") as f:
564 # We introduce this indirection to make the tests a little easier
565 f.write("SRC_URI += \"${GO_DEPENDENCIES_SRC_URI}\"\n")
566 f.write("GO_DEPENDENCIES_SRC_URI = \"\\\n")
567 for uri in src_uris:
568 f.write(" " + uri + " \\\n")
569 f.write("\"\n\n")
570 for rev in src_revs:
571 f.write(rev + "\n")
572
573 extravalues['extrafiles'][go_mods_basename] = go_mods_filename
574
575 def __go_run_cmd(self, cmd, cwd, d):
576 return bb.process.run(cmd, env=dict(os.environ, PATH=d.getVar('PATH')),
577 shell=True, cwd=cwd)
578
579 def __go_native_version(self, d):
580 stdout, _ = self.__go_run_cmd("go version", None, d)
581 m = re.match(r".*\sgo((\d+).(\d+).(\d+))\s([\w\/]*)", stdout)
582 major = int(m.group(2))
583 minor = int(m.group(3))
584 patch = int(m.group(4))
585
586 return major, minor, patch
587
588 def __go_mod_patch(self, srctree, localfilesdir, extravalues, d):
589
590 patchfilename = "go.mod.patch"
591 go_native_version_major, go_native_version_minor, _ = self.__go_native_version(
592 d)
593 self.__go_run_cmd("go mod tidy -go=%d.%d" %
594 (go_native_version_major, go_native_version_minor), srctree, d)
595 stdout, _ = self.__go_run_cmd("go mod edit -json", srctree, d)
596
597 # Create patch in order to upgrade go version
598 self.__go_run_cmd("git diff go.mod > %s" % (patchfilename), srctree, d)
599 # Restore original state
600 self.__go_run_cmd("git checkout HEAD go.mod go.sum", srctree, d)
601
602 go_mod = json.loads(stdout)
603 tmpfile = os.path.join(localfilesdir, patchfilename)
604 shutil.move(os.path.join(srctree, patchfilename), tmpfile)
605
606 extravalues['extrafiles'][patchfilename] = tmpfile
607
608 return go_mod, patchfilename
609
610 def __go_mod_vendor(self, go_mod, srctree, localfilesdir, extravalues, d):
611 # Perform vendoring to retrieve the correct modules.txt
612 tmp_vendor_dir = tempfile.mkdtemp()
613
614 # -v causes to go to print modules.txt to stderr
615 _, stderr = self.__go_run_cmd(
616 "go mod vendor -v -o %s" % (tmp_vendor_dir), srctree, d)
617
618 modules_txt_basename = "modules.txt"
619 modules_txt_filename = os.path.join(localfilesdir, modules_txt_basename)
620 with open(modules_txt_filename, "w") as f:
621 f.write(stderr)
622
623 extravalues['extrafiles'][modules_txt_basename] = modules_txt_filename
624
625 licenses = []
626 lic_files_chksum = []
627 licvalues = find_licenses(tmp_vendor_dir, d)
628 shutil.rmtree(tmp_vendor_dir)
629
630 if licvalues:
631 for licvalue in licvalues:
632 license = licvalue[0]
633 lics = tidy_licenses(fixup_license(license))
634 lics = [lic for lic in lics if lic not in licenses]
635 if len(lics):
636 licenses.extend(lics)
637 lic_files_chksum.append(
638 'file://src/${GO_IMPORT}/vendor/%s;md5=%s' % (licvalue[1], licvalue[2]))
639
640 # strip version part from module URL /vXX
641 baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path'])
642 pn, _ = determine_from_url(baseurl)
643 licenses_basename = "%s-licenses.inc" % pn
644
645 licenses_filename = os.path.join(localfilesdir, licenses_basename)
646 with open(licenses_filename, "w") as f:
647 f.write("GO_MOD_LICENSES = \"%s\"\n\n" %
648 ' & '.join(sorted(licenses, key=str.casefold)))
649 # We introduce this indirection to make the tests a little easier
650 f.write("LIC_FILES_CHKSUM += \"${VENDORED_LIC_FILES_CHKSUM}\"\n")
651 f.write("VENDORED_LIC_FILES_CHKSUM = \"\\\n")
652 for lic in lic_files_chksum:
653 f.write(" " + lic + " \\\n")
654 f.write("\"\n")
655
656 extravalues['extrafiles'][licenses_basename] = licenses_filename
657
658 def process(self, srctree, classes, lines_before, 67 def process(self, srctree, classes, lines_before,
659 lines_after, handled, extravalues): 68 lines_after, handled, extravalues):
660 69
@@ -665,63 +74,52 @@ class GoRecipeHandler(RecipeHandler):
665 if not files: 74 if not files:
666 return False 75 return False
667 76
668 d = bb.data.createCopy(tinfoil.config_data)
669 go_bindir = self.__ensure_go() 77 go_bindir = self.__ensure_go()
670 if not go_bindir: 78 if not go_bindir:
671 sys.exit(14) 79 sys.exit(14)
672 80
673 d.prependVar('PATH', '%s:' % go_bindir)
674 handled.append('buildsystem') 81 handled.append('buildsystem')
675 classes.append("go-vendor") 82 classes.append("go-mod")
676 83
677 stdout, _ = self.__go_run_cmd("go mod edit -json", srctree, d) 84 # Use go-mod-update-modules to set the full SRC_URI and LICENSE
85 classes.append("go-mod-update-modules")
86 extravalues["run_tasks"] = "update_modules"
678 87
679 go_mod = json.loads(stdout) 88 with tempfile.TemporaryDirectory(prefix="go-mod-") as tmp_mod_dir:
680 go_import = go_mod['Module']['Path'] 89 env = dict(os.environ)
681 go_version_match = re.match("([0-9]+).([0-9]+)", go_mod['Go']) 90 env["PATH"] += f":{go_bindir}"
682 go_version_major = int(go_version_match.group(1)) 91 env['GOMODCACHE'] = tmp_mod_dir
683 go_version_minor = int(go_version_match.group(2))
684 src_uris = []
685 92
686 localfilesdir = tempfile.mkdtemp(prefix='recipetool-go-') 93 stdout = subprocess.check_output(["go", "mod", "edit", "-json"], cwd=srctree, env=env, text=True)
687 extravalues.setdefault('extrafiles', {}) 94 go_mod = json.loads(stdout)
95 go_import = re.sub(r'/v([0-9]+)$', '', go_mod['Module']['Path'])
688 96
689 # Use an explicit name determined from the module name because it 97 localfilesdir = tempfile.mkdtemp(prefix='recipetool-go-')
690 # might differ from the actual URL for replaced modules 98 extravalues.setdefault('extrafiles', {})
691 # strip version part from module URL /vXX
692 baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path'])
693 pn, _ = determine_from_url(baseurl)
694 99
695 # go.mod files with version < 1.17 may not include all indirect 100 # Write the stub ${BPN}-licenses.inc and ${BPN}-go-mods.inc files
696 # dependencies. Thus, we have to upgrade the go version. 101 basename = "{pn}-licenses.inc"
697 if go_version_major == 1 and go_version_minor < 17: 102 filename = os.path.join(localfilesdir, basename)
698 logger.warning( 103 with open(filename, "w") as f:
699 "go.mod files generated by Go < 1.17 might have incomplete indirect dependencies.") 104 f.write("# FROM RECIPETOOL\n")
700 go_mod, patchfilename = self.__go_mod_patch(srctree, localfilesdir, 105 extravalues['extrafiles'][f"../{basename}"] = filename
701 extravalues, d)
702 src_uris.append(
703 "file://%s;patchdir=src/${GO_IMPORT}" % (patchfilename))
704 106
705 # Check whether the module is vendored. If so, we have nothing to do. 107 basename = "{pn}-go-mods.inc"
706 # Otherwise we gather all dependencies and add them to the recipe 108 filename = os.path.join(localfilesdir, basename)
707 if not os.path.exists(os.path.join(srctree, "vendor")): 109 with open(filename, "w") as f:
110 f.write("# FROM RECIPETOOL\n")
111 extravalues['extrafiles'][f"../{basename}"] = filename
708 112
709 # Write additional $BPN-modules.inc file 113 # Do generic license handling
710 self.__go_mod_vendor(go_mod, srctree, localfilesdir, extravalues, d) 114 d = bb.data.createCopy(tinfoil.config_data)
711 lines_before.append("LICENSE += \" & ${GO_MOD_LICENSES}\"") 115 handle_license_vars(srctree, lines_before, handled, extravalues, d)
712 lines_before.append("require %s-licenses.inc" % (pn)) 116 self.__rewrite_lic_vars(lines_before)
713 117
714 self.__rewrite_src_uri(lines_before, ["file://modules.txt"]) 118 self.__rewrite_src_uri(lines_before)
715 119
716 self.__go_handle_dependencies(go_mod, srctree, localfilesdir, extravalues, d) 120 lines_before.append('require ${BPN}-licenses.inc')
717 lines_before.append("require %s-modules.inc" % (pn)) 121 lines_before.append('require ${BPN}-go-mods.inc')
718 122 lines_before.append(f'GO_IMPORT = "{go_import}"')
719 # Do generic license handling
720 handle_license_vars(srctree, lines_before, handled, extravalues, d)
721 self.__rewrite_lic_uri(lines_before)
722
723 lines_before.append("GO_IMPORT = \"{}\"".format(baseurl))
724 lines_before.append("SRCREV_FORMAT = \"${BPN}\"")
725 123
726 def __update_lines_before(self, updated, newlines, lines_before): 124 def __update_lines_before(self, updated, newlines, lines_before):
727 if updated: 125 if updated:
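Note how the stub include files above are registered under keys of the form f"../{basename}": combined with the {pn} expansion now done in create.py, they land beside the recipe itself rather than inside the <pn> file subdirectory, which is what the require ${BPN}-licenses.inc / ${BPN}-go-mods.inc lines expect. A small sketch of the resulting path, with hypothetical directories:

    import os

    pn = "hello-go"
    extraoutdir = os.path.join("recipes-example/hello-go", pn)   # <recipe dir>/<pn>
    destfn = "../{pn}-licenses.inc"                              # key written by the Go handler
    fn = destfn.format(pn=pn, pv="1.0")
    print(os.path.normpath(os.path.join(extraoutdir, fn)))
    # -> recipes-example/hello-go/hello-go-licenses.inc  (next to the .bb file)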
@@ -733,9 +131,9 @@ class GoRecipeHandler(RecipeHandler):
733 lines_before.append(line) 131 lines_before.append(line)
734 return updated 132 return updated
735 133
736 def __rewrite_lic_uri(self, lines_before): 134 def __rewrite_lic_vars(self, lines_before):
737
738 def varfunc(varname, origvalue, op, newlines): 135 def varfunc(varname, origvalue, op, newlines):
136 import urllib.parse
739 if varname == 'LIC_FILES_CHKSUM': 137 if varname == 'LIC_FILES_CHKSUM':
740 new_licenses = [] 138 new_licenses = []
741 licenses = origvalue.split('\\') 139 licenses = origvalue.split('\\')
@@ -760,12 +158,11 @@ class GoRecipeHandler(RecipeHandler):
760 lines_before, ['LIC_FILES_CHKSUM'], varfunc) 158 lines_before, ['LIC_FILES_CHKSUM'], varfunc)
761 return self.__update_lines_before(updated, newlines, lines_before) 159 return self.__update_lines_before(updated, newlines, lines_before)
762 160
763 def __rewrite_src_uri(self, lines_before, additional_uris = []): 161 def __rewrite_src_uri(self, lines_before):
764 162
765 def varfunc(varname, origvalue, op, newlines): 163 def varfunc(varname, origvalue, op, newlines):
766 if varname == 'SRC_URI': 164 if varname == 'SRC_URI':
767 src_uri = ["git://${GO_IMPORT};destsuffix=git/src/${GO_IMPORT};nobranch=1;name=${BPN};protocol=https"] 165 src_uri = ['git://${GO_IMPORT};protocol=https;nobranch=1;destsuffix=${GO_SRCURI_DESTSUFFIX}']
768 src_uri.extend(additional_uris)
769 return src_uri, None, -1, True 166 return src_uri, None, -1, True
770 return origvalue, None, 0, True 167 return origvalue, None, 0, True
771 168
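Both rewrite helpers keep the same callback shape; they appear to go through bb.utils.edit_metadata(), whose varfunc returns a (new value, operator, indent, minbreak) tuple. A hedged sketch of rewriting SRC_URI the same way outside recipetool, assuming that API:

    import bb.utils

    def varfunc(varname, origvalue, op, newlines):
        if varname == 'SRC_URI':
            src_uri = ['git://${GO_IMPORT};protocol=https;nobranch=1;destsuffix=${GO_SRCURI_DESTSUFFIX}']
            # Same tuple shape as the helpers above: None keeps the original operator.
            return src_uri, None, -1, True
        return origvalue, None, 0, True

    lines = ['SRC_URI = "https://example.invalid/${BPN}-${PV}.tar.gz"\n']
    updated, newlines = bb.utils.edit_metadata(lines, ['SRC_URI'], varfunc)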