Diffstat (limited to 'meta/lib/oe')
-rw-r--r-- meta/lib/oe/__init__.py | 6
-rw-r--r-- meta/lib/oe/buildcfg.py | 79
-rw-r--r-- meta/lib/oe/buildhistory_analysis.py | 2
-rw-r--r-- meta/lib/oe/cachedpath.py | 2
-rw-r--r-- meta/lib/oe/classextend.py | 13
-rw-r--r-- meta/lib/oe/classutils.py | 2
-rw-r--r-- meta/lib/oe/copy_buildsystem.py | 20
-rw-r--r-- meta/lib/oe/cve_check.py | 187
-rw-r--r-- meta/lib/oe/data.py | 2
-rw-r--r-- meta/lib/oe/distro_check.py | 4
-rw-r--r-- meta/lib/oe/elf.py | 12
-rw-r--r-- meta/lib/oe/go.py | 34
-rw-r--r-- meta/lib/oe/gpg_sign.py | 58
-rw-r--r-- meta/lib/oe/license.py | 47
-rw-r--r-- meta/lib/oe/lsb.py | 2
-rw-r--r-- meta/lib/oe/maketype.py | 9
-rw-r--r-- meta/lib/oe/manifest.py | 2
-rw-r--r-- meta/lib/oe/npm_registry.py | 175
-rw-r--r-- meta/lib/oe/overlayfs.py | 54
-rw-r--r-- meta/lib/oe/package.py | 1751
-rw-r--r-- meta/lib/oe/package_manager/__init__.py | 26
-rw-r--r-- meta/lib/oe/package_manager/deb/__init__.py | 37
-rw-r--r-- meta/lib/oe/package_manager/deb/manifest.py | 2
-rw-r--r-- meta/lib/oe/package_manager/deb/rootfs.py | 2
-rw-r--r-- meta/lib/oe/package_manager/deb/sdk.py | 11
-rw-r--r-- meta/lib/oe/package_manager/ipk/__init__.py | 42
-rw-r--r-- meta/lib/oe/package_manager/ipk/manifest.py | 3
-rw-r--r-- meta/lib/oe/package_manager/ipk/rootfs.py | 41
-rw-r--r-- meta/lib/oe/package_manager/ipk/sdk.py | 11
-rw-r--r-- meta/lib/oe/package_manager/rpm/__init__.py | 47
-rw-r--r-- meta/lib/oe/package_manager/rpm/manifest.py | 2
-rw-r--r-- meta/lib/oe/package_manager/rpm/rootfs.py | 4
-rw-r--r-- meta/lib/oe/package_manager/rpm/sdk.py | 10
-rw-r--r-- meta/lib/oe/packagedata.py | 276
-rw-r--r-- meta/lib/oe/packagegroup.py | 2
-rw-r--r-- meta/lib/oe/patch.py | 256
-rw-r--r-- meta/lib/oe/path.py | 8
-rw-r--r-- meta/lib/oe/prservice.py | 33
-rw-r--r-- meta/lib/oe/qa.py | 59
-rw-r--r-- meta/lib/oe/recipeutils.py | 135
-rw-r--r-- meta/lib/oe/reproducible.py | 97
-rw-r--r-- meta/lib/oe/rootfs.py | 103
-rw-r--r-- meta/lib/oe/rust.py | 13
-rw-r--r-- meta/lib/oe/sbom.py | 120
-rw-r--r-- meta/lib/oe/sdk.py | 8
-rw-r--r-- meta/lib/oe/spdx.py | 357
-rw-r--r-- meta/lib/oe/sstatesig.py | 237
-rw-r--r-- meta/lib/oe/terminal.py | 30
-rw-r--r-- meta/lib/oe/types.py | 2
-rw-r--r-- meta/lib/oe/useradd.py | 4
-rw-r--r-- meta/lib/oe/utils.py | 182
51 files changed, 4111 insertions(+), 510 deletions(-)
diff --git a/meta/lib/oe/__init__.py b/meta/lib/oe/__init__.py
index 4e7c09da04..6eb536ad28 100644
--- a/meta/lib/oe/__init__.py
+++ b/meta/lib/oe/__init__.py
@@ -1,6 +1,12 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
 from pkgutil import extend_path
 __path__ = extend_path(__path__, __name__)
+
+BBIMPORTS = ["data", "path", "utils", "types", "package", "packagedata", \
+             "packagegroup", "sstatesig", "lsb", "cachedpath", "license", \
+             "qa", "reproducible", "rust", "buildcfg", "go"]
diff --git a/meta/lib/oe/buildcfg.py b/meta/lib/oe/buildcfg.py
new file mode 100644
index 0000000000..27b059b834
--- /dev/null
+++ b/meta/lib/oe/buildcfg.py
@@ -0,0 +1,79 @@
+
+import os
+import subprocess
+import bb.process
+
+def detect_revision(d):
+    path = get_scmbasepath(d)
+    return get_metadata_git_revision(path)
+
+def detect_branch(d):
+    path = get_scmbasepath(d)
+    return get_metadata_git_branch(path)
+
+def get_scmbasepath(d):
+    return os.path.join(d.getVar('COREBASE'), 'meta')
+
+def get_metadata_git_branch(path):
+    try:
+        rev, _ = bb.process.run('git rev-parse --abbrev-ref HEAD', cwd=path)
+    except bb.process.ExecutionError:
+        rev = '<unknown>'
+    return rev.strip()
+
+def get_metadata_git_revision(path):
+    try:
+        rev, _ = bb.process.run('git rev-parse HEAD', cwd=path)
+    except bb.process.ExecutionError:
+        rev = '<unknown>'
+    return rev.strip()
+
+def get_metadata_git_toplevel(path):
+    try:
+        toplevel, _ = bb.process.run('git rev-parse --show-toplevel', cwd=path)
+    except bb.process.ExecutionError:
+        return ""
+    return toplevel.strip()
+
+def get_metadata_git_remotes(path):
+    try:
+        remotes_list, _ = bb.process.run('git remote', cwd=path)
+        remotes = remotes_list.split()
+    except bb.process.ExecutionError:
+        remotes = []
+    return remotes
+
+def get_metadata_git_remote_url(path, remote):
+    try:
+        uri, _ = bb.process.run('git remote get-url {remote}'.format(remote=remote), cwd=path)
+    except bb.process.ExecutionError:
+        return ""
+    return uri.strip()
+
+def get_metadata_git_describe(path):
+    try:
+        describe, _ = bb.process.run('git describe --tags', cwd=path)
+    except bb.process.ExecutionError:
+        return ""
+    return describe.strip()
+
+def is_layer_modified(path):
+    try:
+        subprocess.check_output("""cd %s; export PSEUDO_UNLOAD=1; set -e;
+                                git diff --quiet --no-ext-diff
+                                git diff --quiet --no-ext-diff --cached""" % path,
+                                shell=True,
+                                stderr=subprocess.STDOUT)
+        return ""
+    except subprocess.CalledProcessError as ex:
+        # Silently treat errors as "modified", without checking for the
+        # (expected) return code 1 in a modified git repo. For example, we get
+        # output and a 129 return code when a layer isn't a git repo at all.
+        return " -- modified"
+
+def get_layer_revisions(d):
+    layers = (d.getVar("BBLAYERS") or "").split()
+    revisions = []
+    for i in layers:
+        revisions.append((i, os.path.basename(i), get_metadata_git_branch(i).strip(), get_metadata_git_revision(i), is_layer_modified(i)))
+    return revisions
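The new buildcfg helpers all follow one pattern: run a git query through bb.process.run and fall back to a harmless default when the command fails. A minimal standalone sketch of that pattern, using subprocess directly so it runs outside BitBake (the layer path below is hypothetical):

# Sketch of the buildcfg.py query-with-fallback pattern, bb-free.
import subprocess

def git_query(args, path, default=""):
    # Any git failure (not a repo, git missing, bad path) yields the
    # default, mirroring how get_metadata_git_revision() returns '<unknown>'.
    try:
        out = subprocess.check_output(["git"] + args, cwd=path,
                                      stderr=subprocess.DEVNULL)
        return out.decode("utf-8").strip()
    except (subprocess.CalledProcessError, FileNotFoundError, NotADirectoryError):
        return default

layer = "/srv/layers/meta-example"   # hypothetical layer checkout
print(git_query(["rev-parse", "--abbrev-ref", "HEAD"], layer, "<unknown>"))
print(git_query(["rev-parse", "HEAD"], layer, "<unknown>"))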
diff --git a/meta/lib/oe/buildhistory_analysis.py b/meta/lib/oe/buildhistory_analysis.py
index b1856846b6..4edad01580 100644
--- a/meta/lib/oe/buildhistory_analysis.py
+++ b/meta/lib/oe/buildhistory_analysis.py
@@ -562,7 +562,7 @@ def compare_siglists(a_blob, b_blob, taskdiff=False):
             elif not hash2 in hashfiles:
                 out.append("Unable to find matching sigdata for %s with hash %s" % (desc, hash2))
             else:
-                out2 = bb.siggen.compare_sigfiles(hashfiles[hash1], hashfiles[hash2], recursecb, collapsed=True)
+                out2 = bb.siggen.compare_sigfiles(hashfiles[hash1]['path'], hashfiles[hash2]['path'], recursecb, collapsed=True)
                 for line in out2:
                     m = hashlib.sha256()
                     m.update(line.encode('utf-8'))
diff --git a/meta/lib/oe/cachedpath.py b/meta/lib/oe/cachedpath.py
index 254257a83f..0138b791d4 100644
--- a/meta/lib/oe/cachedpath.py
+++ b/meta/lib/oe/cachedpath.py
@@ -1,4 +1,6 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
 # Based on standard python library functions but avoid
diff --git a/meta/lib/oe/classextend.py b/meta/lib/oe/classextend.py
index d3d8fbe724..5161d33d2d 100644
--- a/meta/lib/oe/classextend.py
+++ b/meta/lib/oe/classextend.py
@@ -1,4 +1,6 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
@@ -30,6 +32,9 @@ class ClassExtender(object):
         if name.endswith("-" + self.extname):
             name = name.replace("-" + self.extname, "")
         if name.startswith("virtual/"):
+            # Assume large numbers of dashes means a triplet is present and we don't need to convert
+            if name.count("-") >= 3 and name.endswith(("-go", "-binutils", "-gcc", "-g++")):
+                return name
             subs = name.split("/", 1)[1]
             if not subs.startswith(self.extname):
                 return "virtual/" + self.extname + "-" + subs
@@ -87,7 +92,7 @@ class ClassExtender(object):
     def map_depends_variable(self, varname, suffix = ""):
         # We need to preserve EXTENDPKGV so it can be expanded correctly later
         if suffix:
-            varname = varname + "_" + suffix
+            varname = varname + ":" + suffix
         orig = self.d.getVar("EXTENDPKGV", False)
         self.d.setVar("EXTENDPKGV", "EXTENDPKGV")
         deps = self.d.getVar(varname)
@@ -142,15 +147,13 @@ class ClassExtender(object):
             if pkg_mapping[0].startswith("${") and pkg_mapping[0].endswith("}"):
                 continue
             for subs in variables:
-                self.d.renameVar("%s_%s" % (subs, pkg_mapping[0]), "%s_%s" % (subs, pkg_mapping[1]))
+                self.d.renameVar("%s:%s" % (subs, pkg_mapping[0]), "%s:%s" % (subs, pkg_mapping[1]))
 
 class NativesdkClassExtender(ClassExtender):
     def map_depends(self, dep):
         if dep.startswith(self.extname):
             return dep
-        if dep.endswith(("-gcc", "-g++")):
-            return dep + "-crosssdk"
-        elif dep.endswith(("-native", "-native-runtime")) or ('nativesdk-' in dep) or ('-cross-' in dep) or ('-crosssdk-' in dep):
+        if dep.endswith(("-native", "-native-runtime")) or ('nativesdk-' in dep) or ('-cross-' in dep) or ('-crosssdk-' in dep):
             return dep
         else:
             return self.extend_name(dep)
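To make the new triplet check concrete, here is an illustrative sketch with the condition copied from the extend_name() hunk above (the inputs are invented, not taken from the diff):

# Names already carrying a target triplet (3+ dashes plus a toolchain
# suffix) pass through unchanged; other virtual/ providers get the
# extension prefix.
def extend_virtual(name, extname="nativesdk"):
    if name.startswith("virtual/"):
        if name.count("-") >= 3 and name.endswith(("-go", "-binutils", "-gcc", "-g++")):
            return name
        subs = name.split("/", 1)[1]
        if not subs.startswith(extname):
            return "virtual/" + extname + "-" + subs
    return name

print(extend_virtual("virtual/x86_64-poky-linux-gcc"))  # unchanged
print(extend_virtual("virtual/libc"))                   # virtual/nativesdk-libc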
diff --git a/meta/lib/oe/classutils.py b/meta/lib/oe/classutils.py
index 08bb66b365..ec3f6ad720 100644
--- a/meta/lib/oe/classutils.py
+++ b/meta/lib/oe/classutils.py
@@ -1,4 +1,6 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
diff --git a/meta/lib/oe/copy_buildsystem.py b/meta/lib/oe/copy_buildsystem.py
index 31a84f5b06..81abfbf9e2 100644
--- a/meta/lib/oe/copy_buildsystem.py
+++ b/meta/lib/oe/copy_buildsystem.py
@@ -1,4 +1,6 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
 # This class should provide easy access to the different aspects of the
@@ -20,7 +22,7 @@ def _smart_copy(src, dest):
     mode = os.stat(src).st_mode
     if stat.S_ISDIR(mode):
         bb.utils.mkdirhier(dest)
-        cmd = "tar --exclude='.git' --xattrs --xattrs-include='*' -chf - -C %s -p . \
+        cmd = "tar --exclude='.git' --exclude='__pycache__' --xattrs --xattrs-include='*' -cf - -C %s -p . \
         | tar --xattrs --xattrs-include='*' -xf - -C %s" % (src, dest)
         subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
     else:
@@ -45,9 +47,6 @@ class BuildSystem(object):
 
         corebase = os.path.abspath(self.d.getVar('COREBASE'))
         layers.append(corebase)
-        # Get relationship between TOPDIR and COREBASE
-        # Layers should respect it
-        corebase_relative = os.path.dirname(os.path.relpath(os.path.abspath(self.d.getVar('TOPDIR')), corebase))
         # The bitbake build system uses the meta-skeleton layer as a layout
         # for common recipies, e.g: the recipetool script to create kernel recipies
         # Add the meta-skeleton layer to be included as part of the eSDK installation
@@ -100,11 +99,10 @@ class BuildSystem(object):
             layerdestpath = destdir
             if corebase == os.path.dirname(layer):
                 layerdestpath += '/' + os.path.basename(corebase)
-            else:
-                layer_relative = os.path.relpath(layer, corebase)
-                if os.path.dirname(layer_relative) == corebase_relative:
-                    layer_relative = os.path.dirname(corebase_relative) + '/' + layernewname
-                layer_relative = os.path.basename(corebase) + '/' + layer_relative
+            # If the layer is located somewhere under the same parent directory
+            # as corebase we keep the layer structure.
+            elif os.path.commonpath([layer, corebase]) == os.path.dirname(corebase):
+                layer_relative = os.path.relpath(layer, os.path.dirname(corebase))
             if os.path.dirname(layer_relative) != layernewname:
                 layerdestpath += '/' + os.path.dirname(layer_relative)
 
@@ -259,7 +257,7 @@ def create_locked_sstate_cache(lockedsigs, input_sstate_cache, output_sstate_cac
     bb.note('Generating sstate-cache...')
 
     nativelsbstring = d.getVar('NATIVELSBSTRING')
-    bb.process.run("gen-lockedsig-cache %s %s %s %s %s" % (lockedsigs, input_sstate_cache, output_sstate_cache, nativelsbstring, filterfile or ''))
+    bb.process.run("PYTHONDONTWRITEBYTECODE=1 gen-lockedsig-cache %s %s %s %s %s" % (lockedsigs, input_sstate_cache, output_sstate_cache, nativelsbstring, filterfile or ''))
     if fixedlsbstring and nativelsbstring != fixedlsbstring:
         nativedir = output_sstate_cache + '/' + nativelsbstring
         if os.path.isdir(nativedir):
@@ -286,7 +284,7 @@ def check_sstate_task_list(d, targets, filteroutfile, cmdprefix='', cwd=None, lo
         logparam = '-l %s' % logfile
     else:
         logparam = ''
-    cmd = "%sBB_SETSCENE_ENFORCE=1 PSEUDO_DISABLED=1 oe-check-sstate %s -s -o %s %s" % (cmdprefix, targets, filteroutfile, logparam)
+    cmd = "%sPYTHONDONTWRITEBYTECODE=1 BB_SETSCENE_ENFORCE=1 PSEUDO_DISABLED=1 oe-check-sstate %s -s -o %s %s" % (cmdprefix, targets, filteroutfile, logparam)
     env = dict(d.getVar('BB_ORIGENV', False))
     env.pop('BUILDDIR', '')
     env.pop('BBPATH', '')
diff --git a/meta/lib/oe/cve_check.py b/meta/lib/oe/cve_check.py
index ce755f940a..ed5c714cb8 100644
--- a/meta/lib/oe/cve_check.py
+++ b/meta/lib/oe/cve_check.py
@@ -1,3 +1,9 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
 import collections
 import re
 import itertools
@@ -11,8 +17,13 @@ _Version = collections.namedtuple(
 class Version():
 
     def __init__(self, version, suffix=None):
+
+        suffixes = ["alphabetical", "patch"]
+
         if str(suffix) == "alphabetical":
             version_pattern = r"""r?v?(?:(?P<release>[0-9]+(?:[-\.][0-9]+)*)(?P<patch>[-_\.]?(?P<patch_l>[a-z]))?(?P<pre>[-_\.]?(?P<pre_l>(rc|alpha|beta|pre|preview|dev))[-_\.]?(?P<pre_v>[0-9]+)?)?)(.*)?"""
+        elif str(suffix) == "patch":
+            version_pattern = r"""r?v?(?:(?P<release>[0-9]+(?:[-\.][0-9]+)*)(?P<patch>[-_\.]?(p|patch)(?P<patch_l>[0-9]+))?(?P<pre>[-_\.]?(?P<pre_l>(rc|alpha|beta|pre|preview|dev))[-_\.]?(?P<pre_v>[0-9]+)?)?)(.*)?"""
         else:
             version_pattern = r"""r?v?(?:(?P<release>[0-9]+(?:[-\.][0-9]+)*)(?P<pre>[-_\.]?(?P<pre_l>(rc|alpha|beta|pre|preview|dev))[-_\.]?(?P<pre_v>[0-9]+)?)?)(.*)?"""
         regex = re.compile(r"^\s*" + version_pattern + r"\s*$", re.VERBOSE | re.IGNORECASE)
@@ -23,7 +34,7 @@ class Version():
 
         self._version = _Version(
             release=tuple(int(i) for i in match.group("release").replace("-",".").split(".")),
-            patch_l=match.group("patch_l") if str(suffix) == "alphabetical" and match.group("patch_l") else "",
+            patch_l=match.group("patch_l") if str(suffix) in suffixes and match.group("patch_l") else "",
             pre_l=match.group("pre_l"),
             pre_v=match.group("pre_v")
         )
@@ -58,3 +69,177 @@ def _cmpkey(release, patch_l, pre_l, pre_v):
     else:
         _pre = float(pre_v) if pre_v else float('-inf')
     return _release, _patch, _pre
+
+
+def get_patched_cves(d):
+    """
+    Get patches that solve CVEs using the "CVE: " tag.
+    """
+
+    import re
+    import oe.patch
+
+    cve_match = re.compile(r"CVE:( CVE-\d{4}-\d+)+")
+
+    # Matches the last "CVE-YYYY-ID" in the file name, also if written
+    # in lowercase. Possible to have multiple CVE IDs in a single
+    # file name, but only the last one will be detected from the file name.
+    # However, patch files contents addressing multiple CVE IDs are supported
+    # (cve_match regular expression)
+    cve_file_name_match = re.compile(r".*(CVE-\d{4}-\d+)", re.IGNORECASE)
+
+    patched_cves = set()
+    patches = oe.patch.src_patches(d)
+    bb.debug(2, "Scanning %d patches for CVEs" % len(patches))
+    for url in patches:
+        patch_file = bb.fetch.decodeurl(url)[2]
+
+        # Check patch file name for CVE ID
+        fname_match = cve_file_name_match.search(patch_file)
+        if fname_match:
+            cve = fname_match.group(1).upper()
+            patched_cves.add(cve)
+            bb.debug(2, "Found %s from patch file name %s" % (cve, patch_file))
+
+        # Remote patches won't be present and compressed patches won't be
+        # unpacked, so say we're not scanning them
+        if not os.path.isfile(patch_file):
+            bb.note("%s is remote or compressed, not scanning content" % patch_file)
+            continue
+
+        with open(patch_file, "r", encoding="utf-8") as f:
+            try:
+                patch_text = f.read()
+            except UnicodeDecodeError:
+                bb.debug(1, "Failed to read patch %s using UTF-8 encoding"
+                        " trying with iso8859-1" % patch_file)
+                f.close()
+                with open(patch_file, "r", encoding="iso8859-1") as f:
+                    patch_text = f.read()
+
+        # Search for one or more "CVE: " lines
+        text_match = False
+        for match in cve_match.finditer(patch_text):
+            # Get only the CVEs without the "CVE: " tag
+            cves = patch_text[match.start()+5:match.end()]
+            for cve in cves.split():
+                bb.debug(2, "Patch %s solves %s" % (patch_file, cve))
+                patched_cves.add(cve)
+                text_match = True
+
+        if not fname_match and not text_match:
+            bb.debug(2, "Patch %s doesn't solve CVEs" % patch_file)
+
+    # Search for additional patched CVEs
+    for cve in (d.getVarFlags("CVE_STATUS") or {}):
+        decoded_status, _, _ = decode_cve_status(d, cve)
+        if decoded_status == "Patched":
+            bb.debug(2, "CVE %s is additionally patched" % cve)
+            patched_cves.add(cve)
+
+    return patched_cves
+
+
+def get_cpe_ids(cve_product, version):
+    """
+    Get list of CPE identifiers for the given product and version
+    """
+
+    version = version.split("+git")[0]
+
+    cpe_ids = []
+    for product in cve_product.split():
+        # CVE_PRODUCT in recipes may include vendor information for CPE identifiers. If not,
+        # use wildcard for vendor.
+        if ":" in product:
+            vendor, product = product.split(":", 1)
+        else:
+            vendor = "*"
+
+        cpe_id = 'cpe:2.3:*:{}:{}:{}:*:*:*:*:*:*:*'.format(vendor, product, version)
+        cpe_ids.append(cpe_id)
+
+    return cpe_ids
+
+def cve_check_merge_jsons(output, data):
+    """
+    Merge the data in the "package" property to the main data file
+    output
+    """
+    if output["version"] != data["version"]:
+        bb.error("Version mismatch when merging JSON outputs")
+        return
+
+    for product in output["package"]:
+        if product["name"] == data["package"][0]["name"]:
+            bb.error("Error adding the same package %s twice" % product["name"])
+            return
+
+    output["package"].append(data["package"][0])
+
+def update_symlinks(target_path, link_path):
+    """
+    Update a symbolic link link_path to point to target_path.
+    Remove the link and recreate it if exist and is different.
+    """
+    if link_path != target_path and os.path.exists(target_path):
+        if os.path.exists(os.path.realpath(link_path)):
+            os.remove(link_path)
+        os.symlink(os.path.basename(target_path), link_path)
+
+
+def convert_cve_version(version):
+    """
+    This function converts from CVE format to Yocto version format.
+    eg 8.3_p1 -> 8.3p1, 6.2_rc1 -> 6.2-rc1
+
+    Unless it is redefined using CVE_VERSION in the recipe,
+    cve_check uses the version in the name of the recipe (${PV})
+    to check vulnerabilities against a CVE in the database downloaded from NVD.
+
+    When the version has an update, i.e.
+    "p1" in OpenSSH 8.3p1,
+    "-rc1" in linux kernel 6.2-rc1,
+    the database stores the version as version_update (8.3_p1, 6.2_rc1).
+    Therefore, we must transform this version before comparing to the
+    recipe version.
+
+    In this case, the parameter of the function is 8.3_p1.
+    If the version uses the Release Candidate format, "rc",
+    this function replaces the '_' by '-'.
+    If the version uses the Update format, "p",
+    this function removes the '_' completely.
+    """
+    import re
+
+    matches = re.match('^([0-9.]+)_((p|rc)[0-9]+)$', version)
+
+    if not matches:
+        return version
+
+    version = matches.group(1)
+    update = matches.group(2)
+
+    if matches.group(3) == "rc":
+        return version + '-' + update
+
+    return version + update
+
+def decode_cve_status(d, cve):
+    """
+    Convert CVE_STATUS into status, detail and description.
+    """
+    status = d.getVarFlag("CVE_STATUS", cve)
+    if not status:
+        return ("", "", "")
+
+    status_split = status.split(':', 1)
+    detail = status_split[0]
+    description = status_split[1].strip() if (len(status_split) > 1) else ""
+
+    status_mapping = d.getVarFlag("CVE_CHECK_STATUSMAP", detail)
+    if status_mapping is None:
+        bb.warn('Invalid detail "%s" for CVE_STATUS[%s] = "%s", fallback to Unpatched' % (detail, cve, status))
+        status_mapping = "Unpatched"
+
+    return (status_mapping, detail, description)
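The version handling above is self-contained enough to demonstrate in isolation; this small sketch copies convert_cve_version() from the hunk and checks the examples given in its docstring:

# Verify the documented NVD-to-Yocto version conversions.
import re

def convert_cve_version(version):
    matches = re.match('^([0-9.]+)_((p|rc)[0-9]+)$', version)
    if not matches:
        return version
    version, update = matches.group(1), matches.group(2)
    if matches.group(3) == "rc":
        return version + '-' + update     # 6.2_rc1 -> 6.2-rc1
    return version + update               # 8.3_p1  -> 8.3p1

assert convert_cve_version("8.3_p1") == "8.3p1"
assert convert_cve_version("6.2_rc1") == "6.2-rc1"
assert convert_cve_version("1.2.3") == "1.2.3"   # no update suffix: unchanged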
diff --git a/meta/lib/oe/data.py b/meta/lib/oe/data.py
index 602130a904..37121cfad2 100644
--- a/meta/lib/oe/data.py
+++ b/meta/lib/oe/data.py
@@ -1,4 +1,6 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
diff --git a/meta/lib/oe/distro_check.py b/meta/lib/oe/distro_check.py
index 88e46c354d..3494520f40 100644
--- a/meta/lib/oe/distro_check.py
+++ b/meta/lib/oe/distro_check.py
@@ -1,4 +1,6 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
@@ -26,7 +28,7 @@ def find_latest_numeric_release(url, d):
     maxstr=""
     for link in get_links_from_url(url, d):
         try:
-            # TODO use LooseVersion
+            # TODO use bb.utils.vercmp_string_op()
             release = float(link)
         except:
             release = 0
diff --git a/meta/lib/oe/elf.py b/meta/lib/oe/elf.py
index df0a4593fa..eab2349a4f 100644
--- a/meta/lib/oe/elf.py
+++ b/meta/lib/oe/elf.py
@@ -1,4 +1,6 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
@@ -19,6 +21,7 @@ def machine_dict(d):
19 "x86_64": (62, 0, 0, True, 64), 21 "x86_64": (62, 0, 0, True, 64),
20 "epiphany": (4643, 0, 0, True, 32), 22 "epiphany": (4643, 0, 0, True, 32),
21 "lm32": (138, 0, 0, False, 32), 23 "lm32": (138, 0, 0, False, 32),
24 "loongarch64":(258, 0, 0, True, 64),
22 "mips": ( 8, 0, 0, False, 32), 25 "mips": ( 8, 0, 0, False, 32),
23 "mipsel": ( 8, 0, 0, True, 32), 26 "mipsel": ( 8, 0, 0, True, 32),
24 "microblaze": (189, 0, 0, False, 32), 27 "microblaze": (189, 0, 0, False, 32),
@@ -43,6 +46,7 @@ def machine_dict(d):
43 "ia64": (50, 0, 0, True, 64), 46 "ia64": (50, 0, 0, True, 64),
44 "alpha": (36902, 0, 0, True, 64), 47 "alpha": (36902, 0, 0, True, 64),
45 "hppa": (15, 3, 0, False, 32), 48 "hppa": (15, 3, 0, False, 32),
49 "loongarch64":(258, 0, 0, True, 64),
46 "m68k": ( 4, 0, 0, False, 32), 50 "m68k": ( 4, 0, 0, False, 32),
47 "mips": ( 8, 0, 0, False, 32), 51 "mips": ( 8, 0, 0, False, 32),
48 "mipsel": ( 8, 0, 0, True, 32), 52 "mipsel": ( 8, 0, 0, True, 32),
@@ -61,6 +65,14 @@ def machine_dict(d):
61 "microblaze": (189, 0, 0, False, 32), 65 "microblaze": (189, 0, 0, False, 32),
62 "microblazeel":(189, 0, 0, True, 32), 66 "microblazeel":(189, 0, 0, True, 32),
63 }, 67 },
68 "linux-android" : {
69 "aarch64" : (183, 0, 0, True, 64),
70 "i686": ( 3, 0, 0, True, 32),
71 "x86_64": (62, 0, 0, True, 64),
72 },
73 "linux-androideabi" : {
74 "arm" : (40, 97, 0, True, 32),
75 },
64 "linux-musl" : { 76 "linux-musl" : {
65 "aarch64" : (183, 0, 0, True, 64), 77 "aarch64" : (183, 0, 0, True, 64),
66 "aarch64_be" :(183, 0, 0, False, 64), 78 "aarch64_be" :(183, 0, 0, False, 64),
diff --git a/meta/lib/oe/go.py b/meta/lib/oe/go.py
new file mode 100644
index 0000000000..dfd957d157
--- /dev/null
+++ b/meta/lib/oe/go.py
@@ -0,0 +1,34 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import re
+
+def map_arch(a):
+    if re.match('i.86', a):
+        return '386'
+    elif a == 'x86_64':
+        return 'amd64'
+    elif re.match('arm.*', a):
+        return 'arm'
+    elif re.match('aarch64.*', a):
+        return 'arm64'
+    elif re.match('mips64el.*', a):
+        return 'mips64le'
+    elif re.match('mips64.*', a):
+        return 'mips64'
+    elif a == 'mips':
+        return 'mips'
+    elif a == 'mipsel':
+        return 'mipsle'
+    elif re.match('p(pc|owerpc)(64le)', a):
+        return 'ppc64le'
+    elif re.match('p(pc|owerpc)(64)', a):
+        return 'ppc64'
+    elif a == 'riscv64':
+        return 'riscv64'
+    elif a == 'loongarch64':
+        return 'loong64'
+    return ''
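map_arch() translates OpenEmbedded TARGET_ARCH values to Go's GOARCH names; a short usage sketch (assumes meta/lib is on sys.path so oe.go is importable):

# Expected GOARCH translations for a few common TARGET_ARCH values.
from oe.go import map_arch

for oe_arch in ("i586", "x86_64", "aarch64", "mips64el", "loongarch64"):
    print(oe_arch, "->", map_arch(oe_arch))
# i586 -> 386, x86_64 -> amd64, aarch64 -> arm64,
# mips64el -> mips64le, loongarch64 -> loong64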
diff --git a/meta/lib/oe/gpg_sign.py b/meta/lib/oe/gpg_sign.py
index 7634d7ef1d..ede6186c84 100644
--- a/meta/lib/oe/gpg_sign.py
+++ b/meta/lib/oe/gpg_sign.py
@@ -1,13 +1,16 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
 """Helper module for GPG signing"""
-import os
 
 import bb
-import subprocess
+import os
 import shlex
+import subprocess
+import tempfile
 
 class LocalSigner(object):
     """Class for handling local (on the build host) signing"""
@@ -58,7 +61,7 @@ class LocalSigner(object):
         for i in range(0, len(files), sign_chunk):
             subprocess.check_output(shlex.split(cmd + ' '.join(files[i:i+sign_chunk])), stderr=subprocess.STDOUT)
 
-    def detach_sign(self, input_file, keyid, passphrase_file, passphrase=None, armor=True):
+    def detach_sign(self, input_file, keyid, passphrase_file, passphrase=None, armor=True, output_suffix=None, use_sha256=False):
         """Create a detached signature of a file"""
 
         if passphrase_file and passphrase:
@@ -71,25 +74,35 @@ class LocalSigner(object):
             cmd += ['--homedir', self.gpg_path]
         if armor:
             cmd += ['--armor']
+        if use_sha256:
+            cmd += ['--digest-algo', "SHA256"]
 
         #gpg > 2.1 supports password pipes only through the loopback interface
         #gpg < 2.1 errors out if given unknown parameters
         if self.gpg_version > (2,1,):
             cmd += ['--pinentry-mode', 'loopback']
 
-        cmd += [input_file]
-
         try:
             if passphrase_file:
                 with open(passphrase_file) as fobj:
                     passphrase = fobj.readline();
 
-            job = subprocess.Popen(cmd, stdin=subprocess.PIPE, stderr=subprocess.PIPE)
-            (_, stderr) = job.communicate(passphrase.encode("utf-8"))
+            if not output_suffix:
+                output_suffix = 'asc' if armor else 'sig'
+            output_file = input_file + "." + output_suffix
+            with tempfile.TemporaryDirectory(dir=os.path.dirname(output_file)) as tmp_dir:
+                tmp_file = os.path.join(tmp_dir, os.path.basename(output_file))
+                cmd += ['-o', tmp_file]
+
+                cmd += [input_file]
+
+                job = subprocess.Popen(cmd, stdin=subprocess.PIPE, stderr=subprocess.PIPE)
+                (_, stderr) = job.communicate(passphrase.encode("utf-8"))
 
-            if job.returncode:
-                bb.fatal("GPG exited with code %d: %s" % (job.returncode, stderr.decode("utf-8")))
+                if job.returncode:
+                    bb.fatal("GPG exited with code %d: %s" % (job.returncode, stderr.decode("utf-8")))
 
+                os.rename(tmp_file, output_file)
         except IOError as e:
             bb.error("IO error (%s): %s" % (e.errno, e.strerror))
             raise Exception("Failed to sign '%s'" % input_file)
@@ -109,16 +122,33 @@ class LocalSigner(object):
             bb.fatal("Could not get gpg version: %s" % e)
 
 
-    def verify(self, sig_file):
+    def verify(self, sig_file, valid_sigs = ''):
         """Verify signature"""
-        cmd = self.gpg_cmd + [" --verify", "--no-permission-warning"]
+        cmd = self.gpg_cmd + ["--verify", "--no-permission-warning", "--status-fd", "1"]
         if self.gpg_path:
             cmd += ["--homedir", self.gpg_path]
 
         cmd += [sig_file]
-        status = subprocess.call(cmd)
-        ret = False if status else True
-        return ret
+        status = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+        # Valid if any key matches if unspecified
+        if not valid_sigs:
+            ret = False if status.returncode else True
+            return ret
+
+        import re
+        goodsigs = []
+        sigre = re.compile(r'^\[GNUPG:\] GOODSIG (\S+)\s(.*)$')
+        for l in status.stdout.decode("utf-8").splitlines():
+            s = sigre.match(l)
+            if s:
+                goodsigs += [s.group(1)]
+
+        for sig in valid_sigs.split():
+            if sig in goodsigs:
+                return True
+        if len(goodsigs):
+            bb.warn('No accepted signatures found. Good signatures found: %s.' % ' '.join(goodsigs))
+        return False
 
 
 def get_signer(d, backend):
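The reworked verify() parses gpg's machine-readable status output instead of trusting the exit code alone. A standalone sketch of that GOODSIG filtering (the status line below is illustrative, not captured from a real gpg run):

# GOODSIG lines name the key that produced a valid signature; accept
# only signatures from an allow-list, as verify(valid_sigs=...) does.
import re

status_output = "[GNUPG:] GOODSIG ABCDEF0123456789 Example Signer <signer@example.com>"
sigre = re.compile(r'^\[GNUPG:\] GOODSIG (\S+)\s(.*)$')
goodsigs = [m.group(1) for m in map(sigre.match, status_output.splitlines()) if m]
valid_sigs = "ABCDEF0123456789 0011223344556677"   # hypothetical allow-list
print(any(sig in goodsigs for sig in valid_sigs.split()))   # True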
diff --git a/meta/lib/oe/license.py b/meta/lib/oe/license.py
index 665d32ecbb..d9c8d94da4 100644
--- a/meta/lib/oe/license.py
+++ b/meta/lib/oe/license.py
@@ -1,4 +1,6 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
 """Code for parsing OpenEmbedded license strings"""
@@ -14,6 +16,16 @@ def license_ok(license, dont_want_licenses):
             return False
     return True
 
+def obsolete_license_list():
+    return ["AGPL-3", "AGPL-3+", "AGPLv3", "AGPLv3+", "AGPLv3.0", "AGPLv3.0+", "AGPL-3.0", "AGPL-3.0+", "BSD-0-Clause",
+            "GPL-1", "GPL-1+", "GPLv1", "GPLv1+", "GPLv1.0", "GPLv1.0+", "GPL-1.0", "GPL-1.0+", "GPL-2", "GPL-2+", "GPLv2",
+            "GPLv2+", "GPLv2.0", "GPLv2.0+", "GPL-2.0", "GPL-2.0+", "GPL-3", "GPL-3+", "GPLv3", "GPLv3+", "GPLv3.0", "GPLv3.0+",
+            "GPL-3.0", "GPL-3.0+", "LGPLv2", "LGPLv2+", "LGPLv2.0", "LGPLv2.0+", "LGPL-2.0", "LGPL-2.0+", "LGPL2.1", "LGPL2.1+",
+            "LGPLv2.1", "LGPLv2.1+", "LGPL-2.1", "LGPL-2.1+", "LGPLv3", "LGPLv3+", "LGPL-3.0", "LGPL-3.0+", "MPL-1", "MPLv1",
+            "MPLv1.1", "MPLv2", "MIT-X", "MIT-style", "openssl", "PSF", "PSFv2", "Python-2", "Apachev2", "Apache-2", "Artisticv1",
+            "Artistic-1", "AFL-2", "AFL-1", "AFLv2", "AFLv1", "CDDLv1", "CDDL-1", "EPLv1.0", "FreeType", "Nauman",
+            "tcl", "vim", "SGIv1"]
+
 class LicenseError(Exception):
     pass
 
@@ -74,6 +86,9 @@ class FlattenVisitor(LicenseVisitor):
     def visit_Str(self, node):
         self.licenses.append(node.s)
 
+    def visit_Constant(self, node):
+        self.licenses.append(node.value)
+
     def visit_BinOp(self, node):
         if isinstance(node.op, ast.BitOr):
             left = FlattenVisitor(self.choose_licenses)
@@ -96,26 +111,26 @@ def flattened_licenses(licensestr, choose_licenses):
         raise LicenseSyntaxError(licensestr, exc)
     return flatten.licenses
 
-def is_included(licensestr, whitelist=None, blacklist=None):
-    """Given a license string and whitelist and blacklist, determine if the
-    license string matches the whitelist and does not match the blacklist.
+def is_included(licensestr, include_licenses=None, exclude_licenses=None):
+    """Given a license string, a list of licenses to include and a list of
+    licenses to exclude, determine if the license string matches the include
+    list and does not match the exclude list.
 
     Returns a tuple holding the boolean state and a list of the applicable
     licenses that were excluded if state is False, or the licenses that were
-    included if the state is True.
-    """
+    included if the state is True."""
 
     def include_license(license):
-        return any(fnmatch(license, pattern) for pattern in whitelist)
+        return any(fnmatch(license, pattern) for pattern in include_licenses)
 
     def exclude_license(license):
-        return any(fnmatch(license, pattern) for pattern in blacklist)
+        return any(fnmatch(license, pattern) for pattern in exclude_licenses)
 
     def choose_licenses(alpha, beta):
         """Select the option in an OR which is the 'best' (has the most
         included licenses and no excluded licenses)."""
         # The factor 1000 below is arbitrary, just expected to be much larger
-        # that the number of licenses actually specified. That way the weight
+        # than the number of licenses actually specified. That way the weight
         # will be negative if the list of licenses contains an excluded license,
         # but still gives a higher weight to the list with the most included
         # licenses.
@@ -128,11 +143,11 @@ def is_included(licensestr, whitelist=None, blacklist=None):
         else:
             return beta
 
-    if not whitelist:
-        whitelist = ['*']
+    if not include_licenses:
+        include_licenses = ['*']
 
-    if not blacklist:
-        blacklist = []
+    if not exclude_licenses:
+        exclude_licenses = []
 
     licenses = flattened_licenses(licensestr, choose_licenses)
     excluded = [lic for lic in licenses if exclude_license(lic)]
@@ -227,6 +242,9 @@ class ListVisitor(LicenseVisitor):
     def visit_Str(self, node):
         self.licenses.add(node.s)
 
+    def visit_Constant(self, node):
+        self.licenses.add(node.value)
+
 def list_licenses(licensestr):
     """Simply get a list of all licenses mentioned in a license string.
     Binary operators are not applied or taken into account in any way"""
@@ -236,3 +254,8 @@ def list_licenses(licensestr):
     except SyntaxError as exc:
         raise LicenseSyntaxError(licensestr, exc)
     return visitor.licenses
+
+def apply_pkg_license_exception(pkg, bad_licenses, exceptions):
+    """Return remaining bad licenses after removing any package exceptions"""
+
+    return [lic for lic in bad_licenses if pkg + ':' + lic not in exceptions]
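The whitelist/blacklist parameters are renamed to include_licenses/exclude_licenses with unchanged semantics; a usage sketch (assumes meta/lib is on sys.path):

# With an OR in the license string, choose_licenses() picks the branch
# that best satisfies the include/exclude lists.
from oe.license import is_included

ok, lics = is_included("GPL-3.0-only | MIT",
                       include_licenses=["*"],
                       exclude_licenses=["GPL-3.0*"])
print(ok, lics)   # True ['MIT'] - the MIT branch of the OR is selected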
diff --git a/meta/lib/oe/lsb.py b/meta/lib/oe/lsb.py
index 43e46380d7..3ec03e5042 100644
--- a/meta/lib/oe/lsb.py
+++ b/meta/lib/oe/lsb.py
@@ -1,4 +1,6 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
diff --git a/meta/lib/oe/maketype.py b/meta/lib/oe/maketype.py
index d929c8b3e5..7a83bdf602 100644
--- a/meta/lib/oe/maketype.py
+++ b/meta/lib/oe/maketype.py
@@ -1,4 +1,6 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
 """OpenEmbedded variable typing support
@@ -10,12 +12,7 @@ the arguments of the type's factory for details.
 
 import inspect
 import oe.types as types
-try:
-    # Python 3.7+
-    from collections.abc import Callable
-except ImportError:
-    # Python < 3.7
-    from collections import Callable
+from collections.abc import Callable
 
 available_types = {}
 
diff --git a/meta/lib/oe/manifest.py b/meta/lib/oe/manifest.py
index 1a058dcd73..61f18adc4a 100644
--- a/meta/lib/oe/manifest.py
+++ b/meta/lib/oe/manifest.py
@@ -1,4 +1,6 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
diff --git a/meta/lib/oe/npm_registry.py b/meta/lib/oe/npm_registry.py
new file mode 100644
index 0000000000..d97ced7cda
--- /dev/null
+++ b/meta/lib/oe/npm_registry.py
@@ -0,0 +1,175 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import bb
+import json
+import subprocess
+
+_ALWAYS_SAFE = frozenset('ABCDEFGHIJKLMNOPQRSTUVWXYZ'
+                         'abcdefghijklmnopqrstuvwxyz'
+                         '0123456789'
+                         '_.-~()')
+
+MISSING_OK = object()
+
+REGISTRY = "https://registry.npmjs.org"
+
+# we can not use urllib.parse here because npm expects lowercase
+# hex-chars but urllib generates uppercase ones
+def uri_quote(s, safe = '/'):
+    res = ""
+    safe_set = set(safe)
+    for c in s:
+        if c in _ALWAYS_SAFE or c in safe_set:
+            res += c
+        else:
+            res += '%%%02x' % ord(c)
+    return res
+
+class PackageJson:
+    def __init__(self, spec):
+        self.__spec = spec
+
+    @property
+    def name(self):
+        return self.__spec['name']
+
+    @property
+    def version(self):
+        return self.__spec['version']
+
+    @property
+    def empty_manifest(self):
+        return {
+            'name': self.name,
+            'description': self.__spec.get('description', ''),
+            'versions': {},
+        }
+
+    def base_filename(self):
+        return uri_quote(self.name, safe = '@')
+
+    def as_manifest_entry(self, tarball_uri):
+        res = {}
+
+        ## NOTE: 'npm install' requires more than basic meta information;
+        ## e.g. it takes 'bin' from this manifest entry but not the actual
+        ## 'package.json'
+        for (idx,dflt) in [('name', None),
+                           ('description', ""),
+                           ('version', None),
+                           ('bin', MISSING_OK),
+                           ('man', MISSING_OK),
+                           ('scripts', MISSING_OK),
+                           ('directories', MISSING_OK),
+                           ('dependencies', MISSING_OK),
+                           ('devDependencies', MISSING_OK),
+                           ('optionalDependencies', MISSING_OK),
+                           ('license', "unknown")]:
+            if idx in self.__spec:
+                res[idx] = self.__spec[idx]
+            elif dflt == MISSING_OK:
+                pass
+            elif dflt != None:
+                res[idx] = dflt
+            else:
+                raise Exception("%s-%s: missing key %s" % (self.name,
+                                                           self.version,
+                                                           idx))
+
+        res['dist'] = {
+            'tarball': tarball_uri,
+        }
+
+        return res
+
+class ManifestImpl:
+    def __init__(self, base_fname, spec):
+        self.__base = base_fname
+        self.__spec = spec
+
+    def load(self):
+        try:
+            with open(self.filename, "r") as f:
+                res = json.load(f)
+        except IOError:
+            res = self.__spec.empty_manifest
+
+        return res
+
+    def save(self, meta):
+        with open(self.filename, "w") as f:
+            json.dump(meta, f, indent = 2)
+
+    @property
+    def filename(self):
+        return self.__base + ".meta"
+
+class Manifest:
+    def __init__(self, base_fname, spec):
+        self.__base = base_fname
+        self.__spec = spec
+        self.__lockf = None
+        self.__impl = None
+
+    def __enter__(self):
+        self.__lockf = bb.utils.lockfile(self.__base + ".lock")
+        self.__impl = ManifestImpl(self.__base, self.__spec)
+        return self.__impl
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        bb.utils.unlockfile(self.__lockf)
+
+class NpmCache:
+    def __init__(self, cache):
+        self.__cache = cache
+
+    @property
+    def path(self):
+        return self.__cache
+
+    def run(self, type, key, fname):
+        subprocess.run(['oe-npm-cache', self.__cache, type, key, fname],
+                       check = True)
+
+class NpmRegistry:
+    def __init__(self, path, cache):
+        self.__path = path
+        self.__cache = NpmCache(cache + '/_cacache')
+        bb.utils.mkdirhier(self.__path)
+        bb.utils.mkdirhier(self.__cache.path)
+
+    @staticmethod
+    ## This function is critical and must match nodejs expectations
+    def _meta_uri(spec):
+        return REGISTRY + '/' + uri_quote(spec.name, safe = '@')
+
+    @staticmethod
+    ## Exact return value does not matter; just make it look like a
+    ## usual registry url
+    def _tarball_uri(spec):
+        return '%s/%s/-/%s-%s.tgz' % (REGISTRY,
+                                      uri_quote(spec.name, safe = '@'),
+                                      uri_quote(spec.name, safe = '@/'),
+                                      spec.version)
+
+    def add_pkg(self, tarball, pkg_json):
+        pkg_json = PackageJson(pkg_json)
+        base = os.path.join(self.__path, pkg_json.base_filename())
+
+        with Manifest(base, pkg_json) as manifest:
+            meta = manifest.load()
+            tarball_uri = self._tarball_uri(pkg_json)
+
+            meta['versions'][pkg_json.version] = pkg_json.as_manifest_entry(tarball_uri)
+
+            manifest.save(meta)
+
+            ## Cache entries are a little bit dependent on the nodejs
+            ## version; version specific cache implementation must
+            ## mitigate differences
+            self.__cache.run('meta', self._meta_uri(pkg_json), manifest.filename);
+            self.__cache.run('tgz', tarball_uri, tarball);
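A usage sketch for the registry mirror (all paths are hypothetical; NpmRegistry needs BitBake's bb module importable and the oe-npm-cache helper on PATH):

# Publish one tarball plus its parsed package.json into the local
# mirror, then prime the version-specific npm cache.
import json
from oe.npm_registry import NpmRegistry

with open("/build/npm/foo-1.0.0/package.json") as f:    # hypothetical path
    pkg_json = json.load(f)

registry = NpmRegistry("/build/npm/registry", "/build/npm/cache")
registry.add_pkg("/build/npm/foo-1.0.0.tgz", pkg_json)  # hypothetical tarball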
diff --git a/meta/lib/oe/overlayfs.py b/meta/lib/oe/overlayfs.py
new file mode 100644
index 0000000000..8b88900f71
--- /dev/null
+++ b/meta/lib/oe/overlayfs.py
@@ -0,0 +1,54 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+# This file contains common functions for overlayfs and its QA check
+
+# this function is based on https://github.com/systemd/systemd/blob/main/src/basic/unit-name.c
+def escapeSystemdUnitName(path):
+    escapeMap = {
+        '/': '-',
+        '-': "\\x2d",
+        '\\': "\\x5d"
+    }
+    return "".join([escapeMap.get(c, c) for c in path.strip('/')])
+
+def strForBash(s):
+    return s.replace('\\', '\\\\')
+
+def allOverlaysUnitName(d):
+    return d.getVar('PN') + '-overlays.service'
+
+def mountUnitName(unit):
+    return escapeSystemdUnitName(unit) + '.mount'
+
+def helperUnitName(unit):
+    return escapeSystemdUnitName(unit) + '-create-upper-dir.service'
+
+def unitFileList(d):
+    fileList = []
+    overlayMountPoints = d.getVarFlags("OVERLAYFS_MOUNT_POINT")
+
+    if not overlayMountPoints:
+        bb.fatal("A recipe uses overlayfs class but there is no OVERLAYFS_MOUNT_POINT set in your MACHINE configuration")
+
+    # check that we have required mount points set first
+    requiredMountPoints = d.getVarFlags('OVERLAYFS_WRITABLE_PATHS')
+    for mountPoint in requiredMountPoints:
+        if mountPoint not in overlayMountPoints:
+            bb.fatal("Missing required mount point for OVERLAYFS_MOUNT_POINT[%s] in your MACHINE configuration" % mountPoint)
+
+    for mountPoint in overlayMountPoints:
+        mountPointList = d.getVarFlag('OVERLAYFS_WRITABLE_PATHS', mountPoint)
+        if not mountPointList:
+            bb.debug(1, "No mount points defined for %s flag, don't add to file list", mountPoint)
+            continue
+        for path in mountPointList.split():
+            fileList.append(mountUnitName(path))
+            fileList.append(helperUnitName(path))
+
+    fileList.append(allOverlaysUnitName(d))
+
+    return fileList
+
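The escaping mirrors systemd's unit-name rules, so the generated mount unit names are predictable; a small sketch (assumes meta/lib is on sys.path):

# '/' becomes '-', a literal '-' becomes \x2d, and surrounding slashes
# are stripped before escaping.
from oe.overlayfs import escapeSystemdUnitName, mountUnitName

print(escapeSystemdUnitName("/var/log"))      # var-log
print(mountUnitName("/usr/share/my-app"))     # usr-share-my\x2dapp.mount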
diff --git a/meta/lib/oe/package.py b/meta/lib/oe/package.py
index dd700cbb0c..1511ba47c4 100644
--- a/meta/lib/oe/package.py
+++ b/meta/lib/oe/package.py
@@ -1,11 +1,22 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
+import errno
+import fnmatch
+import itertools
+import os
+import shlex
+import re
+import glob
 import stat
 import mmap
 import subprocess
 
+import oe.cachedpath
+
 def runstrip(arg):
     # Function to strip a single file, called from split_and_strip_files below
     # A working 'file' (one which works on the target architecture)
@@ -16,7 +27,11 @@ def runstrip(arg):
     # 8 - shared library
     # 16 - kernel module
 
-    (file, elftype, strip) = arg
+    if len(arg) == 3:
+        (file, elftype, strip) = arg
+        extra_strip_sections = ''
+    else:
+        (file, elftype, strip, extra_strip_sections) = arg
 
     newmode = None
     if not os.access(file, os.W_OK) or os.access(file, os.R_OK):
@@ -26,7 +41,7 @@ def runstrip(arg):
 
     stripcmd = [strip]
     skip_strip = False
-    # kernel module    
+    # kernel module
     if elftype & 16:
         if is_kernel_module_signed(file):
             bb.debug(1, "Skip strip on signed module %s" % file)
@@ -40,6 +55,9 @@ def runstrip(arg):
     # shared or executable:
     elif elftype & 8 or elftype & 4:
         stripcmd.extend(["--remove-section=.comment", "--remove-section=.note"])
+        if extra_strip_sections != '':
+            for section in extra_strip_sections.split():
+                stripcmd.extend(["--remove-section=" + section])
 
     stripcmd.append(file)
     bb.debug(1, "runstrip: %s" % stripcmd)
@@ -96,7 +114,7 @@ def is_static_lib(path):
         return start == magic
     return False
 
-def strip_execs(pn, dstdir, strip_cmd, libdir, base_libdir, d, qa_already_stripped=False):
+def strip_execs(pn, dstdir, strip_cmd, libdir, base_libdir, max_process, qa_already_stripped=False):
     """
     Strip executable code (like executables, shared libraries) _in_place_
     - Based on sysroot_strip in staging.bbclass
@@ -104,6 +122,7 @@ def strip_execs(pn, dstdir, strip_cmd, libdir, base_libdir, d, qa_already_stripp
     :param strip_cmd: Strip command (usually ${STRIP})
     :param libdir: ${libdir} - strip .so files in this directory
     :param base_libdir: ${base_libdir} - strip .so files in this directory
+    :param max_process: number of stripping processes started in parallel
     :param qa_already_stripped: Set to True if already-stripped' in ${INSANE_SKIP}
                                 This is for proper logging and messages only.
     """
@@ -146,7 +165,7 @@ def strip_execs(pn, dstdir, strip_cmd, libdir, base_libdir, d, qa_already_stripp
             # ...but is it ELF, and is it already stripped?
             checkelf.append(file)
             inodecache[file] = s.st_ino
-    results = oe.utils.multiprocess_launch(is_elf, checkelf, d)
+    results = oe.utils.multiprocess_launch_mp(is_elf, checkelf, max_process)
     for (file, elf_file) in results:
         #elf_file = is_elf(file)
         if elf_file & 1:
@@ -174,7 +193,7 @@ def strip_execs(pn, dstdir, strip_cmd, libdir, base_libdir, d, qa_already_stripp
             elf_file = int(elffiles[file])
             sfiles.append((file, elf_file, strip_cmd))
 
-    oe.utils.multiprocess_launch(runstrip, sfiles, d)
+    oe.utils.multiprocess_launch_mp(runstrip, sfiles, max_process)
 
 
 def file_translate(file):
@@ -283,3 +302,1725 @@ def read_shlib_providers(d):
283 shlib_provider[s[0]] = {} 302 shlib_provider[s[0]] = {}
284 shlib_provider[s[0]][s[1]] = (dep_pkg, s[2]) 303 shlib_provider[s[0]][s[1]] = (dep_pkg, s[2])
285 return shlib_provider 304 return shlib_provider
305
306# We generate a master list of directories to process, we start by
307# seeding this list with reasonable defaults, then load from
308# the fs-perms.txt files
309def fixup_perms(d):
310 import pwd, grp
311
312 cpath = oe.cachedpath.CachedPath()
313 dvar = d.getVar('PKGD')
314
315 # init using a string with the same format as a line as documented in
316 # the fs-perms.txt file
317 # <path> <mode> <uid> <gid> <walk> <fmode> <fuid> <fgid>
318 # <path> link <link target>
319 #
320 # __str__ can be used to print out an entry in the input format
321 #
322 # if fs_perms_entry.path is None:
323 # an error occurred
324 # if fs_perms_entry.link, you can retrieve:
325 # fs_perms_entry.path = path
326 # fs_perms_entry.link = target of link
327 # if not fs_perms_entry.link, you can retrieve:
328 # fs_perms_entry.path = path
329 # fs_perms_entry.mode = expected dir mode or None
330 # fs_perms_entry.uid = expected uid or -1
331 # fs_perms_entry.gid = expected gid or -1
332 # fs_perms_entry.walk = 'true' or something else
333 # fs_perms_entry.fmode = expected file mode or None
334 # fs_perms_entry.fuid = expected file uid or -1
335 # fs_perms_entry_fgid = expected file gid or -1
336 class fs_perms_entry():
337 def __init__(self, line):
338 lsplit = line.split()
339 if len(lsplit) == 3 and lsplit[1].lower() == "link":
340 self._setlink(lsplit[0], lsplit[2])
341 elif len(lsplit) == 8:
342 self._setdir(lsplit[0], lsplit[1], lsplit[2], lsplit[3], lsplit[4], lsplit[5], lsplit[6], lsplit[7])
343 else:
344 msg = "Fixup Perms: invalid config line %s" % line
345 oe.qa.handle_error("perm-config", msg, d)
346 self.path = None
347 self.link = None
348
349 def _setdir(self, path, mode, uid, gid, walk, fmode, fuid, fgid):
350 self.path = os.path.normpath(path)
351 self.link = None
352 self.mode = self._procmode(mode)
353 self.uid = self._procuid(uid)
354 self.gid = self._procgid(gid)
355 self.walk = walk.lower()
356 self.fmode = self._procmode(fmode)
357 self.fuid = self._procuid(fuid)
358 self.fgid = self._procgid(fgid)
359
360 def _setlink(self, path, link):
361 self.path = os.path.normpath(path)
362 self.link = link
363
364 def _procmode(self, mode):
365 if not mode or (mode and mode == "-"):
366 return None
367 else:
368 return int(mode,8)
369
370 # Note uid/gid -1 has special significance in os.lchown
371 def _procuid(self, uid):
372 if uid is None or uid == "-":
373 return -1
374 elif uid.isdigit():
375 return int(uid)
376 else:
377 return pwd.getpwnam(uid).pw_uid
378
379 def _procgid(self, gid):
380 if gid is None or gid == "-":
381 return -1
382 elif gid.isdigit():
383 return int(gid)
384 else:
385 return grp.getgrnam(gid).gr_gid
386
387 # Use for debugging the entries
388 def __str__(self):
389 if self.link:
390 return "%s link %s" % (self.path, self.link)
391 else:
392 mode = "-"
393 if self.mode:
394 mode = "0%o" % self.mode
395 fmode = "-"
396 if self.fmode:
397 fmode = "0%o" % self.fmode
398 uid = self._mapugid(self.uid)
399 gid = self._mapugid(self.gid)
400 fuid = self._mapugid(self.fuid)
401 fgid = self._mapugid(self.fgid)
402 return "%s %s %s %s %s %s %s %s" % (self.path, mode, uid, gid, self.walk, fmode, fuid, fgid)
403
404 def _mapugid(self, id):
405 if id is None or id == -1:
406 return "-"
407 else:
408 return "%d" % id
409
410 # Fix the permission, owner and group of path
411 def fix_perms(path, mode, uid, gid, dir):
412 if mode and not os.path.islink(path):
413 #bb.note("Fixup Perms: chmod 0%o %s" % (mode, dir))
414 os.chmod(path, mode)
415 # -1 is a special value that means don't change the uid/gid
416 # if they are BOTH -1, don't bother to lchown
417 if not (uid == -1 and gid == -1):
418 #bb.note("Fixup Perms: lchown %d:%d %s" % (uid, gid, dir))
419 os.lchown(path, uid, gid)
420
421 # Return a list of configuration files based on either the default
422 # files/fs-perms.txt or the contents of FILESYSTEM_PERMS_TABLES
423 # paths are resolved via BBPATH
424 def get_fs_perms_list(d):
425 str = ""
426 bbpath = d.getVar('BBPATH')
427 fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES') or ""
428 for conf_file in fs_perms_tables.split():
429 confpath = bb.utils.which(bbpath, conf_file)
430 if confpath:
431 str += " %s" % bb.utils.which(bbpath, conf_file)
432 else:
433 bb.warn("cannot find %s specified in FILESYSTEM_PERMS_TABLES" % conf_file)
434 return str
435
436 fs_perms_table = {}
437 fs_link_table = {}
438
439 # By default all of the standard directories specified in
440 # bitbake.conf will get 0755 root:root.
441 target_path_vars = [ 'base_prefix',
442 'prefix',
443 'exec_prefix',
444 'base_bindir',
445 'base_sbindir',
446 'base_libdir',
447 'datadir',
448 'sysconfdir',
449 'servicedir',
450 'sharedstatedir',
451 'localstatedir',
452 'infodir',
453 'mandir',
454 'docdir',
455 'bindir',
456 'sbindir',
457 'libexecdir',
458 'libdir',
459 'includedir' ]
460
461 for path in target_path_vars:
462 dir = d.getVar(path) or ""
463 if dir == "":
464 continue
465 fs_perms_table[dir] = fs_perms_entry(d.expand("%s 0755 root root false - - -" % (dir)))
466
467 # Now we actually load from the configuration files
468 for conf in get_fs_perms_list(d).split():
469 if not os.path.exists(conf):
470 continue
471 with open(conf) as f:
472 for line in f:
473 if line.startswith('#'):
474 continue
475 lsplit = line.split()
476 if len(lsplit) == 0:
477 continue
478 if len(lsplit) != 8 and not (len(lsplit) == 3 and lsplit[1].lower() == "link"):
479 msg = "Fixup perms: %s invalid line: %s" % (conf, line)
480 oe.qa.handle_error("perm-line", msg, d)
481 continue
482 entry = fs_perms_entry(d.expand(line))
483 if entry and entry.path:
484 if entry.link:
485 fs_link_table[entry.path] = entry
486 if entry.path in fs_perms_table:
487 fs_perms_table.pop(entry.path)
488 else:
489 fs_perms_table[entry.path] = entry
490 if entry.path in fs_link_table:
491 fs_link_table.pop(entry.path)
492
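For reference, a small self-contained sketch of the two line shapes the loader above accepts; the sample entries mimic the fs-perms table format but are illustrative, not taken from a real table:

    def classify_fs_perms_line(line):
        lsplit = line.split()
        if not lsplit or line.startswith('#'):
            return "ignored"
        if len(lsplit) == 3 and lsplit[1].lower() == "link":
            return "link"      # <path> link <target>
        if len(lsplit) == 8:
            return "perms"     # <path> <mode> <uid> <gid> <walk> <fmode> <fuid> <fgid>
        return "invalid"

    print(classify_fs_perms_line("/usr/src 0755 root root false - - -"))  # perms
    print(classify_fs_perms_line("/var/run link /run"))                   # link
    print(classify_fs_perms_line("/oops 0755 root"))                      # invalid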
493 # Debug -- list out in-memory table
494 #for dir in fs_perms_table:
495 # bb.note("Fixup Perms: %s: %s" % (dir, str(fs_perms_table[dir])))
496 #for link in fs_link_table:
497 # bb.note("Fixup Perms: %s: %s" % (link, str(fs_link_table[link])))
498
499 # We process links first, so we can go back and fixup directory ownership
500 # for any newly created directories
501 # Process in sorted order so /run gets created before /run/lock, etc.
502 for entry in sorted(fs_link_table.values(), key=lambda x: x.link):
503 link = entry.link
504 dir = entry.path
505 origin = dvar + dir
506 if not (cpath.exists(origin) and cpath.isdir(origin) and not cpath.islink(origin)):
507 continue
508
509 if link[0] == "/":
510 target = dvar + link
511 ptarget = link
512 else:
513 target = os.path.join(os.path.dirname(origin), link)
514 ptarget = os.path.join(os.path.dirname(dir), link)
515 if os.path.exists(target):
516 msg = "Fixup Perms: Unable to correct directory link, target already exists: %s -> %s" % (dir, ptarget)
517 oe.qa.handle_error("perm-link", msg, d)
518 continue
519
520 # Create path to move directory to, move it, and then setup the symlink
521 bb.utils.mkdirhier(os.path.dirname(target))
522 #bb.note("Fixup Perms: Rename %s -> %s" % (dir, ptarget))
523 bb.utils.rename(origin, target)
524 #bb.note("Fixup Perms: Link %s -> %s" % (dir, link))
525 os.symlink(link, origin)
526
527 for dir in fs_perms_table:
528 origin = dvar + dir
529 if not (cpath.exists(origin) and cpath.isdir(origin)):
530 continue
531
532 fix_perms(origin, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)
533
534 if fs_perms_table[dir].walk == 'true':
535 for root, dirs, files in os.walk(origin):
536 for dr in dirs:
537 each_dir = os.path.join(root, dr)
538 fix_perms(each_dir, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)
539 for f in files:
540 each_file = os.path.join(root, f)
541 fix_perms(each_file, fs_perms_table[dir].fmode, fs_perms_table[dir].fuid, fs_perms_table[dir].fgid, dir)
542
543# Get a list of files from file vars by searching for files under the current working directory
544# The list contains symlinks, directories and normal files.
545def files_from_filevars(filevars):
546 cpath = oe.cachedpath.CachedPath()
547 files = []
548 for f in filevars:
549 if os.path.isabs(f):
550 f = '.' + f
551 if not f.startswith("./"):
552 f = './' + f
553 globbed = glob.glob(f, recursive=True)
554 if globbed:
555 if [ f ] != globbed:
556 files += globbed
557 continue
558 files.append(f)
559
560 symlink_paths = []
561 for ind, f in enumerate(files):
562 # Handle directory symlinks. Truncate path to the lowest level symlink
563 parent = ''
564 for dirname in f.split('/')[:-1]:
565 parent = os.path.join(parent, dirname)
566 if dirname == '.':
567 continue
568 if cpath.islink(parent):
569 bb.warn("FILES contains file '%s' which resides under a "
570 "directory symlink. Please fix the recipe and use the "
571 "real path for the file." % f[1:])
572 symlink_paths.append(f)
573 files[ind] = parent
574 f = parent
575 break
576
577 if not cpath.islink(f):
578 if cpath.isdir(f):
579 newfiles = [ os.path.join(f,x) for x in os.listdir(f) ]
580 if newfiles:
581 files += newfiles
582
583 return files, symlink_paths
584
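The glob handling above can be surprising, so here is a hedged standalone sketch of the same normalisation and expansion step; the directory layout is fabricated for illustration:

    import glob
    import os
    import tempfile

    def expand_filevar(entry):
        # Same normalisation as above: re-root absolute paths at "./", then
        # glob; keep the literal entry when globbing adds nothing new.
        if os.path.isabs(entry):
            entry = '.' + entry
        if not entry.startswith('./'):
            entry = './' + entry
        globbed = glob.glob(entry, recursive=True)
        return globbed if globbed and globbed != [entry] else [entry]

    oldcwd = os.getcwd()
    with tempfile.TemporaryDirectory() as root:
        os.makedirs(os.path.join(root, 'usr/bin'))
        for name in ('foo', 'bar'):
            open(os.path.join(root, 'usr/bin', name), 'w').close()
        os.chdir(root)
        try:
            print(sorted(expand_filevar('/usr/bin/*')))  # ['./usr/bin/bar', './usr/bin/foo']
        finally:
            os.chdir(oldcwd)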
585# Called in package_<rpm,ipk,deb>.bbclass to get the correct list of configuration files
586def get_conffiles(pkg, d):
587 pkgdest = d.getVar('PKGDEST')
588 root = os.path.join(pkgdest, pkg)
589 cwd = os.getcwd()
590 os.chdir(root)
591
592 conffiles = d.getVar('CONFFILES:%s' % pkg)
593 if conffiles is None:
594 conffiles = d.getVar('CONFFILES')
595 if conffiles is None:
596 conffiles = ""
597 conffiles = conffiles.split()
598 conf_orig_list = files_from_filevars(conffiles)[0]
599
600 # Remove links and directories from conf_orig_list to get conf_list which only contains normal files
601 conf_list = []
602 for f in conf_orig_list:
603 if os.path.isdir(f):
604 continue
605 if os.path.islink(f):
606 continue
607 if not os.path.exists(f):
608 continue
609 conf_list.append(f)
610
611 # Remove the leading './'
612 for i in range(0, len(conf_list)):
613 conf_list[i] = conf_list[i][1:]
614
615 os.chdir(cwd)
616 return sorted(conf_list)
617
618def legitimize_package_name(s):
619 """
620 Make sure package names are legitimate strings
621 """
622
623 def fixutf(m):
624 cp = m.group(1)
625 if cp:
626 return ('\\u%s' % cp).encode('latin-1').decode('unicode_escape')
627
628 # Handle unicode codepoints encoded as <U0123>, as in glibc locale files.
629 s = re.sub(r'<U([0-9A-Fa-f]{1,4})>', fixutf, s)
630
631 # Remaining package name validity fixes
632 return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-')
633
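A self-contained copy of the same rules with a few illustrative inputs (the locale-style names are made up):

    import re

    def fixutf(m):
        cp = m.group(1)
        if cp:
            return ('\\u%s' % cp).encode('latin-1').decode('unicode_escape')

    def legit(s):
        # Decode glibc-style <Uxxxx> codepoints first, then apply the
        # character substitutions from the function above.
        s = re.sub(r'<U([0-9A-Fa-f]{1,4})>', fixutf, s)
        return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-')

    print(legit('en_GB'))        # -> en-gb
    print(legit('sr@latin'))     # -> sr+latin
    print(legit('glibc/extra'))  # -> glibc-extra
    print(legit('<U00E9>'))      # -> é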
634def split_locales(d):
635 cpath = oe.cachedpath.CachedPath()
636 if (d.getVar('PACKAGE_NO_LOCALE') == '1'):
637 bb.debug(1, "package requested not splitting locales")
638 return
639
640 packages = (d.getVar('PACKAGES') or "").split()
641
642 dvar = d.getVar('PKGD')
643 pn = d.getVar('LOCALEBASEPN')
644
645 try:
646 locale_index = packages.index(pn + '-locale')
647 packages.pop(locale_index)
648 except ValueError:
649 locale_index = len(packages)
650
651 localepaths = []
652 locales = set()
653 for localepath in (d.getVar('LOCALE_PATHS') or "").split():
654 localedir = dvar + localepath
655 if not cpath.isdir(localedir):
656 bb.debug(1, 'No locale files in %s' % localepath)
657 continue
658
659 localepaths.append(localepath)
660 with os.scandir(localedir) as it:
661 for entry in it:
662 if entry.is_dir():
663 locales.add(entry.name)
664
665 if len(locales) == 0:
666 bb.debug(1, "No locale files in this package")
667 return
668
669 summary = d.getVar('SUMMARY') or pn
670 description = d.getVar('DESCRIPTION') or ""
671 locale_section = d.getVar('LOCALE_SECTION')
672 mlprefix = d.getVar('MLPREFIX') or ""
673 for l in sorted(locales):
674 ln = legitimize_package_name(l)
675 pkg = pn + '-locale-' + ln
676 packages.insert(locale_index, pkg)
677 locale_index += 1
678 files = []
679 for localepath in localepaths:
680 files.append(os.path.join(localepath, l))
681 d.setVar('FILES:' + pkg, " ".join(files))
682 d.setVar('RRECOMMENDS:' + pkg, '%svirtual-locale-%s' % (mlprefix, ln))
683 d.setVar('RPROVIDES:' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln))
684 d.setVar('SUMMARY:' + pkg, '%s - %s translations' % (summary, l))
685 d.setVar('DESCRIPTION:' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l))
686 if locale_section:
687 d.setVar('SECTION:' + pkg, locale_section)
688
689 d.setVar('PACKAGES', ' '.join(packages))
690
691 # Disabled by RP 18/06/07
692 # Wildcards aren't supported in debian
693 # They break with ipkg since glibc-locale* will mean that
694 # glibc-localedata-translit* won't install as a dependency
695 # for some other package which breaks meta-toolchain
696 # Probably breaks since virtual-locale- isn't provided anywhere
697 #rdep = (d.getVar('RDEPENDS:%s' % pn) or "").split()
698 #rdep.append('%s-locale*' % pn)
699 #d.setVar('RDEPENDS:%s' % pn, ' '.join(rdep))
700
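For a concrete sense of what the loop above emits, a sketch with hypothetical inputs: recipe 'foo', locale directory 'en_GB', LOCALE_PATHS containing /usr/share/locale, and an empty MLPREFIX:

    ln = 'en-gb'                    # legitimize_package_name('en_GB')
    pkg = 'foo-locale-' + ln
    print('FILES:%s = /usr/share/locale/en_GB' % pkg)
    print('RRECOMMENDS:%s = virtual-locale-%s' % (pkg, ln))
    print('RPROVIDES:%s = foo-locale %s-translation' % (pkg, ln))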
701def package_debug_vars(d):
702 # We default to '.debug' style
703 if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-file-directory':
704 # Single debug-file-directory style debug info
705 debug_vars = {
706 "append": ".debug",
707 "staticappend": "",
708 "dir": "",
709 "staticdir": "",
710 "libdir": "/usr/lib/debug",
711 "staticlibdir": "/usr/lib/debug-static",
712 "srcdir": "/usr/src/debug",
713 }
714 elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-without-src':
715 # Original OE-core, a.k.a. ".debug", style debug info, but without sources in /usr/src/debug
716 debug_vars = {
717 "append": "",
718 "staticappend": "",
719 "dir": "/.debug",
720 "staticdir": "/.debug-static",
721 "libdir": "",
722 "staticlibdir": "",
723 "srcdir": "",
724 }
725 elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg':
726 debug_vars = {
727 "append": "",
728 "staticappend": "",
729 "dir": "/.debug",
730 "staticdir": "/.debug-static",
731 "libdir": "",
732 "staticlibdir": "",
733 "srcdir": "/usr/src/debug",
734 }
735 else:
736 # Original OE-core, a.k.a. ".debug", style debug info
737 debug_vars = {
738 "append": "",
739 "staticappend": "",
740 "dir": "/.debug",
741 "staticdir": "/.debug-static",
742 "libdir": "",
743 "staticlibdir": "",
744 "srcdir": "/usr/src/debug",
745 }
746
747 return debug_vars
748
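To make the destination arithmetic used by the split functions below concrete, a small self-contained sketch computing where the debug file lands for two of the styles above; the binary path is illustrative:

    import os

    def debug_dest(src, dv):
        # Same expression as splitdebuginfo() below:
        # <libdir>/<dirname>/<dir>/<basename><append>
        return (dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" +
                os.path.basename(src) + dv["append"])

    dfd = {"append": ".debug", "dir": "", "libdir": "/usr/lib/debug"}
    dot = {"append": "", "dir": "/.debug", "libdir": ""}
    print(debug_dest("/usr/bin/foo", dfd))  # /usr/lib/debug/usr/bin/foo.debug
    print(debug_dest("/usr/bin/foo", dot))  # /usr/bin/.debug/foo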
749
750def parse_debugsources_from_dwarfsrcfiles_output(dwarfsrcfiles_output):
751 debugfiles = {}
752
753 for line in dwarfsrcfiles_output.splitlines():
754 if line.startswith("\t"):
755 debugfiles[os.path.normpath(line.split()[0])] = ""
756
757 return debugfiles.keys()
758
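A sketch of the tab-indented output shape this parser consumes; the sample text is fabricated for illustration (a header line naming the binary, then tab-indented source paths):

    import os

    sample = "/usr/bin/foo\n\t../src/foo.c\n\t../src/util.c\n\t../src/foo.c\n"

    debugfiles = {}
    for line in sample.splitlines():
        if line.startswith("\t"):
            # First whitespace-separated field is the (normalised) source path.
            debugfiles[os.path.normpath(line.split()[0])] = ""

    # Duplicates collapse because the dict acts as an ordered set.
    print(list(debugfiles))  # ['../src/foo.c', '../src/util.c']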
759def source_info(file, d, fatal=True):
760 cmd = ["dwarfsrcfiles", file]
761 try:
762 output = subprocess.check_output(cmd, universal_newlines=True, stderr=subprocess.STDOUT)
763 retval = 0
764 except subprocess.CalledProcessError as exc:
765 output = exc.output
766 retval = exc.returncode
767
768 # 255 means a specific file wasn't fully parsed to get the debug file list, which is not a fatal failure
769 if retval != 0 and retval != 255:
770 msg = "dwarfsrcfiles failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else "")
771 if fatal:
772 bb.fatal(msg)
773 bb.note(msg)
774
775 debugsources = parse_debugsources_from_dwarfsrcfiles_output(output)
776
777 return list(debugsources)
778
779def splitdebuginfo(file, dvar, dv, d):
780 # Split a single file into two components: the stripped target system
781 # binary and a separate file containing any debugging information. The
782 # two files are linked to reference each other.
783 #
784 # return a mapping of files:debugsources
785
786 src = file[len(dvar):]
787 dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
788 debugfile = dvar + dest
789 sources = []
790
791 if file.endswith(".ko") and file.find("/lib/modules/") != -1:
792 if oe.package.is_kernel_module_signed(file):
793 bb.debug(1, "Skip strip on signed module %s" % file)
794 return (file, sources)
795
796 # Split the file...
797 bb.utils.mkdirhier(os.path.dirname(debugfile))
798 #bb.note("Split %s -> %s" % (file, debugfile))
799 # Only store off the hard link reference if we successfully split!
800
801 dvar = d.getVar('PKGD')
802 objcopy = d.getVar("OBJCOPY")
803
804 newmode = None
805 if not os.access(file, os.W_OK) or not os.access(file, os.R_OK):
806 origmode = os.stat(file)[stat.ST_MODE]
807 newmode = origmode | stat.S_IWRITE | stat.S_IREAD
808 os.chmod(file, newmode)
809
810 # We need to extract the debug src information here...
811 if dv["srcdir"]:
812 sources = source_info(file, d)
813
814 bb.utils.mkdirhier(os.path.dirname(debugfile))
815
816 subprocess.check_output([objcopy, '--only-keep-debug', file, debugfile], stderr=subprocess.STDOUT)
817
818 # Set the debuglink to have the view of the file path on the target
819 subprocess.check_output([objcopy, '--add-gnu-debuglink', debugfile, file], stderr=subprocess.STDOUT)
820
821 if newmode:
822 os.chmod(file, origmode)
823
824 return (file, sources)
825
826def splitstaticdebuginfo(file, dvar, dv, d):
827 # Unlike the function above, there is no way to split a static library
828 # into two components. So to get similar results we will copy the unmodified
829 # static library (containing the debug symbols) into a new directory.
830 # We will then strip (preserving symbols) the static library in the
831 # typical location.
832 #
833 # return a mapping of files:debugsources
834
835 src = file[len(dvar):]
836 dest = dv["staticlibdir"] + os.path.dirname(src) + dv["staticdir"] + "/" + os.path.basename(src) + dv["staticappend"]
837 debugfile = dvar + dest
838 sources = []
839
840 # Copy the file...
841 bb.utils.mkdirhier(os.path.dirname(debugfile))
842 #bb.note("Copy %s -> %s" % (file, debugfile))
843
844 dvar = d.getVar('PKGD')
845
846 newmode = None
847 if not os.access(file, os.W_OK) or not os.access(file, os.R_OK):
848 origmode = os.stat(file)[stat.ST_MODE]
849 newmode = origmode | stat.S_IWRITE | stat.S_IREAD
850 os.chmod(file, newmode)
851
852 # We need to extract the debug src information here...
853 if dv["srcdir"]:
854 sources = source_info(file, d)
855
856 bb.utils.mkdirhier(os.path.dirname(debugfile))
857
858 # Copy the unmodified item to the debug directory
859 shutil.copy2(file, debugfile)
860
861 if newmode:
862 os.chmod(file, origmode)
863
864 return (file, sources)
865
866def inject_minidebuginfo(file, dvar, dv, d):
867 # Extract just the symbols from debuginfo into minidebuginfo,
868 # compress it with xz and inject it back into the binary in a .gnu_debugdata section.
869 # https://sourceware.org/gdb/onlinedocs/gdb/MiniDebugInfo.html
870
871 readelf = d.getVar('READELF')
872 nm = d.getVar('NM')
873 objcopy = d.getVar('OBJCOPY')
874
875 minidebuginfodir = d.expand('${WORKDIR}/minidebuginfo')
876
877 src = file[len(dvar):]
878 dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
879 debugfile = dvar + dest
880 minidebugfile = minidebuginfodir + src + '.minidebug'
881 bb.utils.mkdirhier(os.path.dirname(minidebugfile))
882
883 # If we didn't produce debuginfo for any reason, we can't produce minidebuginfo either
884 # so skip it.
885 if not os.path.exists(debugfile):
886 bb.debug(1, 'ELF file {} has no debuginfo, skipping minidebuginfo injection'.format(file))
887 return
888
889 # minidebuginfo does not make sense to apply to ELF objects other than
890 # executables and shared libraries, so skip the minidebuginfo
891 # generation for objects like kernel modules.
892 for line in subprocess.check_output([readelf, '-h', debugfile], universal_newlines=True).splitlines():
893 if not line.strip().startswith("Type:"):
894 continue
895 elftype = line.split(":")[1].strip()
896 if not any(elftype.startswith(i) for i in ["EXEC", "DYN"]):
897 bb.debug(1, 'ELF file {} is not executable/shared, skipping minidebuginfo injection'.format(file))
898 return
899 break
900
901 # Find non-allocated PROGBITS, NOTE, and NOBITS sections in the debuginfo.
902 # We will exclude all of these from minidebuginfo to save space.
903 remove_section_names = []
904 for line in subprocess.check_output([readelf, '-W', '-S', debugfile], universal_newlines=True).splitlines():
905 # strip the leading " [ 1]" section index to allow splitting on space
906 if ']' not in line:
907 continue
908 fields = line[line.index(']') + 1:].split()
909 if len(fields) < 7:
910 continue
911 name = fields[0]
912 type = fields[1]
913 flags = fields[6]
914 # .debug_ sections will be removed by objcopy -S so no need to explicitly remove them
915 if name.startswith('.debug_'):
916 continue
917 if 'A' not in flags and type in ['PROGBITS', 'NOTE', 'NOBITS']:
918 remove_section_names.append(name)
919
920 # List dynamic symbols in the binary. We can exclude these from minidebuginfo
921 # because they are always present in the binary.
922 dynsyms = set()
923 for line in subprocess.check_output([nm, '-D', file, '--format=posix', '--defined-only'], universal_newlines=True).splitlines():
924 dynsyms.add(line.split()[0])
925
926 # Find all function symbols from debuginfo which aren't in the dynamic symbols table.
927 # These are the ones we want to keep in minidebuginfo.
928 keep_symbols_file = minidebugfile + '.symlist'
929 found_any_symbols = False
930 with open(keep_symbols_file, 'w') as f:
931 for line in subprocess.check_output([nm, debugfile, '--format=sysv', '--defined-only'], universal_newlines=True).splitlines():
932 fields = line.split('|')
933 if len(fields) < 7:
934 continue
935 name = fields[0].strip()
936 type = fields[3].strip()
937 if type == 'FUNC' and name not in dynsyms:
938 f.write('{}\n'.format(name))
939 found_any_symbols = True
940
941 if not found_any_symbols:
942 bb.debug(1, 'ELF file {} contains no symbols, skipping minidebuginfo injection'.format(file))
943 return
944
945 bb.utils.remove(minidebugfile)
946 bb.utils.remove(minidebugfile + '.xz')
947
948 subprocess.check_call([objcopy, '-S'] +
949 ['--remove-section={}'.format(s) for s in remove_section_names] +
950 ['--keep-symbols={}'.format(keep_symbols_file), debugfile, minidebugfile])
951
952 subprocess.check_call(['xz', '--keep', minidebugfile])
953
954 subprocess.check_call([objcopy, '--add-section', '.gnu_debugdata={}.xz'.format(minidebugfile), file])
955
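The keep-list construction above depends on nm's --format=sysv layout; a standalone sketch against a fabricated sample (symbol names and values are made up, and we pretend `nm -D` already reported `main`):

    # Fields are '|'-separated: name, value, class, type, size, line, section.
    sample = (
        "main            |0000000000001139|T|FUNC|23|   |.text\n"
        "helper_static   |0000000000001150|t|FUNC|11|   |.text\n"
        "a_global_object |0000000000004010|D|OBJECT|4|  |.data\n"
    )
    dynsyms = {"main"}

    keep = []
    for line in sample.splitlines():
        fields = line.split('|')
        if len(fields) < 7:
            continue
        name, symtype = fields[0].strip(), fields[3].strip()
        # Keep only functions that are not already in the dynamic symtab.
        if symtype == 'FUNC' and name not in dynsyms:
            keep.append(name)
    print(keep)  # ['helper_static']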
956def copydebugsources(debugsrcdir, sources, d):
957 # The debug src information written out to sourcefile is further processed
958 # and copied to the destination here.
959
960 cpath = oe.cachedpath.CachedPath()
961
962 if debugsrcdir and sources:
963 sourcefile = d.expand("${WORKDIR}/debugsources.list")
964 bb.utils.remove(sourcefile)
965
966 # filenames are null-separated - this is an artefact of the previous use
967 # of rpm's debugedit, which was writing them out that way, and the code elsewhere
968 # is still assuming that.
969 debuglistoutput = '\0'.join(sources) + '\0'
970 with open(sourcefile, 'a') as sf:
971 sf.write(debuglistoutput)
972
973 dvar = d.getVar('PKGD')
974 strip = d.getVar("STRIP")
975 objcopy = d.getVar("OBJCOPY")
976 workdir = d.getVar("WORKDIR")
977 sdir = d.getVar("S")
978 cflags = d.expand("${CFLAGS}")
979
980 prefixmap = {}
981 for flag in cflags.split():
982 if not flag.startswith("-fdebug-prefix-map"):
983 continue
984 if "recipe-sysroot" in flag:
985 continue
986 flag = flag.split("=")
987 prefixmap[flag[1]] = flag[2]
988
989 nosuchdir = []
990 basepath = dvar
991 for p in debugsrcdir.split("/"):
992 basepath = basepath + "/" + p
993 if not cpath.exists(basepath):
994 nosuchdir.append(basepath)
995 bb.utils.mkdirhier(basepath)
996 cpath.updatecache(basepath)
997
998 for pmap in prefixmap:
999 # Ignore files from the recipe sysroots (target and native)
1000 cmd = "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '((<internal>|<built-in>)$|/.*recipe-sysroot.*/)' | " % sourcefile
1001 # We need to ignore files that are not actually ours
1002 # we do this by only paying attention to items from this package
1003 cmd += "fgrep -zw '%s' | " % prefixmap[pmap]
1004 # Remove prefix in the source paths
1005 cmd += "sed 's#%s/##g' | " % (prefixmap[pmap])
1006 cmd += "(cd '%s' ; cpio -pd0mlL --no-preserve-owner '%s%s' 2>/dev/null)" % (pmap, dvar, prefixmap[pmap])
1007
1008 try:
1009 subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
1010 except subprocess.CalledProcessError:
1011 # Can "fail" if internal headers/transient sources are attempted
1012 pass
1013 # cpio seems to have a bug with -lL together and symbolic links are just copied, not dereferenced.
1014 # Work around this by manually finding and copying any symbolic links that made it through.
1015 cmd = "find %s%s -type l -print0 -delete | sed s#%s%s/##g | (cd '%s' ; cpio -pd0mL --no-preserve-owner '%s%s')" % \
1016 (dvar, prefixmap[pmap], dvar, prefixmap[pmap], pmap, dvar, prefixmap[pmap])
1017 subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
1018
1019 # debugsources.list may be polluted from the host if we used externalsrc;
1020 # cpio uses copy-pass and may have just created a directory structure
1021 # matching the one from the host. If that's the case, move those files to
1022 # debugsrcdir to avoid host contamination.
1023 # Empty dir structure will be deleted in the next step.
1024
1025 # Same check as above for externalsrc
1026 if workdir not in sdir:
1027 if os.path.exists(dvar + debugsrcdir + sdir):
1028 cmd = "mv %s%s%s/* %s%s" % (dvar, debugsrcdir, sdir, dvar,debugsrcdir)
1029 subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
1030
1031 # The copy by cpio may have resulted in some empty directories! Remove these
1032 cmd = "find %s%s -empty -type d -delete" % (dvar, debugsrcdir)
1033 subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
1034
1035 # Also remove debugsrcdir if it's empty
1036 for p in nosuchdir[::-1]:
1037 if os.path.exists(p) and not os.listdir(p):
1038 os.rmdir(p)
1039
1040
1041def process_split_and_strip_files(d):
1042 cpath = oe.cachedpath.CachedPath()
1043
1044 dvar = d.getVar('PKGD')
1045 pn = d.getVar('PN')
1046 hostos = d.getVar('HOST_OS')
1047
1048 oldcwd = os.getcwd()
1049 os.chdir(dvar)
1050
1051 dv = package_debug_vars(d)
1052
1053 #
1054 # First let's figure out all of the files we may have to process ... do this only once!
1055 #
1056 elffiles = {}
1057 symlinks = {}
1058 staticlibs = []
1059 inodes = {}
1060 libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir"))
1061 baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir"))
1062 skipfiles = (d.getVar("INHIBIT_PACKAGE_STRIP_FILES") or "").split()
1063 if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1' or \
1064 d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
1065 checkelf = {}
1066 checkelflinks = {}
1067 for root, dirs, files in cpath.walk(dvar):
1068 for f in files:
1069 file = os.path.join(root, f)
1070
1071 # Skip debug files
1072 if dv["append"] and file.endswith(dv["append"]):
1073 continue
1074 if dv["dir"] and dv["dir"] in os.path.dirname(file[len(dvar):]):
1075 continue
1076
1077 if file in skipfiles:
1078 continue
1079
1080 if oe.package.is_static_lib(file):
1081 staticlibs.append(file)
1082 continue
1083
1084 try:
1085 ltarget = cpath.realpath(file, dvar, False)
1086 s = cpath.lstat(ltarget)
1087 except OSError as e:
1088 (err, strerror) = e.args
1089 if err != errno.ENOENT:
1090 raise
1091 # Skip broken symlinks
1092 continue
1093 if not s:
1094 continue
1095 # Check it's an executable
1096 if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) \
1097 or (s[stat.ST_MODE] & stat.S_IXOTH) \
1098 or ((file.startswith(libdir) or file.startswith(baselibdir)) \
1099 and (".so" in f or ".node" in f)) \
1100 or (f.startswith('vmlinux') or ".ko" in f):
1101
1102 if cpath.islink(file):
1103 checkelflinks[file] = ltarget
1104 continue
1105 # Use a reference of device ID and inode number to identify files
1106 file_reference = "%d_%d" % (s.st_dev, s.st_ino)
1107 checkelf[file] = (file, file_reference)
1108
1109 results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelflinks.values(), d)
1110 results_map = {}
1111 for (ltarget, elf_file) in results:
1112 results_map[ltarget] = elf_file
1113 for file in checkelflinks:
1114 ltarget = checkelflinks[file]
1115 # If it's a symlink, and points to an ELF file, we capture the readlink target
1116 if results_map[ltarget]:
1117 target = os.readlink(file)
1118 #bb.note("Sym: %s (%d)" % (ltarget, results_map[ltarget]))
1119 symlinks[file] = target
1120
1121 results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelf.keys(), d)
1122
1123 # Sort results by file path. This ensures that the files are always
1124 # processed in the same order, which is important to make sure builds
1125 # are reproducible when dealing with hardlinks
1126 results.sort(key=lambda x: x[0])
1127
1128 for (file, elf_file) in results:
1129 # It's a file (or hardlink), not a link
1130 # ...but is it ELF, and is it already stripped?
1131 if elf_file & 1:
1132 if elf_file & 2:
1133 if 'already-stripped' in (d.getVar('INSANE_SKIP:' + pn) or "").split():
1134 bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn))
1135 else:
1136 msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn)
1137 oe.qa.handle_error("already-stripped", msg, d)
1138 continue
1139
1140 # At this point we have an unstripped elf file. We need to:
1141 # a) Make sure any file we strip is not hardlinked to anything else outside this tree
1142 # b) Only strip any hardlinked file once (no races)
1143 # c) Track any hardlinks between files so that we can reconstruct matching debug file hardlinks
1144
1145 # Use a reference of device ID and inode number to identify files
1146 file_reference = checkelf[file][1]
1147 if file_reference in inodes:
1148 os.unlink(file)
1149 os.link(inodes[file_reference][0], file)
1150 inodes[file_reference].append(file)
1151 else:
1152 inodes[file_reference] = [file]
1153 # break hardlink
1154 bb.utils.break_hardlinks(file)
1155 elffiles[file] = elf_file
1156 # Modified the file so clear the cache
1157 cpath.updatecache(file)
1158
1159 def strip_pkgd_prefix(f):
1160 nonlocal dvar
1161
1162 if f.startswith(dvar):
1163 return f[len(dvar):]
1164
1165 return f
1166
1167 #
1168 # First let's process debug splitting
1169 #
1170 if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
1171 results = oe.utils.multiprocess_launch(splitdebuginfo, list(elffiles), d, extraargs=(dvar, dv, d))
1172
1173 if dv["srcdir"] and not hostos.startswith("mingw"):
1174 if (d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
1175 results = oe.utils.multiprocess_launch(splitstaticdebuginfo, staticlibs, d, extraargs=(dvar, dv, d))
1176 else:
1177 for file in staticlibs:
1178 results.append( (file,source_info(file, d)) )
1179
1180 d.setVar("PKGDEBUGSOURCES", {strip_pkgd_prefix(f): sorted(s) for f, s in results})
1181
1182 sources = set()
1183 for r in results:
1184 sources.update(r[1])
1185
1186 # Hardlink our debug symbols to the other hardlink copies
1187 for ref in inodes:
1188 if len(inodes[ref]) == 1:
1189 continue
1190
1191 target = inodes[ref][0][len(dvar):]
1192 for file in inodes[ref][1:]:
1193 src = file[len(dvar):]
1194 dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(target) + dv["append"]
1195 fpath = dvar + dest
1196 ftarget = dvar + dv["libdir"] + os.path.dirname(target) + dv["dir"] + "/" + os.path.basename(target) + dv["append"]
1197 bb.utils.mkdirhier(os.path.dirname(fpath))
1198 # Keep only one hardlink of the separated debug info file in each directory
1199 if not os.access(fpath, os.R_OK):
1200 #bb.note("Link %s -> %s" % (fpath, ftarget))
1201 os.link(ftarget, fpath)
1202
1203 # Create symlinks for all cases we were able to split symbols
1204 for file in symlinks:
1205 src = file[len(dvar):]
1206 dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
1207 fpath = dvar + dest
1208 # Skip it if the target doesn't exist
1209 try:
1210 s = os.stat(fpath)
1211 except OSError as e:
1212 (err, strerror) = e.args
1213 if err != errno.ENOENT:
1214 raise
1215 continue
1216
1217 ltarget = symlinks[file]
1218 lpath = os.path.dirname(ltarget)
1219 lbase = os.path.basename(ltarget)
1220 ftarget = ""
1221 if lpath and lpath != ".":
1222 ftarget += lpath + dv["dir"] + "/"
1223 ftarget += lbase + dv["append"]
1224 if lpath.startswith(".."):
1225 ftarget = os.path.join("..", ftarget)
1226 bb.utils.mkdirhier(os.path.dirname(fpath))
1227 #bb.note("Symlink %s -> %s" % (fpath, ftarget))
1228 os.symlink(ftarget, fpath)
1229
1230 # Process the dv["srcdir"] if requested...
1231 # This copies and places the referenced sources for later debugging...
1232 copydebugsources(dv["srcdir"], sources, d)
1233 #
1234 # End of debug splitting
1235 #
1236
1237 #
1238 # Now let's go back over things and strip them
1239 #
1240 if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1'):
1241 strip = d.getVar("STRIP")
1242 sfiles = []
1243 for file in elffiles:
1244 elf_file = int(elffiles[file])
1245 #bb.note("Strip %s" % file)
1246 sfiles.append((file, elf_file, strip))
1247 if (d.getVar('PACKAGE_STRIP_STATIC') == '1' or d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
1248 for f in staticlibs:
1249 sfiles.append((f, 16, strip))
1250
1251 oe.utils.multiprocess_launch(oe.package.runstrip, sfiles, d)
1252
1253 # Build "minidebuginfo" and reinject it back into the stripped binaries
1254 if bb.utils.contains('DISTRO_FEATURES', 'minidebuginfo', True, False, d):
1255 oe.utils.multiprocess_launch(inject_minidebuginfo, list(elffiles), d,
1256 extraargs=(dvar, dv, d))
1257
1258 #
1259 # End of strip
1260 #
1261 os.chdir(oldcwd)
1262
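The hardlink bookkeeping above keys files by device and inode; a minimal runnable demonstration with scratch files:

    import os
    import tempfile

    with tempfile.TemporaryDirectory() as root:
        a = os.path.join(root, "a")
        b = os.path.join(root, "b")   # hardlink of a
        c = os.path.join(root, "c")   # independent file
        open(a, "w").close()
        os.link(a, b)
        open(c, "w").close()

        inodes = {}
        for path in (a, b, c):
            s = os.lstat(path)
            # Same "<dev>_<ino>" reference string as the code above.
            inodes.setdefault("%d_%d" % (s.st_dev, s.st_ino), []).append(path)

        print(sorted(len(v) for v in inodes.values()))  # [1, 2]: a and b share one inode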
1263
1264def populate_packages(d):
1265 cpath = oe.cachedpath.CachedPath()
1266
1267 workdir = d.getVar('WORKDIR')
1268 outdir = d.getVar('DEPLOY_DIR')
1269 dvar = d.getVar('PKGD')
1270 packages = d.getVar('PACKAGES').split()
1271 pn = d.getVar('PN')
1272
1273 bb.utils.mkdirhier(outdir)
1274 os.chdir(dvar)
1275
1276 autodebug = not (d.getVar("NOAUTOPACKAGEDEBUG") or False)
1277
1278 split_source_package = (d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg')
1279
1280 # If debug-with-srcpkg mode is enabled then add the source package if it
1281 # doesn't exist and add the source file contents to the source package.
1282 if split_source_package:
1283 src_package_name = ('%s-src' % d.getVar('PN'))
1284 if not src_package_name in packages:
1285 packages.append(src_package_name)
1286 d.setVar('FILES:%s' % src_package_name, '/usr/src/debug')
1287
1288 # Sanity check PACKAGES for duplicates
1289 # This check should be moved to sanity.bbclass once we have the infrastructure
1290 package_dict = {}
1291
1292 for i, pkg in enumerate(packages):
1293 if pkg in package_dict:
1294 msg = "%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg
1295 oe.qa.handle_error("packages-list", msg, d)
1296 # Ensure the source package gets the chance to pick up the source files
1297 # before the debug package by ordering it first in PACKAGES. Whether it
1298 # actually picks up any source files is controlled by
1299 # PACKAGE_DEBUG_SPLIT_STYLE.
1300 elif pkg.endswith("-src"):
1301 package_dict[pkg] = (10, i)
1302 elif autodebug and pkg.endswith("-dbg"):
1303 package_dict[pkg] = (30, i)
1304 else:
1305 package_dict[pkg] = (50, i)
1306 packages = sorted(package_dict.keys(), key=package_dict.get)
1307 d.setVar('PACKAGES', ' '.join(packages))
1308 pkgdest = d.getVar('PKGDEST')
1309
1310 seen = []
1311
1312 # os.mkdir masks the permissions with umask so we have to unset it first
1313 oldumask = os.umask(0)
1314
1315 debug = []
1316 for root, dirs, files in cpath.walk(dvar):
1317 dir = root[len(dvar):]
1318 if not dir:
1319 dir = os.sep
1320 for f in (files + dirs):
1321 path = "." + os.path.join(dir, f)
1322 if "/.debug/" in path or "/.debug-static/" in path or path.endswith("/.debug"):
1323 debug.append(path)
1324
1325 for pkg in packages:
1326 root = os.path.join(pkgdest, pkg)
1327 bb.utils.mkdirhier(root)
1328
1329 filesvar = d.getVar('FILES:%s' % pkg) or ""
1330 if "//" in filesvar:
1331 msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg
1332 oe.qa.handle_error("files-invalid", msg, d)
1333 filesvar = filesvar.replace("//", "/")
1334
1335 origfiles = filesvar.split()
1336 files, symlink_paths = oe.package.files_from_filevars(origfiles)
1337
1338 if autodebug and pkg.endswith("-dbg"):
1339 files.extend(debug)
1340
1341 for file in files:
1342 if (not cpath.islink(file)) and (not cpath.exists(file)):
1343 continue
1344 if file in seen:
1345 continue
1346 seen.append(file)
1347
1348 def mkdir(src, dest, p):
1349 src = os.path.join(src, p)
1350 dest = os.path.join(dest, p)
1351 fstat = cpath.stat(src)
1352 os.mkdir(dest)
1353 os.chmod(dest, fstat.st_mode)
1354 os.chown(dest, fstat.st_uid, fstat.st_gid)
1355 if p not in seen:
1356 seen.append(p)
1357 cpath.updatecache(dest)
1358
1359 def mkdir_recurse(src, dest, paths):
1360 if cpath.exists(dest + '/' + paths):
1361 return
1362 while paths.startswith("./"):
1363 paths = paths[2:]
1364 p = "."
1365 for c in paths.split("/"):
1366 p = os.path.join(p, c)
1367 if not cpath.exists(os.path.join(dest, p)):
1368 mkdir(src, dest, p)
1369
1370 if cpath.isdir(file) and not cpath.islink(file):
1371 mkdir_recurse(dvar, root, file)
1372 continue
1373
1374 mkdir_recurse(dvar, root, os.path.dirname(file))
1375 fpath = os.path.join(root,file)
1376 if not cpath.islink(file):
1377 os.link(file, fpath)
1378 continue
1379 ret = bb.utils.copyfile(file, fpath)
1380 if ret is False or ret == 0:
1381 bb.fatal("File population failed")
1382
1383 # Check if symlink paths exist
1384 for file in symlink_paths:
1385 if not os.path.exists(os.path.join(root,file)):
1386 bb.fatal("File '%s' cannot be packaged into '%s' because its "
1387 "parent directory structure does not exist. One of "
1388 "its parent directories is a symlink whose target "
1389 "directory is not included in the package." %
1390 (file, pkg))
1391
1392 os.umask(oldumask)
1393 os.chdir(workdir)
1394
1395 # Handle excluding packages with incompatible licenses
1396 package_list = []
1397 for pkg in packages:
1398 licenses = d.getVar('_exclude_incompatible-' + pkg)
1399 if licenses:
1400 msg = "Excluding %s from packaging as it has incompatible license(s): %s" % (pkg, licenses)
1401 oe.qa.handle_error("incompatible-license", msg, d)
1402 else:
1403 package_list.append(pkg)
1404 d.setVar('PACKAGES', ' '.join(package_list))
1405
1406 unshipped = []
1407 for root, dirs, files in cpath.walk(dvar):
1408 dir = root[len(dvar):]
1409 if not dir:
1410 dir = os.sep
1411 for f in (files + dirs):
1412 path = os.path.join(dir, f)
1413 if ('.' + path) not in seen:
1414 unshipped.append(path)
1415
1416 if unshipped != []:
1417 msg = pn + ": Files/directories were installed but not shipped in any package:"
1418 if "installed-vs-shipped" in (d.getVar('INSANE_SKIP:' + pn) or "").split():
1419 bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn)
1420 else:
1421 for f in unshipped:
1422 msg = msg + "\n " + f
1423 msg = msg + "\nPlease set FILES such that these items are packaged. Alternatively if they are unneeded, avoid installing them or delete them within do_install.\n"
1424 msg = msg + "%s: %d installed and not shipped files." % (pn, len(unshipped))
1425 oe.qa.handle_error("installed-vs-shipped", msg, d)
1426
1427def process_fixsymlinks(pkgfiles, d):
1428 cpath = oe.cachedpath.CachedPath()
1429 pkgdest = d.getVar('PKGDEST')
1430 packages = d.getVar("PACKAGES", False).split()
1431
1432 dangling_links = {}
1433 pkg_files = {}
1434 for pkg in packages:
1435 dangling_links[pkg] = []
1436 pkg_files[pkg] = []
1437 inst_root = os.path.join(pkgdest, pkg)
1438 for path in pkgfiles[pkg]:
1439 rpath = path[len(inst_root):]
1440 pkg_files[pkg].append(rpath)
1441 rtarget = cpath.realpath(path, inst_root, True, assume_dir = True)
1442 if not cpath.lexists(rtarget):
1443 dangling_links[pkg].append(os.path.normpath(rtarget[len(inst_root):]))
1444
1445 newrdepends = {}
1446 for pkg in dangling_links:
1447 for l in dangling_links[pkg]:
1448 found = False
1449 bb.debug(1, "%s contains dangling link %s" % (pkg, l))
1450 for p in packages:
1451 if l in pkg_files[p]:
1452 found = True
1453 bb.debug(1, "target found in %s" % p)
1454 if p == pkg:
1455 break
1456 if pkg not in newrdepends:
1457 newrdepends[pkg] = []
1458 newrdepends[pkg].append(p)
1459 break
1460 if not found:
1461 bb.note("%s contains dangling symlink to %s" % (pkg, l))
1462
1463 for pkg in newrdepends:
1464 rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS:' + pkg) or "")
1465 for p in newrdepends[pkg]:
1466 if p not in rdepends:
1467 rdepends[p] = []
1468 d.setVar('RDEPENDS:' + pkg, bb.utils.join_deps(rdepends, commasep=False))
1469
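A minimal demonstration of the dangling-link detection above, resolving each symlink relative to a (temporary, fabricated) package root and testing whether the target exists:

    import os
    import tempfile

    with tempfile.TemporaryDirectory() as inst_root:
        open(os.path.join(inst_root, "present"), "w").close()
        os.symlink("present", os.path.join(inst_root, "ok"))
        os.symlink("missing", os.path.join(inst_root, "dangling"))

        for name in ("ok", "dangling"):
            path = os.path.join(inst_root, name)
            rtarget = os.path.join(inst_root, os.readlink(path))
            print(name, "->", os.path.lexists(rtarget))  # ok -> True, dangling -> False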
1470def process_filedeps(pkgfiles, d):
1471 """
1472 Collect perfile run-time dependency metadata
1473 Output:
1474 FILERPROVIDESFLIST:pkg - list of all files w/ deps
1475 FILERPROVIDES:filepath:pkg - per file dep
1476
1477 FILERDEPENDSFLIST:pkg - list of all files w/ deps
1478 FILERDEPENDS:filepath:pkg - per file dep
1479 """
1480 if d.getVar('SKIP_FILEDEPS') == '1':
1481 return
1482
1483 pkgdest = d.getVar('PKGDEST')
1484 packages = d.getVar('PACKAGES')
1485 rpmdeps = d.getVar('RPMDEPS')
1486
1487 def chunks(files, n):
1488 return [files[i:i+n] for i in range(0, len(files), n)]
1489
1490 pkglist = []
1491 for pkg in packages.split():
1492 if d.getVar('SKIP_FILEDEPS:' + pkg) == '1':
1493 continue
1494 if pkg.endswith('-dbg') or pkg.endswith('-doc') or pkg.find('-locale-') != -1 or pkg.find('-localedata-') != -1 or pkg.find('-gconv-') != -1 or pkg.find('-charmap-') != -1 or pkg.startswith('kernel-module-') or pkg.endswith('-src'):
1495 continue
1496 for files in chunks(pkgfiles[pkg], 100):
1497 pkglist.append((pkg, files, rpmdeps, pkgdest))
1498
1499 processed = oe.utils.multiprocess_launch(oe.package.filedeprunner, pkglist, d)
1500
1501 provides_files = {}
1502 requires_files = {}
1503
1504 for result in processed:
1505 (pkg, provides, requires) = result
1506
1507 if pkg not in provides_files:
1508 provides_files[pkg] = []
1509 if pkg not in requires_files:
1510 requires_files[pkg] = []
1511
1512 for file in sorted(provides):
1513 provides_files[pkg].append(file)
1514 key = "FILERPROVIDES:" + file + ":" + pkg
1515 d.appendVar(key, " " + " ".join(provides[file]))
1516
1517 for file in sorted(requires):
1518 requires_files[pkg].append(file)
1519 key = "FILERDEPENDS:" + file + ":" + pkg
1520 d.appendVar(key, " " + " ".join(requires[file]))
1521
1522 for pkg in requires_files:
1523 d.setVar("FILERDEPENDSFLIST:" + pkg, " ".join(sorted(requires_files[pkg])))
1524 for pkg in provides_files:
1525 d.setVar("FILERPROVIDESFLIST:" + pkg, " ".join(sorted(provides_files[pkg])))
1526
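The chunking above simply slices each package's file list into fixed-size batches so per-file dependency extraction can be farmed out in parallel; a tiny usage example:

    def chunks(files, n):
        return [files[i:i+n] for i in range(0, len(files), n)]

    print(chunks(list(range(7)), 3))  # [[0, 1, 2], [3, 4, 5], [6]]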
1527def process_shlibs(pkgfiles, d):
1528 cpath = oe.cachedpath.CachedPath()
1529
1530 exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', False)
1531 if exclude_shlibs:
1532 bb.note("not generating shlibs")
1533 return
1534
1535 lib_re = re.compile(r"^.*\.so")
1536 libdir_re = re.compile(r".*/%s$" % d.getVar('baselib'))
1537
1538 packages = d.getVar('PACKAGES')
1539
1540 shlib_pkgs = []
1541 exclusion_list = d.getVar("EXCLUDE_PACKAGES_FROM_SHLIBS")
1542 if exclusion_list:
1543 for pkg in packages.split():
1544 if pkg not in exclusion_list.split():
1545 shlib_pkgs.append(pkg)
1546 else:
1547 bb.note("not generating shlibs for %s" % pkg)
1548 else:
1549 shlib_pkgs = packages.split()
1550
1551 hostos = d.getVar('HOST_OS')
1552
1553 workdir = d.getVar('WORKDIR')
1554
1555 ver = d.getVar('PKGV')
1556 if not ver:
1557 msg = "PKGV not defined"
1558 oe.qa.handle_error("pkgv-undefined", msg, d)
1559 return
1560
1561 pkgdest = d.getVar('PKGDEST')
1562
1563 shlibswork_dir = d.getVar('SHLIBSWORKDIR')
1564
1565 def linux_so(file, pkg, pkgver, d):
1566 needs_ldconfig = False
1567 needed = set()
1568 sonames = set()
1569 renames = []
1570 ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
1571 cmd = d.getVar('OBJDUMP') + " -p " + shlex.quote(file) + " 2>/dev/null"
1572 fd = os.popen(cmd)
1573 lines = fd.readlines()
1574 fd.close()
1575 rpath = tuple()
1576 for l in lines:
1577 m = re.match(r"\s+RPATH\s+([^\s]*)", l)
1578 if m:
1579 rpaths = m.group(1).replace("$ORIGIN", ldir).split(":")
1580 rpath = tuple(map(os.path.normpath, rpaths))
1581 for l in lines:
1582 m = re.match(r"\s+NEEDED\s+([^\s]*)", l)
1583 if m:
1584 dep = m.group(1)
1585 if dep not in needed:
1586 needed.add((dep, file, rpath))
1587 m = re.match(r"\s+SONAME\s+([^\s]*)", l)
1588 if m:
1589 this_soname = m.group(1)
1590 prov = (this_soname, ldir, pkgver)
1591 if not prov in sonames:
1592 # if the library is private (only used by this package) then do not build a shlib entry for it
1593 if not private_libs or len([i for i in private_libs if fnmatch.fnmatch(this_soname, i)]) == 0:
1594 sonames.add(prov)
1595 if libdir_re.match(os.path.dirname(file)):
1596 needs_ldconfig = True
1597 if needs_ldconfig and snap_symlinks and (os.path.basename(file) != this_soname):
1598 renames.append((file, os.path.join(os.path.dirname(file), this_soname)))
1599 return (needs_ldconfig, needed, sonames, renames)
1600
1601 def darwin_so(file, needed, sonames, renames, pkgver):
1602 if not os.path.exists(file):
1603 return
1604 ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
1605
1606 def get_combinations(base):
1607 #
1608 # Given a base library name, find all combinations of this split by "." and "-"
1609 #
1610 combos = []
1611 options = base.split(".")
1612 for i in range(1, len(options) + 1):
1613 combos.append(".".join(options[0:i]))
1614 options = base.split("-")
1615 for i in range(1, len(options) + 1):
1616 combos.append("-".join(options[0:i]))
1617 return combos
1618
1619 if (file.endswith('.dylib') or file.endswith('.so')) and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.endswith('-src'):
1620 # Drop suffix
1621 name = os.path.basename(file).rsplit(".",1)[0]
1622 # Find all combinations
1623 combos = get_combinations(name)
1624 for combo in combos:
1625 if not combo in sonames:
1626 prov = (combo, ldir, pkgver)
1627 sonames.add(prov)
1628 if file.endswith('.dylib') or file.endswith('.so'):
1629 rpath = []
1630 p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-l', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
1631 out, err = p.communicate()
1632 # If returned successfully, process stdout for results
1633 if p.returncode == 0:
1634 for l in out.split("\n"):
1635 l = l.strip()
1636 if l.startswith('path '):
1637 rpath.append(l.split()[1])
1638
1639 p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-L', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
1640 out, err = p.communicate()
1641 # If returned successfully, process stdout for results
1642 if p.returncode == 0:
1643 for l in out.split("\n"):
1644 l = l.strip()
1645 if not l or l.endswith(":"):
1646 continue
1647 if "is not an object file" in l:
1648 continue
1649 name = os.path.basename(l.split()[0]).rsplit(".", 1)[0]
1650 if name and name not in needed[pkg]:
1651 needed[pkg].add((name, file, tuple()))
1652
1653 def mingw_dll(file, needed, sonames, renames, pkgver):
1654 if not os.path.exists(file):
1655 return
1656
1657 if file.endswith(".dll"):
1658 # assume all dlls are shared objects provided by the package
1659 sonames.add((os.path.basename(file), os.path.dirname(file).replace(pkgdest + "/" + pkg, ''), pkgver))
1660
1661 if (file.endswith(".dll") or file.endswith(".exe")):
1662 # use objdump to search for "DLL Name: .*\.dll"
1663 p = subprocess.Popen([d.expand("${OBJDUMP}"), "-p", file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
1664 out, err = p.communicate()
1665 # process the output, grabbing all .dll names
1666 if p.returncode == 0:
1667 for m in re.finditer(r"DLL Name: (.*?\.dll)$", out.decode(), re.MULTILINE | re.IGNORECASE):
1668 dllname = m.group(1)
1669 if dllname:
1670 needed[pkg].add((dllname, file, tuple()))
1671
1672 if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS') == "1":
1673 snap_symlinks = True
1674 else:
1675 snap_symlinks = False
1676
1677 needed = {}
1678
1679 shlib_provider = oe.package.read_shlib_providers(d)
1680
1681 for pkg in shlib_pkgs:
1682 private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or ""
1683 private_libs = private_libs.split()
1684 needs_ldconfig = False
1685 bb.debug(2, "calculating shlib provides for %s" % pkg)
1686
1687 pkgver = d.getVar('PKGV:' + pkg)
1688 if not pkgver:
1689 pkgver = d.getVar('PV_' + pkg)
1690 if not pkgver:
1691 pkgver = ver
1692
1693 needed[pkg] = set()
1694 sonames = set()
1695 renames = []
1696 linuxlist = []
1697 for file in pkgfiles[pkg]:
1698 soname = None
1699 if cpath.islink(file):
1700 continue
1701 if hostos.startswith("darwin"):
1702 darwin_so(file, needed, sonames, renames, pkgver)
1703 elif hostos.startswith("mingw"):
1704 mingw_dll(file, needed, sonames, renames, pkgver)
1705 elif os.access(file, os.X_OK) or lib_re.match(file):
1706 linuxlist.append(file)
1707
1708 if linuxlist:
1709 results = oe.utils.multiprocess_launch(linux_so, linuxlist, d, extraargs=(pkg, pkgver, d))
1710 for r in results:
1711 ldconfig = r[0]
1712 needed[pkg] |= r[1]
1713 sonames |= r[2]
1714 renames.extend(r[3])
1715 needs_ldconfig = needs_ldconfig or ldconfig
1716
1717 for (old, new) in renames:
1718 bb.note("Renaming %s to %s" % (old, new))
1719 bb.utils.rename(old, new)
1720 pkgfiles[pkg].remove(old)
1721
1722 shlibs_file = os.path.join(shlibswork_dir, pkg + ".list")
1723 if len(sonames):
1724 with open(shlibs_file, 'w') as fd:
1725 for s in sorted(sonames):
1726 if s[0] in shlib_provider and s[1] in shlib_provider[s[0]]:
1727 (old_pkg, old_pkgver) = shlib_provider[s[0]][s[1]]
1728 if old_pkg != pkg:
1729 bb.warn('%s-%s was registered as shlib provider for %s, changing it to %s-%s because it was built later' % (old_pkg, old_pkgver, s[0], pkg, pkgver))
1730 bb.debug(1, 'registering %s-%s as shlib provider for %s' % (pkg, pkgver, s[0]))
1731 fd.write(s[0] + ':' + s[1] + ':' + s[2] + '\n')
1732 if s[0] not in shlib_provider:
1733 shlib_provider[s[0]] = {}
1734 shlib_provider[s[0]][s[1]] = (pkg, pkgver)
1735 if needs_ldconfig:
1736 bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
1737 postinst = d.getVar('pkg_postinst:%s' % pkg)
1738 if not postinst:
1739 postinst = '#!/bin/sh\n'
1740 postinst += d.getVar('ldconfig_postinst_fragment')
1741 d.setVar('pkg_postinst:%s' % pkg, postinst)
1742 bb.debug(1, 'LIBNAMES: pkg %s sonames %s' % (pkg, sonames))
1743
1744 assumed_libs = d.getVar('ASSUME_SHLIBS')
1745 if assumed_libs:
1746 libdir = d.getVar("libdir")
1747 for e in assumed_libs.split():
1748 l, dep_pkg = e.split(":")
1749 lib_ver = None
1750 dep_pkg = dep_pkg.rsplit("_", 1)
1751 if len(dep_pkg) == 2:
1752 lib_ver = dep_pkg[1]
1753 dep_pkg = dep_pkg[0]
1754 if l not in shlib_provider:
1755 shlib_provider[l] = {}
1756 shlib_provider[l][libdir] = (dep_pkg, lib_ver)
1757
1758 libsearchpath = [d.getVar('libdir'), d.getVar('base_libdir')]
1759
1760 for pkg in shlib_pkgs:
1761 bb.debug(2, "calculating shlib requirements for %s" % pkg)
1762
1763 private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or ""
1764 private_libs = private_libs.split()
1765
1766 deps = list()
1767 for n in needed[pkg]:
1768 # if n is in private libraries, don't try to search for a provider for it;
1769 # this could cause problems if some abc.bb provides a private
1770 # /opt/abc/lib/libfoo.so.1 and contains /usr/bin/abc depending on the system library libfoo.so.1,
1771 # but skipping it is still a better alternative than providing our own
1772 # version and then adding a runtime dependency on the same system library
1773 if private_libs and len([i for i in private_libs if fnmatch.fnmatch(n[0], i)]) > 0:
1774 bb.debug(2, '%s: Dependency %s covered by PRIVATE_LIBS' % (pkg, n[0]))
1775 continue
1776 if n[0] in shlib_provider.keys():
1777 shlib_provider_map = shlib_provider[n[0]]
1778 matches = set()
1779 for p in itertools.chain(list(n[2]), sorted(shlib_provider_map.keys()), libsearchpath):
1780 if p in shlib_provider_map:
1781 matches.add(p)
1782 if len(matches) > 1:
1783 matchpkgs = ', '.join([shlib_provider_map[match][0] for match in matches])
1784 bb.error("%s: Multiple shlib providers for %s: %s (used by files: %s)" % (pkg, n[0], matchpkgs, n[1]))
1785 elif len(matches) == 1:
1786 (dep_pkg, ver_needed) = shlib_provider_map[matches.pop()]
1787
1788 bb.debug(2, '%s: Dependency %s requires package %s (used by files: %s)' % (pkg, n[0], dep_pkg, n[1]))
1789
1790 if dep_pkg == pkg:
1791 continue
1792
1793 if ver_needed:
1794 dep = "%s (>= %s)" % (dep_pkg, ver_needed)
1795 else:
1796 dep = dep_pkg
1797 if not dep in deps:
1798 deps.append(dep)
1799 continue
1800 bb.note("Couldn't find shared library provider for %s, used by files: %s" % (n[0], n[1]))
1801
1802 deps_file = os.path.join(pkgdest, pkg + ".shlibdeps")
1803 if os.path.exists(deps_file):
1804 os.remove(deps_file)
1805 if deps:
1806 with open(deps_file, 'w') as fd:
1807 for dep in sorted(deps):
1808 fd.write(dep + '\n')
1809
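To illustrate the provider lookup in the second loop above, a self-contained sketch with a fabricated provider map; the soname, package, and path names are hypothetical:

    import itertools

    # soname -> {library directory -> (providing package, version)}
    shlib_provider = {"libfoo.so.1": {"/usr/lib": ("libfoo1", "1.2")}}
    libsearchpath = ["/usr/lib", "/lib"]

    def resolve(soname, rpaths):
        provider_map = shlib_provider.get(soname)
        if not provider_map:
            return None                   # the "Couldn't find ... provider" case
        matches = set()
        # Candidate dirs: the binary's RPATH entries, every registered dir,
        # then the default search path, exactly as in the loop above.
        for p in itertools.chain(rpaths, sorted(provider_map), libsearchpath):
            if p in provider_map:
                matches.add(p)
        if len(matches) > 1:
            raise RuntimeError("multiple shlib providers for %s" % soname)
        if not matches:
            return None
        dep_pkg, ver = provider_map[matches.pop()]
        return "%s (>= %s)" % (dep_pkg, ver) if ver else dep_pkg

    print(resolve("libfoo.so.1", ()))   # libfoo1 (>= 1.2)
    print(resolve("libbar.so.2", ()))   # None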
1810def process_pkgconfig(pkgfiles, d):
1811 packages = d.getVar('PACKAGES')
1812 workdir = d.getVar('WORKDIR')
1813 pkgdest = d.getVar('PKGDEST')
1814
1815 shlibs_dirs = d.getVar('SHLIBSDIRS').split()
1816 shlibswork_dir = d.getVar('SHLIBSWORKDIR')
1817
1818 pc_re = re.compile(r'(.*)\.pc$')
1819 var_re = re.compile(r'(.*)=(.*)')
1820 field_re = re.compile(r'(.*): (.*)')
1821
1822 pkgconfig_provided = {}
1823 pkgconfig_needed = {}
1824 for pkg in packages.split():
1825 pkgconfig_provided[pkg] = []
1826 pkgconfig_needed[pkg] = []
1827 for file in sorted(pkgfiles[pkg]):
1828 m = pc_re.match(file)
1829 if m:
1830 pd = bb.data.init()
1831 name = m.group(1)
1832 pkgconfig_provided[pkg].append(os.path.basename(name))
1833 if not os.access(file, os.R_OK):
1834 continue
1835 with open(file, 'r') as f:
1836 lines = f.readlines()
1837 for l in lines:
1838 m = field_re.match(l)
1839 if m:
1840 hdr = m.group(1)
1841 exp = pd.expand(m.group(2))
1842 if hdr == 'Requires':
1843 pkgconfig_needed[pkg] += exp.replace(',', ' ').split()
1844 continue
1845 m = var_re.match(l)
1846 if m:
1847 name = m.group(1)
1848 val = m.group(2)
1849 pd.setVar(name, pd.expand(val))
1850
1851 for pkg in packages.split():
1852 pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist")
1853 if pkgconfig_provided[pkg] != []:
1854 with open(pkgs_file, 'w') as f:
1855 for p in sorted(pkgconfig_provided[pkg]):
1856 f.write('%s\n' % p)
1857
1858 # Go from least to most specific since the last one found wins
1859 for dir in reversed(shlibs_dirs):
1860 if not os.path.exists(dir):
1861 continue
1862 for file in sorted(os.listdir(dir)):
1863 m = re.match(r'^(.*)\.pclist$', file)
1864 if m:
1865 pkg = m.group(1)
1866 with open(os.path.join(dir, file)) as fd:
1867 lines = fd.readlines()
1868 pkgconfig_provided[pkg] = []
1869 for l in lines:
1870 pkgconfig_provided[pkg].append(l.rstrip())
1871
1872 for pkg in packages.split():
1873 deps = []
1874 for n in pkgconfig_needed[pkg]:
1875 found = False
1876 for k in pkgconfig_provided.keys():
1877 if n in pkgconfig_provided[k]:
1878 if k != pkg and not (k in deps):
1879 deps.append(k)
1880 found = True
1881 if not found:
1882 bb.note("couldn't find pkgconfig module '%s' in any package" % n)
1883 deps_file = os.path.join(pkgdest, pkg + ".pcdeps")
1884 if len(deps):
1885 with open(deps_file, 'w') as fd:
1886 for dep in deps:
1887 fd.write(dep + '\n')
1888
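The .pc scanning above boils down to the two regexes shown; a standalone sketch over a fabricated pkg-config file (note the real code also expands ${...} references through bb.data, which this sketch skips):

    import re

    pc = """prefix=/usr
    libdir=${prefix}/lib
    Requires: glib-2.0, gobject-2.0
    """

    var_re = re.compile(r'(.*)=(.*)')
    field_re = re.compile(r'(.*): (.*)')

    variables, needed = {}, []
    for l in pc.splitlines():
        l = l.strip()
        m = field_re.match(l)
        if m and m.group(1) == 'Requires':
            needed += m.group(2).replace(',', ' ').split()  # commas and spaces both separate
            continue
        m = var_re.match(l)
        if m:
            variables[m.group(1)] = m.group(2)

    print(needed)               # ['glib-2.0', 'gobject-2.0']
    print(variables['libdir'])  # ${prefix}/lib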
1889def read_libdep_files(d):
1890 pkglibdeps = {}
1891 packages = d.getVar('PACKAGES').split()
1892 for pkg in packages:
1893 pkglibdeps[pkg] = {}
1894 for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
1895 depsfile = d.expand("${PKGDEST}/" + pkg + extension)
1896 if os.access(depsfile, os.R_OK):
1897 with open(depsfile) as fd:
1898 lines = fd.readlines()
1899 for l in lines:
1900 l = l.rstrip()
1901 deps = bb.utils.explode_dep_versions2(l)
1902 for dep in deps:
1903 if not dep in pkglibdeps[pkg]:
1904 pkglibdeps[pkg][dep] = deps[dep]
1905 return pkglibdeps
1906
1907def process_depchains(pkgfiles, d):
1908 """
1909 For a given set of prefix and postfix modifiers, make those packages
1910 RRECOMMENDS on the corresponding packages for its RDEPENDS.
1911
1912 Example: If package A depends upon package B, and A's .bb emits an
1913 A-dev package, this would make A-dev Recommends: B-dev.
1914
1915 If only one of a given suffix is specified, it will take the RRECOMMENDS
1916 based on the RDEPENDS of *all* other packages. If more than one of a given
1917 suffix is specified, it will only use the RDEPENDS of the single parent
1918 package.
1919 """
1920
1921 packages = d.getVar('PACKAGES')
1922 postfixes = (d.getVar('DEPCHAIN_POST') or '').split()
1923 prefixes = (d.getVar('DEPCHAIN_PRE') or '').split()
1924
1925 def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d):
1926
1927 #bb.note('depends for %s is %s' % (base, depends))
1928 rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "")
1929
1930 for depend in sorted(depends):
1931 if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'):
1932 #bb.note("Skipping %s" % depend)
1933 continue
1934 if depend.endswith('-dev'):
1935 depend = depend[:-4]
1936 if depend.endswith('-dbg'):
1937 depend = depend[:-4]
1938 pkgname = getname(depend, suffix)
1939 #bb.note("Adding %s for %s" % (pkgname, depend))
1940 if pkgname not in rreclist and pkgname != pkg:
1941 rreclist[pkgname] = []
1942
1943 #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist)))
1944 d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))
1945
1946 def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):
1947
1948 #bb.note('rdepends for %s is %s' % (base, rdepends))
1949 rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "")
1950
1951 for depend in sorted(rdepends):
1952 if depend.find('virtual-locale-') != -1:
1953 #bb.note("Skipping %s" % depend)
1954 continue
1955 if depend.endswith('-dev'):
1956 depend = depend[:-4]
1957 if depend.endswith('-dbg'):
1958 depend = depend[:-4]
1959 pkgname = getname(depend, suffix)
1960 #bb.note("Adding %s for %s" % (pkgname, depend))
1961 if pkgname not in rreclist and pkgname != pkg:
1962 rreclist[pkgname] = []
1963
1964 #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist)))
1965 d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))
1966
1967 def add_dep(list, dep):
1968 if dep not in list:
1969 list.append(dep)
1970
1971 depends = []
1972 for dep in bb.utils.explode_deps(d.getVar('DEPENDS') or ""):
1973 add_dep(depends, dep)
1974
1975 rdepends = []
1976 for pkg in packages.split():
1977 for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + pkg) or ""):
1978 add_dep(rdepends, dep)
1979
1980 #bb.note('rdepends is %s' % rdepends)
1981
1982 def post_getname(name, suffix):
1983 return '%s%s' % (name, suffix)
1984 def pre_getname(name, suffix):
1985 return '%s%s' % (suffix, name)
1986
1987 pkgs = {}
1988 for pkg in packages.split():
1989 for postfix in postfixes:
1990 if pkg.endswith(postfix):
1991 if not postfix in pkgs:
1992 pkgs[postfix] = {}
1993 pkgs[postfix][pkg] = (pkg[:-len(postfix)], post_getname)
1994
1995 for prefix in prefixes:
1996 if pkg.startswith(prefix):
1997 if not prefix in pkgs:
1998 pkgs[prefix] = {}
1999 pkgs[prefix][pkg] = (pkg[:-len(prefix)], pre_getname)
2000
2001 if "-dbg" in pkgs:
2002 pkglibdeps = read_libdep_files(d)
2003 pkglibdeplist = []
2004 for pkg in pkglibdeps:
2005 for k in pkglibdeps[pkg]:
2006 add_dep(pkglibdeplist, k)
2007 dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS') == '1') or (bb.data.inherits_class('packagegroup', d)))
2008
2009 for suffix in pkgs:
2010 for pkg in pkgs[suffix]:
2011 if d.getVarFlag('RRECOMMENDS:' + pkg, 'nodeprrecs'):
2012 continue
2013 (base, func) = pkgs[suffix][pkg]
2014 if suffix == "-dev":
2015 pkg_adddeprrecs(pkg, base, suffix, func, depends, d)
2016 elif suffix == "-dbg":
2017 if not dbgdefaultdeps:
2018 pkg_addrrecs(pkg, base, suffix, func, pkglibdeplist, d)
2019 continue
2020 if len(pkgs[suffix]) == 1:
2021 pkg_addrrecs(pkg, base, suffix, func, rdepends, d)
2022 else:
2023 rdeps = []
2024 for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + base) or ""):
2025 add_dep(rdeps, dep)
2026 pkg_addrrecs(pkg, base, suffix, func, rdeps, d)
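A tiny worked example of the suffix/prefix name mapping used above; the package names and the 'lib32-' prefix are hypothetical:

    def post_getname(name, suffix):
        return '%s%s' % (name, suffix)

    def pre_getname(name, suffix):
        return '%s%s' % (suffix, name)

    # If package A rdepends on B, A-dev gains an RRECOMMENDS on B-dev;
    # a prefix chain maps the other way round.
    print(post_getname('B', '-dev'))    # B-dev
    print(pre_getname('B', 'lib32-'))   # lib32-B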
diff --git a/meta/lib/oe/package_manager/__init__.py b/meta/lib/oe/package_manager/__init__.py
index 8e7128b195..6774cdb794 100644
--- a/meta/lib/oe/package_manager/__init__.py
+++ b/meta/lib/oe/package_manager/__init__.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -90,7 +92,7 @@ def opkg_query(cmd_output):
90 92
91def failed_postinsts_abort(pkgs, log_path): 93def failed_postinsts_abort(pkgs, log_path):
92 bb.fatal("""Postinstall scriptlets of %s have failed. If the intention is to defer them to first boot, 94 bb.fatal("""Postinstall scriptlets of %s have failed. If the intention is to defer them to first boot,
93then please place them into pkg_postinst_ontarget_${PN} (). 95then please place them into pkg_postinst_ontarget:${PN} ().
94Deferring to first boot via 'exit 1' is no longer supported. 96Deferring to first boot via 'exit 1' is no longer supported.
95Details of the failure are in %s.""" %(pkgs, log_path)) 97Details of the failure are in %s.""" %(pkgs, log_path))
96 98
@@ -120,7 +122,8 @@ def generate_locale_archive(d, rootfs, target_arch, localedir):
120 "riscv32": ["--uint32-align=4", "--little-endian"], 122 "riscv32": ["--uint32-align=4", "--little-endian"],
121 "i586": ["--uint32-align=4", "--little-endian"], 123 "i586": ["--uint32-align=4", "--little-endian"],
122 "i686": ["--uint32-align=4", "--little-endian"], 124 "i686": ["--uint32-align=4", "--little-endian"],
123 "x86_64": ["--uint32-align=4", "--little-endian"] 125 "x86_64": ["--uint32-align=4", "--little-endian"],
126 "loongarch64": ["--uint32-align=4", "--little-endian"]
124 } 127 }
125 if target_arch in locale_arch_options: 128 if target_arch in locale_arch_options:
126 arch_options = locale_arch_options[target_arch] 129 arch_options = locale_arch_options[target_arch]
@@ -189,7 +192,7 @@ class PackageManager(object, metaclass=ABCMeta):
189 bb.utils.remove(self.intercepts_dir, True) 192 bb.utils.remove(self.intercepts_dir, True)
190 bb.utils.mkdirhier(self.intercepts_dir) 193 bb.utils.mkdirhier(self.intercepts_dir)
191 for intercept in postinst_intercepts: 194 for intercept in postinst_intercepts:
192 bb.utils.copyfile(intercept, os.path.join(self.intercepts_dir, os.path.basename(intercept))) 195 shutil.copy(intercept, os.path.join(self.intercepts_dir, os.path.basename(intercept)))
193 196
194 @abstractmethod 197 @abstractmethod
195 def _handle_intercept_failure(self, failed_script): 198 def _handle_intercept_failure(self, failed_script):
@@ -266,7 +269,7 @@ class PackageManager(object, metaclass=ABCMeta):
266 pass 269 pass
267 270
268 @abstractmethod 271 @abstractmethod
269 def install(self, pkgs, attempt_only=False): 272 def install(self, pkgs, attempt_only=False, hard_depends_only=False):
270 """ 273 """
271 Install a list of packages. 'pkgs' is a list object. If 'attempt_only' is 274 Install a list of packages. 'pkgs' is a list object. If 'attempt_only' is
272 True, installation failures are ignored. 275 True, installation failures are ignored.
@@ -321,7 +324,7 @@ class PackageManager(object, metaclass=ABCMeta):
321 # TODO don't have sdk here but have a property on the superclass 324 # TODO don't have sdk here but have a property on the superclass
322 # (and respect in install_complementary) 325 # (and respect in install_complementary)
323 if sdk: 326 if sdk:
324 pkgdatadir = self.d.expand("${TMPDIR}/pkgdata/${SDK_SYS}") 327 pkgdatadir = self.d.getVar("PKGDATA_DIR_SDK")
325 else: 328 else:
326 pkgdatadir = self.d.getVar("PKGDATA_DIR") 329 pkgdatadir = self.d.getVar("PKGDATA_DIR")
327 330
@@ -344,10 +347,8 @@ class PackageManager(object, metaclass=ABCMeta):
344 def install_complementary(self, globs=None): 347 def install_complementary(self, globs=None):
345 """ 348 """
346 Install complementary packages based upon the list of currently installed 349 Install complementary packages based upon the list of currently installed
347 packages e.g. locales, *-dev, *-dbg, etc. This will only attempt to install 350 packages e.g. locales, *-dev, *-dbg, etc. Note: every backend needs to
348 these packages, if they don't exist then no error will occur. Note: every 351 call this function explicitly after the normal package installation.
349 backend needs to call this function explicitly after the normal package
350 installation
351 """ 352 """
352 if globs is None: 353 if globs is None:
353 globs = self.d.getVar('IMAGE_INSTALL_COMPLEMENTARY') 354 globs = self.d.getVar('IMAGE_INSTALL_COMPLEMENTARY')
@@ -398,7 +399,7 @@ class PackageManager(object, metaclass=ABCMeta):
398 bb.note("Installing complementary packages ... %s (skipped already provided packages %s)" % ( 399 bb.note("Installing complementary packages ... %s (skipped already provided packages %s)" % (
399 ' '.join(install_pkgs), 400 ' '.join(install_pkgs),
400 ' '.join(skip_pkgs))) 401 ' '.join(skip_pkgs)))
401 self.install(install_pkgs, attempt_only=True) 402 self.install(install_pkgs, hard_depends_only=True)
402 except subprocess.CalledProcessError as e: 403 except subprocess.CalledProcessError as e:
403 bb.fatal("Could not compute complementary packages list. Command " 404 bb.fatal("Could not compute complementary packages list. Command "
404 "'%s' returned %d:\n%s" % 405 "'%s' returned %d:\n%s" %
@@ -469,7 +470,10 @@ def create_packages_dir(d, subrepo_dir, deploydir, taskname, filterbydependencie
469 # Detect bitbake -b usage 470 # Detect bitbake -b usage
470 nodeps = d.getVar("BB_LIMITEDDEPS") or False 471 nodeps = d.getVar("BB_LIMITEDDEPS") or False
471 if nodeps or not filterbydependencies: 472 if nodeps or not filterbydependencies:
472 oe.path.symlink(deploydir, subrepo_dir, True) 473 for arch in d.getVar("ALL_MULTILIB_PACKAGE_ARCHS").split() + d.getVar("ALL_MULTILIB_PACKAGE_ARCHS").replace("-", "_").split():
474 target = os.path.join(deploydir + "/" + arch)
475 if os.path.exists(target):
476 oe.path.symlink(target, subrepo_dir + "/" + arch, True)
473 return 477 return
474 478
475 start = None 479 start = None
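
Note on the install() API change above: install_complementary() now passes hard_depends_only=True instead of attempt_only=True, so the complementary globs (*-dev, *-dbg, locales) pull in only hard runtime dependencies. A minimal sketch of the contract with a stand-in backend; the flag spellings are taken from the per-backend hunks below (apt-get/opkg: --no-install-recommends, dnf: --setopt=install_weak_deps=False):

    # Stand-in backend illustrating the hard_depends_only contract.
    class SketchPM:
        def install(self, pkgs, attempt_only=False, hard_depends_only=False):
            args = ["fakepkg", "install"]
            if hard_depends_only:
                # apt-get/opkg spell this --no-install-recommends;
                # dnf uses --setopt=install_weak_deps=False
                args.append("--no-install-recommends")
            print(" ".join(args + pkgs))

        def install_complementary(self, install_pkgs):
            # was: self.install(install_pkgs, attempt_only=True)
            self.install(install_pkgs, hard_depends_only=True)

    SketchPM().install_complementary(["bash-dev", "bash-dbg"])
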
diff --git a/meta/lib/oe/package_manager/deb/__init__.py b/meta/lib/oe/package_manager/deb/__init__.py
index 2ee68fefb1..0c23c884c1 100644
--- a/meta/lib/oe/package_manager/deb/__init__.py
+++ b/meta/lib/oe/package_manager/deb/__init__.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -53,6 +55,7 @@ class DpkgIndexer(Indexer):
53 55
54 index_cmds = [] 56 index_cmds = []
55 deb_dirs_found = False 57 deb_dirs_found = False
58 index_sign_files = set()
56 for arch in arch_list: 59 for arch in arch_list:
57 arch_dir = os.path.join(self.deploy_dir, arch) 60 arch_dir = os.path.join(self.deploy_dir, arch)
58 if not os.path.isdir(arch_dir): 61 if not os.path.isdir(arch_dir):
@@ -62,7 +65,10 @@ class DpkgIndexer(Indexer):
62 65
63 cmd += "%s -fcn Packages > Packages.gz;" % gzip 66 cmd += "%s -fcn Packages > Packages.gz;" % gzip
64 67
65 with open(os.path.join(arch_dir, "Release"), "w+") as release: 68 release_file = os.path.join(arch_dir, "Release")
69 index_sign_files.add(release_file)
70
71 with open(release_file, "w+") as release:
66 release.write("Label: %s\n" % arch) 72 release.write("Label: %s\n" % arch)
67 73
68 cmd += "PSEUDO_UNLOAD=1 %s release . >> Release" % apt_ftparchive 74 cmd += "PSEUDO_UNLOAD=1 %s release . >> Release" % apt_ftparchive
@@ -77,7 +83,16 @@ class DpkgIndexer(Indexer):
77 83
78 oe.utils.multiprocess_launch(create_index, index_cmds, self.d) 84 oe.utils.multiprocess_launch(create_index, index_cmds, self.d)
79 if self.d.getVar('PACKAGE_FEED_SIGN') == '1': 85 if self.d.getVar('PACKAGE_FEED_SIGN') == '1':
80 raise NotImplementedError('Package feed signing not implementd for dpkg') 86 signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND'))
87 else:
88 signer = None
89 if signer:
90 for f in index_sign_files:
91 signer.detach_sign(f,
92 self.d.getVar('PACKAGE_FEED_GPG_NAME'),
93 self.d.getVar('PACKAGE_FEED_GPG_PASSPHRASE_FILE'),
94 output_suffix="gpg",
95 use_sha256=True)
81 96
82class PMPkgsList(PkgsList): 97class PMPkgsList(PkgsList):
83 98
@@ -214,7 +229,7 @@ class DpkgPM(OpkgDpkgPM):
214 229
215 tmp_sf.write(status) 230 tmp_sf.write(status)
216 231
217 os.rename(status_file + ".tmp", status_file) 232 bb.utils.rename(status_file + ".tmp", status_file)
218 233
219 def run_pre_post_installs(self, package_name=None): 234 def run_pre_post_installs(self, package_name=None):
220 """ 235 """
@@ -276,14 +291,18 @@ class DpkgPM(OpkgDpkgPM):
276 291
277 self.deploy_dir_unlock() 292 self.deploy_dir_unlock()
278 293
279 def install(self, pkgs, attempt_only=False): 294 def install(self, pkgs, attempt_only=False, hard_depends_only=False):
280 if attempt_only and len(pkgs) == 0: 295 if attempt_only and len(pkgs) == 0:
281 return 296 return
282 297
283 os.environ['APT_CONFIG'] = self.apt_conf_file 298 os.environ['APT_CONFIG'] = self.apt_conf_file
284 299
285 cmd = "%s %s install --allow-downgrades --allow-remove-essential --allow-change-held-packages --allow-unauthenticated --no-remove %s" % \ 300 extra_args = ""
286 (self.apt_get_cmd, self.apt_args, ' '.join(pkgs)) 301 if hard_depends_only:
302 extra_args = "--no-install-recommends"
303
304 cmd = "%s %s install --allow-downgrades --allow-remove-essential --allow-change-held-packages --allow-unauthenticated --no-remove %s %s" % \
305 (self.apt_get_cmd, self.apt_args, extra_args, ' '.join(pkgs))
287 306
288 try: 307 try:
289 bb.note("Installing the following packages: %s" % ' '.join(pkgs)) 308 bb.note("Installing the following packages: %s" % ' '.join(pkgs))
@@ -299,13 +318,13 @@ class DpkgPM(OpkgDpkgPM):
299 for dir in dirs: 318 for dir in dirs:
300 new_dir = re.sub(r"\.dpkg-new", "", dir) 319 new_dir = re.sub(r"\.dpkg-new", "", dir)
301 if dir != new_dir: 320 if dir != new_dir:
302 os.rename(os.path.join(root, dir), 321 bb.utils.rename(os.path.join(root, dir),
303 os.path.join(root, new_dir)) 322 os.path.join(root, new_dir))
304 323
305 for file in files: 324 for file in files:
306 new_file = re.sub(r"\.dpkg-new", "", file) 325 new_file = re.sub(r"\.dpkg-new", "", file)
307 if file != new_file: 326 if file != new_file:
308 os.rename(os.path.join(root, file), 327 bb.utils.rename(os.path.join(root, file),
309 os.path.join(root, new_file)) 328 os.path.join(root, new_file))
310 329
311 330
@@ -422,7 +441,7 @@ class DpkgPM(OpkgDpkgPM):
422 multilib_variants = self.d.getVar("MULTILIB_VARIANTS"); 441 multilib_variants = self.d.getVar("MULTILIB_VARIANTS");
423 for variant in multilib_variants.split(): 442 for variant in multilib_variants.split():
424 localdata = bb.data.createCopy(self.d) 443 localdata = bb.data.createCopy(self.d)
425 variant_tune = localdata.getVar("DEFAULTTUNE_virtclass-multilib-" + variant, False) 444 variant_tune = localdata.getVar("DEFAULTTUNE:virtclass-multilib-" + variant, False)
426 orig_arch = localdata.getVar("DPKG_ARCH") 445 orig_arch = localdata.getVar("DPKG_ARCH")
427 localdata.setVar("DEFAULTTUNE", variant_tune) 446 localdata.setVar("DEFAULTTUNE", variant_tune)
428 variant_arch = localdata.getVar("DPKG_ARCH") 447 variant_arch = localdata.getVar("DPKG_ARCH")
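
The DpkgIndexer hunk above closes a long-standing gap: PACKAGE_FEED_SIGN == '1' used to raise NotImplementedError for dpkg, and now every per-arch Release file is collected in index_sign_files and detach-signed. A sketch of the resulting flow, assuming a signer object exposing detach_sign() with the signature used above (oe.gpg_sign.get_signer() returns one in the real code):

    import os

    # Sketch: detach-sign each per-arch Release file, producing Release.gpg.
    # `signer` is assumed to expose detach_sign(path, keyid, passphrase_file,
    # output_suffix=..., use_sha256=...) exactly as invoked in the hunk above.
    def sign_feed(deploy_dir, archs, signer, keyid, passphrase_file):
        index_sign_files = set()
        for arch in archs:
            release_file = os.path.join(deploy_dir, arch, "Release")
            if os.path.exists(release_file):
                index_sign_files.add(release_file)
        for f in index_sign_files:
            signer.detach_sign(f, keyid, passphrase_file,
                               output_suffix="gpg", use_sha256=True)
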
diff --git a/meta/lib/oe/package_manager/deb/manifest.py b/meta/lib/oe/package_manager/deb/manifest.py
index d8eab24a06..72983bae98 100644
--- a/meta/lib/oe/package_manager/deb/manifest.py
+++ b/meta/lib/oe/package_manager/deb/manifest.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
diff --git a/meta/lib/oe/package_manager/deb/rootfs.py b/meta/lib/oe/package_manager/deb/rootfs.py
index 8fbaca11d6..1e25b64ed9 100644
--- a/meta/lib/oe/package_manager/deb/rootfs.py
+++ b/meta/lib/oe/package_manager/deb/rootfs.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
diff --git a/meta/lib/oe/package_manager/deb/sdk.py b/meta/lib/oe/package_manager/deb/sdk.py
index 9859d8f32d..6f3005053e 100644
--- a/meta/lib/oe/package_manager/deb/sdk.py
+++ b/meta/lib/oe/package_manager/deb/sdk.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -65,7 +67,14 @@ class PkgSdk(Sdk):
65 67
66 self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY')) 68 self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY'))
67 69
70 self.target_pm.run_pre_post_installs()
71
72 env_bkp = os.environ.copy()
73 os.environ['PATH'] = self.d.expand("${COREBASE}/scripts/nativesdk-intercept") + \
74 os.pathsep + os.environ["PATH"]
75
68 self.target_pm.run_intercepts(populate_sdk='target') 76 self.target_pm.run_intercepts(populate_sdk='target')
77 os.environ.update(env_bkp)
69 78
70 execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND")) 79 execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND"))
71 80
@@ -78,6 +87,8 @@ class PkgSdk(Sdk):
78 self._populate_sysroot(self.host_pm, self.host_manifest) 87 self._populate_sysroot(self.host_pm, self.host_manifest)
79 self.install_locales(self.host_pm) 88 self.install_locales(self.host_pm)
80 89
90 self.host_pm.run_pre_post_installs()
91
81 self.host_pm.run_intercepts(populate_sdk='host') 92 self.host_pm.run_intercepts(populate_sdk='host')
82 93
83 execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_HOST_COMMAND")) 94 execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_HOST_COMMAND"))
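
The env_bkp dance added above (and repeated in the ipk and rpm SDK classes below) temporarily puts ${COREBASE}/scripts/nativesdk-intercept at the front of PATH while the target intercepts run, then restores the environment; os.environ.update(env_bkp) suffices because only PATH is modified. The idiom in isolation, with a hypothetical helper name:

    import os

    def run_with_prepended_path(extra_dir, fn):
        """Hypothetical helper: run fn() with extra_dir first in PATH."""
        env_bkp = os.environ.copy()
        os.environ["PATH"] = extra_dir + os.pathsep + os.environ["PATH"]
        try:
            fn()
        finally:
            # only PATH was modified, so update() restores it
            os.environ.update(env_bkp)

    run_with_prepended_path("/opt/intercepts",
                            lambda: print(os.environ["PATH"].split(os.pathsep)[0]))
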
diff --git a/meta/lib/oe/package_manager/ipk/__init__.py b/meta/lib/oe/package_manager/ipk/__init__.py
index da488c1c7f..8cc9953a02 100644
--- a/meta/lib/oe/package_manager/ipk/__init__.py
+++ b/meta/lib/oe/package_manager/ipk/__init__.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -14,6 +16,7 @@ class OpkgIndexer(Indexer):
14 ] 16 ]
15 17
16 opkg_index_cmd = bb.utils.which(os.getenv('PATH'), "opkg-make-index") 18 opkg_index_cmd = bb.utils.which(os.getenv('PATH'), "opkg-make-index")
19 opkg_index_cmd_extra_params = self.d.getVar('OPKG_MAKE_INDEX_EXTRA_PARAMS') or ""
17 if self.d.getVar('PACKAGE_FEED_SIGN') == '1': 20 if self.d.getVar('PACKAGE_FEED_SIGN') == '1':
18 signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND')) 21 signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND'))
19 else: 22 else:
@@ -39,8 +42,8 @@ class OpkgIndexer(Indexer):
39 if not os.path.exists(pkgs_file): 42 if not os.path.exists(pkgs_file):
40 open(pkgs_file, "w").close() 43 open(pkgs_file, "w").close()
41 44
42 index_cmds.add('%s --checksum md5 --checksum sha256 -r %s -p %s -m %s' % 45 index_cmds.add('%s --checksum md5 --checksum sha256 -r %s -p %s -m %s %s' %
43 (opkg_index_cmd, pkgs_file, pkgs_file, pkgs_dir)) 46 (opkg_index_cmd, pkgs_file, pkgs_file, pkgs_dir, opkg_index_cmd_extra_params))
44 47
45 index_sign_files.add(pkgs_file) 48 index_sign_files.add(pkgs_file)
46 49
@@ -102,12 +105,14 @@ class OpkgDpkgPM(PackageManager):
102 This method extracts the common parts for Opkg and Dpkg 105 This method extracts the common parts for Opkg and Dpkg
103 """ 106 """
104 107
105 try: 108 proc = subprocess.run(cmd, capture_output=True, encoding="utf-8", shell=True)
106 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True).decode("utf-8") 109 if proc.returncode:
107 except subprocess.CalledProcessError as e:
108 bb.fatal("Unable to list available packages. Command '%s' " 110 bb.fatal("Unable to list available packages. Command '%s' "
109 "returned %d:\n%s" % (cmd, e.returncode, e.output.decode("utf-8"))) 111 "returned %d:\n%s" % (cmd, proc.returncode, proc.stderr))
110 return opkg_query(output) 112 elif proc.stderr:
113 bb.note("Command '%s' returned stderr: %s" % (cmd, proc.stderr))
114
115 return opkg_query(proc.stdout)
111 116
112 def extract(self, pkg, pkg_info): 117 def extract(self, pkg, pkg_info):
113 """ 118 """
@@ -129,7 +134,7 @@ class OpkgDpkgPM(PackageManager):
129 tmp_dir = tempfile.mkdtemp() 134 tmp_dir = tempfile.mkdtemp()
130 current_dir = os.getcwd() 135 current_dir = os.getcwd()
131 os.chdir(tmp_dir) 136 os.chdir(tmp_dir)
132 data_tar = 'data.tar.xz' 137 data_tar = 'data.tar.zst'
133 138
134 try: 139 try:
135 cmd = [ar_cmd, 'x', pkg_path] 140 cmd = [ar_cmd, 'x', pkg_path]
@@ -213,7 +218,7 @@ class OpkgPM(OpkgDpkgPM):
213 218
214 tmp_sf.write(status) 219 tmp_sf.write(status)
215 220
216 os.rename(status_file + ".tmp", status_file) 221 bb.utils.rename(status_file + ".tmp", status_file)
217 222
218 def _create_custom_config(self): 223 def _create_custom_config(self):
219 bb.note("Building from feeds activated!") 224 bb.note("Building from feeds activated!")
@@ -243,7 +248,7 @@ class OpkgPM(OpkgDpkgPM):
243 """ 248 """
244 if (self.d.getVar('FEED_DEPLOYDIR_BASE_URI') or "") != "": 249 if (self.d.getVar('FEED_DEPLOYDIR_BASE_URI') or "") != "":
245 for arch in self.pkg_archs.split(): 250 for arch in self.pkg_archs.split():
246 cfg_file_name = os.path.join(self.target_rootfs, 251 cfg_file_name = oe.path.join(self.target_rootfs,
247 self.d.getVar("sysconfdir"), 252 self.d.getVar("sysconfdir"),
248 "opkg", 253 "opkg",
249 "local-%s-feed.conf" % arch) 254 "local-%s-feed.conf" % arch)
@@ -337,7 +342,7 @@ class OpkgPM(OpkgDpkgPM):
337 342
338 self.deploy_dir_unlock() 343 self.deploy_dir_unlock()
339 344
340 def install(self, pkgs, attempt_only=False): 345 def install(self, pkgs, attempt_only=False, hard_depends_only=False):
341 if not pkgs: 346 if not pkgs:
342 return 347 return
343 348
@@ -346,6 +351,8 @@ class OpkgPM(OpkgDpkgPM):
346 cmd += " --add-exclude %s" % exclude 351 cmd += " --add-exclude %s" % exclude
347 for bad_recommendation in (self.d.getVar("BAD_RECOMMENDATIONS") or "").split(): 352 for bad_recommendation in (self.d.getVar("BAD_RECOMMENDATIONS") or "").split():
348 cmd += " --add-ignore-recommends %s" % bad_recommendation 353 cmd += " --add-ignore-recommends %s" % bad_recommendation
354 if hard_depends_only:
355 cmd += " --no-install-recommends"
349 cmd += " install " 356 cmd += " install "
350 cmd += " ".join(pkgs) 357 cmd += " ".join(pkgs)
351 358
@@ -443,15 +450,16 @@ class OpkgPM(OpkgDpkgPM):
443 cmd = "%s %s --noaction install %s " % (self.opkg_cmd, 450 cmd = "%s %s --noaction install %s " % (self.opkg_cmd,
444 opkg_args, 451 opkg_args,
445 ' '.join(pkgs)) 452 ' '.join(pkgs))
446 try: 453 proc = subprocess.run(cmd, capture_output=True, encoding="utf-8", shell=True)
447 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True) 454 if proc.returncode:
448 except subprocess.CalledProcessError as e:
449 bb.fatal("Unable to dummy install packages. Command '%s' " 455 bb.fatal("Unable to dummy install packages. Command '%s' "
450 "returned %d:\n%s" % (cmd, e.returncode, e.output.decode("utf-8"))) 456 "returned %d:\n%s" % (cmd, proc.returncode, proc.stderr))
457 elif proc.stderr:
458 bb.note("Command '%s' returned stderr: %s" % (cmd, proc.stderr))
451 459
452 bb.utils.remove(temp_rootfs, True) 460 bb.utils.remove(temp_rootfs, True)
453 461
454 return output 462 return proc.stdout
455 463
456 def backup_packaging_data(self): 464 def backup_packaging_data(self):
457 # Save the opkglib for incremental ipk image generation 465 # Save the opkglib for incremental ipk image generation
@@ -498,6 +506,6 @@ class OpkgPM(OpkgDpkgPM):
498 "trying to extract the package." % pkg) 506 "trying to extract the package." % pkg)
499 507
500 tmp_dir = super(OpkgPM, self).extract(pkg, pkg_info) 508 tmp_dir = super(OpkgPM, self).extract(pkg, pkg_info)
501 bb.utils.remove(os.path.join(tmp_dir, "data.tar.xz")) 509 bb.utils.remove(os.path.join(tmp_dir, "data.tar.zst"))
502 510
503 return tmp_dir 511 return tmp_dir
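
Both query paths above (the opkg_query input and the dummy install) move from subprocess.check_output(stderr=STDOUT) to subprocess.run() with separated streams: stderr chatter no longer leaks into the parsed stdout, yet it is still surfaced via bb.note(). The pattern in isolation, with bb.fatal/bb.note replaced by stand-ins so the sketch runs anywhere:

    import subprocess

    def run_query(cmd):
        # capture_output=True keeps stdout (the data we parse) separate
        # from stderr (warnings we only want to log).
        proc = subprocess.run(cmd, capture_output=True, encoding="utf-8", shell=True)
        if proc.returncode:
            raise RuntimeError("Command '%s' returned %d:\n%s"
                               % (cmd, proc.returncode, proc.stderr))
        elif proc.stderr:
            print("note: command '%s' returned stderr: %s" % (cmd, proc.stderr))
        return proc.stdout

    print(run_query("echo Package: foo; echo warning >&2"))
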
diff --git a/meta/lib/oe/package_manager/ipk/manifest.py b/meta/lib/oe/package_manager/ipk/manifest.py
index ee4b57bcb0..3549d7428d 100644
--- a/meta/lib/oe/package_manager/ipk/manifest.py
+++ b/meta/lib/oe/package_manager/ipk/manifest.py
@@ -1,8 +1,11 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
5from oe.manifest import Manifest 7from oe.manifest import Manifest
8import re
6 9
7class PkgManifest(Manifest): 10class PkgManifest(Manifest):
8 """ 11 """
diff --git a/meta/lib/oe/package_manager/ipk/rootfs.py b/meta/lib/oe/package_manager/ipk/rootfs.py
index 26dbee6f6a..ba93eb62ea 100644
--- a/meta/lib/oe/package_manager/ipk/rootfs.py
+++ b/meta/lib/oe/package_manager/ipk/rootfs.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -145,51 +147,14 @@ class PkgRootfs(DpkgOpkgRootfs):
145 self.pm.recover_packaging_data() 147 self.pm.recover_packaging_data()
146 148
147 bb.utils.remove(self.d.getVar('MULTILIB_TEMP_ROOTFS'), True) 149 bb.utils.remove(self.d.getVar('MULTILIB_TEMP_ROOTFS'), True)
148
149 def _prelink_file(self, root_dir, filename):
150 bb.note('prelink %s in %s' % (filename, root_dir))
151 prelink_cfg = oe.path.join(root_dir,
152 self.d.expand('${sysconfdir}/prelink.conf'))
153 if not os.path.exists(prelink_cfg):
154 shutil.copy(self.d.expand('${STAGING_DIR_NATIVE}${sysconfdir_native}/prelink.conf'),
155 prelink_cfg)
156
157 cmd_prelink = self.d.expand('${STAGING_DIR_NATIVE}${sbindir_native}/prelink')
158 self._exec_shell_cmd([cmd_prelink,
159 '--root',
160 root_dir,
161 '-amR',
162 '-N',
163 '-c',
164 self.d.expand('${sysconfdir}/prelink.conf')])
165
166 ''' 150 '''
167 Compare two files with the same key twice to see if they are equal. 151 Compare two files with the same key twice to see if they are equal.
168 If they are not equal, it means they are duplicated and come from 152 If they are not equal, it means they are duplicated and come from
169 different packages. 153 different packages.
170 1st: Comapre them directly;
171 2nd: While incremental image creation is enabled, one of the
172 files could be probaly prelinked in the previous image
173 creation and the file has been changed, so we need to
174 prelink the other one and compare them.
175 ''' 154 '''
176 def _file_equal(self, key, f1, f2): 155 def _file_equal(self, key, f1, f2):
177
178 # Both of them are not prelinked
179 if filecmp.cmp(f1, f2): 156 if filecmp.cmp(f1, f2):
180 return True 157 return True
181
182 if bb.data.inherits_class('image-prelink', self.d):
183 if self.image_rootfs not in f1:
184 self._prelink_file(f1.replace(key, ''), f1)
185
186 if self.image_rootfs not in f2:
187 self._prelink_file(f2.replace(key, ''), f2)
188
189 # Both of them are prelinked
190 if filecmp.cmp(f1, f2):
191 return True
192
193 # Not equal 158 # Not equal
194 return False 159 return False
195 160
@@ -200,7 +165,7 @@ class PkgRootfs(DpkgOpkgRootfs):
200 """ 165 """
201 def _multilib_sanity_test(self, dirs): 166 def _multilib_sanity_test(self, dirs):
202 167
203 allow_replace = self.d.getVar("MULTILIBRE_ALLOW_REP") 168 allow_replace = "|".join((self.d.getVar("MULTILIBRE_ALLOW_REP") or "").split())
204 if allow_replace is None: 169 if allow_replace is None:
205 allow_replace = "" 170 allow_replace = ""
206 171
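
MULTILIBRE_ALLOW_REP above changes from a single regular expression to a whitespace-separated list of patterns OR-ed together with "|".join(); note that the `is None` fallback directly below it is now dead code, since join() always returns a string. A sketch with a hypothetical variable value (the anchored regex is an assumption for the demo, not the surrounding oe-core code):

    import re

    # Hypothetical MULTILIBRE_ALLOW_REP content: two replaceable path patterns
    multilibre_allow_rep = "/var/cache /usr/share/doc/.*"

    allow_replace = "|".join(multilibre_allow_rep.split())
    allow_pattern = re.compile("^(%s)$" % allow_replace)

    for path in ["/var/cache", "/usr/share/doc/pkg", "/etc/passwd"]:
        print(path, "replaceable" if allow_pattern.match(path) else "conflict")
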
diff --git a/meta/lib/oe/package_manager/ipk/sdk.py b/meta/lib/oe/package_manager/ipk/sdk.py
index e2ca415c8e..3acd55f548 100644
--- a/meta/lib/oe/package_manager/ipk/sdk.py
+++ b/meta/lib/oe/package_manager/ipk/sdk.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -61,12 +63,19 @@ class PkgSdk(Sdk):
61 63
62 self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY')) 64 self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY'))
63 65
66 env_bkp = os.environ.copy()
67 os.environ['PATH'] = self.d.expand("${COREBASE}/scripts/nativesdk-intercept") + \
68 os.pathsep + os.environ["PATH"]
69
64 self.target_pm.run_intercepts(populate_sdk='target') 70 self.target_pm.run_intercepts(populate_sdk='target')
71 os.environ.update(env_bkp)
65 72
66 execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND")) 73 execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND"))
67 74
68 if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d): 75 if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d):
69 self.target_pm.remove_packaging_data() 76 self.target_pm.remove_packaging_data()
77 else:
78 self.target_pm.remove_lists()
70 79
71 bb.note("Installing NATIVESDK packages") 80 bb.note("Installing NATIVESDK packages")
72 self._populate_sysroot(self.host_pm, self.host_manifest) 81 self._populate_sysroot(self.host_pm, self.host_manifest)
@@ -78,6 +87,8 @@ class PkgSdk(Sdk):
78 87
79 if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d): 88 if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d):
80 self.host_pm.remove_packaging_data() 89 self.host_pm.remove_packaging_data()
90 else:
91 self.host_pm.remove_lists()
81 92
82 target_sysconfdir = os.path.join(self.sdk_target_sysroot, self.sysconfdir) 93 target_sysconfdir = os.path.join(self.sdk_target_sysroot, self.sysconfdir)
83 host_sysconfdir = os.path.join(self.sdk_host_sysroot, self.sysconfdir) 94 host_sysconfdir = os.path.join(self.sdk_host_sysroot, self.sysconfdir)
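
With the two else branches added above, an SDK built with the package-management feature keeps its opkg status database but now drops the downloaded feed indexes via remove_lists(); without the feature, remove_packaging_data() still wipes everything. The gate is bb.utils.contains(variable, checkvalues, truevalue, falsevalue, d); a dict-backed sketch of its semantics:

    # Sketch of bb.utils.contains() semantics, with a plain dict standing
    # in for the BitBake datastore.
    def contains(variable, checkvalues, truevalue, falsevalue, d):
        val = d.get(variable, "")
        if isinstance(checkvalues, str):
            checkvalues = checkvalues.split()
        if set(checkvalues).issubset(val.split()):
            return truevalue
        return falsevalue

    d = {"SDKIMAGE_FEATURES": "package-management debug-tweaks"}
    if not contains("SDKIMAGE_FEATURES", "package-management", True, False, d):
        print("remove_packaging_data()  # no PM in SDK: drop everything")
    else:
        print("remove_lists()  # keep the status DB, drop feed indexes")
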
diff --git a/meta/lib/oe/package_manager/rpm/__init__.py b/meta/lib/oe/package_manager/rpm/__init__.py
index 6df0092281..f40c880af4 100644
--- a/meta/lib/oe/package_manager/rpm/__init__.py
+++ b/meta/lib/oe/package_manager/rpm/__init__.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -96,11 +98,15 @@ class RpmPM(PackageManager):
96 archs = ["sdk_provides_dummy_target"] + archs 98 archs = ["sdk_provides_dummy_target"] + archs
97 confdir = "%s/%s" %(self.target_rootfs, "etc/dnf/vars/") 99 confdir = "%s/%s" %(self.target_rootfs, "etc/dnf/vars/")
98 bb.utils.mkdirhier(confdir) 100 bb.utils.mkdirhier(confdir)
99 open(confdir + "arch", 'w').write(":".join(archs)) 101 with open(confdir + "arch", 'w') as f:
102 f.write(":".join(archs))
103
100 distro_codename = self.d.getVar('DISTRO_CODENAME') 104 distro_codename = self.d.getVar('DISTRO_CODENAME')
101 open(confdir + "releasever", 'w').write(distro_codename if distro_codename is not None else '') 105 with open(confdir + "releasever", 'w') as f:
106 f.write(distro_codename if distro_codename is not None else '')
102 107
103 open(oe.path.join(self.target_rootfs, "etc/dnf/dnf.conf"), 'w').write("") 108 with open(oe.path.join(self.target_rootfs, "etc/dnf/dnf.conf"), 'w') as f:
109 f.write("")
104 110
105 111
106 def _configure_rpm(self): 112 def _configure_rpm(self):
@@ -110,14 +116,17 @@ class RpmPM(PackageManager):
110 platformconfdir = "%s/%s" %(self.target_rootfs, "etc/rpm/") 116 platformconfdir = "%s/%s" %(self.target_rootfs, "etc/rpm/")
111 rpmrcconfdir = "%s/%s" %(self.target_rootfs, "etc/") 117 rpmrcconfdir = "%s/%s" %(self.target_rootfs, "etc/")
112 bb.utils.mkdirhier(platformconfdir) 118 bb.utils.mkdirhier(platformconfdir)
113 open(platformconfdir + "platform", 'w').write("%s-pc-linux" % self.primary_arch) 119 with open(platformconfdir + "platform", 'w') as f:
120 f.write("%s-pc-linux" % self.primary_arch)
114 with open(rpmrcconfdir + "rpmrc", 'w') as f: 121 with open(rpmrcconfdir + "rpmrc", 'w') as f:
115 f.write("arch_compat: %s: %s\n" % (self.primary_arch, self.archs if len(self.archs) > 0 else self.primary_arch)) 122 f.write("arch_compat: %s: %s\n" % (self.primary_arch, self.archs if len(self.archs) > 0 else self.primary_arch))
116 f.write("buildarch_compat: %s: noarch\n" % self.primary_arch) 123 f.write("buildarch_compat: %s: noarch\n" % self.primary_arch)
117 124
118 open(platformconfdir + "macros", 'w').write("%_transaction_color 7\n") 125 with open(platformconfdir + "macros", 'w') as f:
126 f.write("%_transaction_color 7\n")
119 if self.d.getVar('RPM_PREFER_ELF_ARCH'): 127 if self.d.getVar('RPM_PREFER_ELF_ARCH'):
120 open(platformconfdir + "macros", 'a').write("%%_prefer_color %s" % (self.d.getVar('RPM_PREFER_ELF_ARCH'))) 128 with open(platformconfdir + "macros", 'a') as f:
129 f.write("%%_prefer_color %s" % (self.d.getVar('RPM_PREFER_ELF_ARCH')))
121 130
122 if self.d.getVar('RPM_SIGN_PACKAGES') == '1': 131 if self.d.getVar('RPM_SIGN_PACKAGES') == '1':
123 signer = get_signer(self.d, self.d.getVar('RPM_GPG_BACKEND')) 132 signer = get_signer(self.d, self.d.getVar('RPM_GPG_BACKEND'))
@@ -164,13 +173,13 @@ class RpmPM(PackageManager):
164 repo_uri = uri + "/" + arch 173 repo_uri = uri + "/" + arch
165 repo_id = "oe-remote-repo" + "-".join(urlparse(repo_uri).path.split("/")) 174 repo_id = "oe-remote-repo" + "-".join(urlparse(repo_uri).path.split("/"))
166 repo_name = "OE Remote Repo:" + " ".join(urlparse(repo_uri).path.split("/")) 175 repo_name = "OE Remote Repo:" + " ".join(urlparse(repo_uri).path.split("/"))
167 open(oe.path.join(self.target_rootfs, "etc", "yum.repos.d", repo_base + ".repo"), 'a').write( 176 with open(oe.path.join(self.target_rootfs, "etc", "yum.repos.d", repo_base + ".repo"), 'a') as f:
168 "[%s]\nname=%s\nbaseurl=%s\n%s\n" % (repo_id, repo_name, repo_uri, gpg_opts)) 177 f.write("[%s]\nname=%s\nbaseurl=%s\n%s\n" % (repo_id, repo_name, repo_uri, gpg_opts))
169 else: 178 else:
170 repo_name = "OE Remote Repo:" + " ".join(urlparse(uri).path.split("/")) 179 repo_name = "OE Remote Repo:" + " ".join(urlparse(uri).path.split("/"))
171 repo_uri = uri 180 repo_uri = uri
172 open(oe.path.join(self.target_rootfs, "etc", "yum.repos.d", repo_base + ".repo"), 'w').write( 181 with open(oe.path.join(self.target_rootfs, "etc", "yum.repos.d", repo_base + ".repo"), 'w') as f:
173 "[%s]\nname=%s\nbaseurl=%s\n%s" % (repo_base, repo_name, repo_uri, gpg_opts)) 182 f.write("[%s]\nname=%s\nbaseurl=%s\n%s" % (repo_base, repo_name, repo_uri, gpg_opts))
174 183
175 def _prepare_pkg_transaction(self): 184 def _prepare_pkg_transaction(self):
176 os.environ['D'] = self.target_rootfs 185 os.environ['D'] = self.target_rootfs
@@ -181,7 +190,7 @@ class RpmPM(PackageManager):
181 os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE') 190 os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE')
182 191
183 192
184 def install(self, pkgs, attempt_only = False): 193 def install(self, pkgs, attempt_only=False, hard_depends_only=False):
185 if len(pkgs) == 0: 194 if len(pkgs) == 0:
186 return 195 return
187 self._prepare_pkg_transaction() 196 self._prepare_pkg_transaction()
@@ -192,13 +201,16 @@ class RpmPM(PackageManager):
192 201
193 output = self._invoke_dnf((["--skip-broken"] if attempt_only else []) + 202 output = self._invoke_dnf((["--skip-broken"] if attempt_only else []) +
194 (["-x", ",".join(exclude_pkgs)] if len(exclude_pkgs) > 0 else []) + 203 (["-x", ",".join(exclude_pkgs)] if len(exclude_pkgs) > 0 else []) +
195 (["--setopt=install_weak_deps=False"] if self.d.getVar('NO_RECOMMENDATIONS') == "1" else []) + 204 (["--setopt=install_weak_deps=False"] if (hard_depends_only or self.d.getVar('NO_RECOMMENDATIONS') == "1") else []) +
196 (["--nogpgcheck"] if self.d.getVar('RPM_SIGN_PACKAGES') != '1' else ["--setopt=gpgcheck=True"]) + 205 (["--nogpgcheck"] if self.d.getVar('RPM_SIGN_PACKAGES') != '1' else ["--setopt=gpgcheck=True"]) +
197 ["install"] + 206 ["install"] +
198 pkgs) 207 pkgs)
199 208
200 failed_scriptlets_pkgnames = collections.OrderedDict() 209 failed_scriptlets_pkgnames = collections.OrderedDict()
201 for line in output.splitlines(): 210 for line in output.splitlines():
211 if line.startswith("Error: Systemctl"):
212 bb.error(line)
213
202 if line.startswith("Error in POSTIN scriptlet in rpm package"): 214 if line.startswith("Error in POSTIN scriptlet in rpm package"):
203 failed_scriptlets_pkgnames[line.split()[-1]] = True 215 failed_scriptlets_pkgnames[line.split()[-1]] = True
204 216
@@ -326,7 +338,8 @@ class RpmPM(PackageManager):
326 return e.output.decode("utf-8") 338 return e.output.decode("utf-8")
327 339
328 def dump_install_solution(self, pkgs): 340 def dump_install_solution(self, pkgs):
329 open(self.solution_manifest, 'w').write(" ".join(pkgs)) 341 with open(self.solution_manifest, 'w') as f:
342 f.write(" ".join(pkgs))
330 return pkgs 343 return pkgs
331 344
332 def load_old_install_solution(self): 345 def load_old_install_solution(self):
@@ -360,7 +373,8 @@ class RpmPM(PackageManager):
360 bb.utils.mkdirhier(target_path) 373 bb.utils.mkdirhier(target_path)
361 num = self._script_num_prefix(target_path) 374 num = self._script_num_prefix(target_path)
362 saved_script_name = oe.path.join(target_path, "%d-%s" % (num, pkg)) 375 saved_script_name = oe.path.join(target_path, "%d-%s" % (num, pkg))
363 open(saved_script_name, 'w').write(output) 376 with open(saved_script_name, 'w') as f:
377 f.write(output)
364 os.chmod(saved_script_name, 0o755) 378 os.chmod(saved_script_name, 0o755)
365 379
366 def _handle_intercept_failure(self, registered_pkgs): 380 def _handle_intercept_failure(self, registered_pkgs):
@@ -372,11 +386,12 @@ class RpmPM(PackageManager):
372 self.save_rpmpostinst(pkg) 386 self.save_rpmpostinst(pkg)
373 387
374 def extract(self, pkg): 388 def extract(self, pkg):
375 output = self._invoke_dnf(["repoquery", "--queryformat", "%{location}", pkg]) 389 output = self._invoke_dnf(["repoquery", "--location", pkg])
376 pkg_name = output.splitlines()[-1] 390 pkg_name = output.splitlines()[-1]
377 if not pkg_name.endswith(".rpm"): 391 if not pkg_name.endswith(".rpm"):
378 bb.fatal("dnf could not find package %s in repository: %s" %(pkg, output)) 392 bb.fatal("dnf could not find package %s in repository: %s" %(pkg, output))
379 pkg_path = oe.path.join(self.rpm_repo_dir, pkg_name) 393 # Strip file: prefix
394 pkg_path = pkg_name[5:]
380 395
381 cpio_cmd = bb.utils.which(os.getenv("PATH"), "cpio") 396 cpio_cmd = bb.utils.which(os.getenv("PATH"), "cpio")
382 rpm2cpio_cmd = bb.utils.which(os.getenv("PATH"), "rpm2cpio") 397 rpm2cpio_cmd = bb.utils.which(os.getenv("PATH"), "rpm2cpio")
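
extract() above replaces `dnf repoquery --queryformat %{location}` with `dnf repoquery --location`, whose last output line is a URL such as file:///path/to/repo/foo.rpm rather than a repo-relative name, so the local path is now recovered by slicing off the five-character "file:" prefix instead of joining onto rpm_repo_dir. A sketch of the new parsing; the sample output line is illustrative only:

    # Hypothetical `dnf repoquery --location` output: chatter, then a URL
    output = "Last metadata expiration check: 0:00:01 ago\n" \
             "file:///deploy/rpm/core2_64/curl-8.0.1-r0.core2_64.rpm"

    pkg_name = output.splitlines()[-1]
    if not pkg_name.endswith(".rpm"):
        raise RuntimeError("dnf could not find the package: %s" % output)

    # Strip the "file:" prefix; the leftover "///path" is still a valid
    # POSIX path, since consecutive slashes collapse to one.
    pkg_path = pkg_name[5:]
    print(pkg_path)
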
diff --git a/meta/lib/oe/package_manager/rpm/manifest.py b/meta/lib/oe/package_manager/rpm/manifest.py
index e6604b301f..6ee7c329f0 100644
--- a/meta/lib/oe/package_manager/rpm/manifest.py
+++ b/meta/lib/oe/package_manager/rpm/manifest.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
diff --git a/meta/lib/oe/package_manager/rpm/rootfs.py b/meta/lib/oe/package_manager/rpm/rootfs.py
index 00d07cd9cc..3ba5396320 100644
--- a/meta/lib/oe/package_manager/rpm/rootfs.py
+++ b/meta/lib/oe/package_manager/rpm/rootfs.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -108,7 +110,7 @@ class PkgRootfs(Rootfs):
108 if self.progress_reporter: 110 if self.progress_reporter:
109 self.progress_reporter.next_stage() 111 self.progress_reporter.next_stage()
110 112
111 self._setup_dbg_rootfs(['/etc', '/var/lib/rpm', '/var/cache/dnf', '/var/lib/dnf']) 113 self._setup_dbg_rootfs(['/etc/rpm', '/etc/rpmrc', '/etc/dnf', '/var/lib/rpm', '/var/cache/dnf', '/var/lib/dnf'])
112 114
113 execute_pre_post_process(self.d, rpm_post_process_cmds) 115 execute_pre_post_process(self.d, rpm_post_process_cmds)
114 116
diff --git a/meta/lib/oe/package_manager/rpm/sdk.py b/meta/lib/oe/package_manager/rpm/sdk.py
index c5f232431f..ea79fe050b 100644
--- a/meta/lib/oe/package_manager/rpm/sdk.py
+++ b/meta/lib/oe/package_manager/rpm/sdk.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -65,7 +67,12 @@ class PkgSdk(Sdk):
65 67
66 self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY')) 68 self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY'))
67 69
70 env_bkp = os.environ.copy()
71 os.environ['PATH'] = self.d.expand("${COREBASE}/scripts/nativesdk-intercept") + \
72 os.pathsep + os.environ["PATH"]
73
68 self.target_pm.run_intercepts(populate_sdk='target') 74 self.target_pm.run_intercepts(populate_sdk='target')
75 os.environ.update(env_bkp)
69 76
70 execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND")) 77 execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND"))
71 78
@@ -110,5 +117,6 @@ class PkgSdk(Sdk):
110 for f in glob.glob(os.path.join(self.sdk_output, "etc", "rpm*")): 117 for f in glob.glob(os.path.join(self.sdk_output, "etc", "rpm*")):
111 self.movefile(f, native_sysconf_dir) 118 self.movefile(f, native_sysconf_dir)
112 for f in glob.glob(os.path.join(self.sdk_output, "etc", "dnf", "*")): 119 for f in glob.glob(os.path.join(self.sdk_output, "etc", "dnf", "*")):
113 self.movefile(f, native_sysconf_dir) 120 self.mkdirhier(native_sysconf_dir + "/dnf")
121 self.movefile(f, native_sysconf_dir + "/dnf")
114 self.remove(os.path.join(self.sdk_output, "etc"), True) 122 self.remove(os.path.join(self.sdk_output, "etc"), True)
diff --git a/meta/lib/oe/packagedata.py b/meta/lib/oe/packagedata.py
index a82085a792..2d1d6ddeb7 100644
--- a/meta/lib/oe/packagedata.py
+++ b/meta/lib/oe/packagedata.py
@@ -1,9 +1,16 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
5import codecs 7import codecs
6import os 8import os
9import json
10import bb.compress.zstd
11import oe.path
12
13from glob import glob
7 14
8def packaged(pkg, d): 15def packaged(pkg, d):
9 return os.access(get_subpkgedata_fn(pkg, d) + '.packaged', os.R_OK) 16 return os.access(get_subpkgedata_fn(pkg, d) + '.packaged', os.R_OK)
@@ -19,7 +26,7 @@ def read_pkgdatafile(fn):
19 import re 26 import re
20 with open(fn, 'r') as f: 27 with open(fn, 'r') as f:
21 lines = f.readlines() 28 lines = f.readlines()
22 r = re.compile("([^:]+):\s*(.*)") 29 r = re.compile(r"(^.+?):\s+(.*)")
23 for l in lines: 30 for l in lines:
24 m = r.match(l) 31 m = r.match(l)
25 if m: 32 if m:
@@ -45,18 +52,30 @@ def read_pkgdata(pn, d):
45 return read_pkgdatafile(fn) 52 return read_pkgdatafile(fn)
46 53
47# 54#
48# Collapse FOO_pkg variables into FOO 55# Collapse FOO:pkg variables into FOO
49# 56#
50def read_subpkgdata_dict(pkg, d): 57def read_subpkgdata_dict(pkg, d):
51 ret = {} 58 ret = {}
52 subd = read_pkgdatafile(get_subpkgedata_fn(pkg, d)) 59 subd = read_pkgdatafile(get_subpkgedata_fn(pkg, d))
53 for var in subd: 60 for var in subd:
54 newvar = var.replace("_" + pkg, "") 61 newvar = var.replace(":" + pkg, "")
55 if newvar == var and var + "_" + pkg in subd: 62 if newvar == var and var + ":" + pkg in subd:
56 continue 63 continue
57 ret[newvar] = subd[var] 64 ret[newvar] = subd[var]
58 return ret 65 return ret
59 66
67def read_subpkgdata_extended(pkg, d):
68 import json
69 import bb.compress.zstd
70
71 fn = d.expand("${PKGDATA_DIR}/extended/%s.json.zstd" % pkg)
72 try:
73 num_threads = int(d.getVar("BB_NUMBER_THREADS"))
74 with bb.compress.zstd.open(fn, "rt", encoding="utf-8", num_threads=num_threads) as f:
75 return json.load(f)
76 except FileNotFoundError:
77 return None
78
60def _pkgmap(d): 79def _pkgmap(d):
61 """Return a dictionary mapping package to recipe name.""" 80 """Return a dictionary mapping package to recipe name."""
62 81
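
The new read_subpkgdata_extended() above is the read side of the zstd-compressed per-package JSON that emit_pkgdata() writes in the next hunk (extended/<pkg>.json.zstd, holding per-file sizes and debug-source mappings). A round-trip sketch, assuming a BitBake environment where bb.compress.zstd is importable (it wraps a zstd stream behind the usual open() interface, as both call sites show):

    import json
    import bb.compress.zstd   # BitBake's zstd file wrapper, as used above

    def write_extended(fn, data, num_threads=1):
        with bb.compress.zstd.open(fn, "wt", encoding="utf-8",
                                   num_threads=num_threads) as f:
            json.dump(data, f, sort_keys=True, separators=(",", ":"))

    def read_extended(fn, num_threads=1):
        try:
            with bb.compress.zstd.open(fn, "rt", encoding="utf-8",
                                       num_threads=num_threads) as f:
                return json.load(f)
        except FileNotFoundError:
            return None   # mirrors read_subpkgdata_extended() above

    write_extended("/tmp/demo.json.zstd",
                   {"files_info": {"/usr/bin/foo": {"size": 123}}})
    print(read_extended("/tmp/demo.json.zstd"))
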
@@ -96,3 +115,252 @@ def recipename(pkg, d):
96 """Return the recipe name for the given binary package name.""" 115 """Return the recipe name for the given binary package name."""
97 116
98 return pkgmap(d).get(pkg) 117 return pkgmap(d).get(pkg)
118
119def foreach_runtime_provider_pkgdata(d, rdep, include_rdep=False):
120 pkgdata_dir = d.getVar("PKGDATA_DIR")
121 possibles = set()
122 try:
123 possibles |= set(os.listdir("%s/runtime-rprovides/%s/" % (pkgdata_dir, rdep)))
124 except OSError:
125 pass
126
127 if include_rdep:
128 possibles.add(rdep)
129
130 for p in sorted(list(possibles)):
131 rdep_data = read_subpkgdata(p, d)
132 yield p, rdep_data
133
134def get_package_mapping(pkg, basepkg, d, depversions=None):
135 import oe.packagedata
136
137 data = oe.packagedata.read_subpkgdata(pkg, d)
138 key = "PKG:%s" % pkg
139
140 if key in data:
141 if bb.data.inherits_class('allarch', d) and bb.data.inherits_class('packagegroup', d) and pkg != data[key]:
142 bb.error("An allarch packagegroup shouldn't depend on packages which are dynamically renamed (%s to %s)" % (pkg, data[key]))
143 # Have to avoid undoing the write_extra_pkgs(global_variants...)
144 if bb.data.inherits_class('allarch', d) and not d.getVar('MULTILIB_VARIANTS') \
145 and data[key] == basepkg:
146 return pkg
147 if depversions == []:
148 # Avoid returning a mapping if the renamed package rprovides its original name
149 rprovkey = "RPROVIDES:%s" % pkg
150 if rprovkey in data:
151 if pkg in bb.utils.explode_dep_versions2(data[rprovkey]):
152 bb.note("%s rprovides %s, not replacing the latter" % (data[key], pkg))
153 return pkg
154 # Do map to rewritten package name
155 return data[key]
156
157 return pkg
158
159def get_package_additional_metadata(pkg_type, d):
160 base_key = "PACKAGE_ADD_METADATA"
161 for key in ("%s_%s" % (base_key, pkg_type.upper()), base_key):
162 if d.getVar(key, False) is None:
163 continue
164 d.setVarFlag(key, "type", "list")
165 if d.getVarFlag(key, "separator") is None:
166 d.setVarFlag(key, "separator", "\\n")
167 metadata_fields = [field.strip() for field in oe.data.typed_value(key, d)]
168 return "\n".join(metadata_fields).strip()
169
170def runtime_mapping_rename(varname, pkg, d):
171 #bb.note("%s before: %s" % (varname, d.getVar(varname)))
172
173 new_depends = {}
174 deps = bb.utils.explode_dep_versions2(d.getVar(varname) or "")
175 for depend, depversions in deps.items():
176 new_depend = get_package_mapping(depend, pkg, d, depversions)
177 if depend != new_depend:
178 bb.note("package name mapping done: %s -> %s" % (depend, new_depend))
179 new_depends[new_depend] = deps[depend]
180
181 d.setVar(varname, bb.utils.join_deps(new_depends, commasep=False))
182
183 #bb.note("%s after: %s" % (varname, d.getVar(varname)))
184
185def emit_pkgdata(pkgfiles, d):
186 def process_postinst_on_target(pkg, mlprefix):
187 pkgval = d.getVar('PKG:%s' % pkg)
188 if pkgval is None:
189 pkgval = pkg
190
191 defer_fragment = """
192if [ -n "$D" ]; then
193 $INTERCEPT_DIR/postinst_intercept delay_to_first_boot %s mlprefix=%s
194 exit 0
195fi
196""" % (pkgval, mlprefix)
197
198 postinst = d.getVar('pkg_postinst:%s' % pkg)
199 postinst_ontarget = d.getVar('pkg_postinst_ontarget:%s' % pkg)
200
201 if postinst_ontarget:
202 bb.debug(1, 'adding deferred pkg_postinst_ontarget() to pkg_postinst() for %s' % pkg)
203 if not postinst:
204 postinst = '#!/bin/sh\n'
205 postinst += defer_fragment
206 postinst += postinst_ontarget
207 d.setVar('pkg_postinst:%s' % pkg, postinst)
208
209 def add_set_e_to_scriptlets(pkg):
210 for scriptlet_name in ('pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm'):
211 scriptlet = d.getVar('%s:%s' % (scriptlet_name, pkg))
212 if scriptlet:
213 scriptlet_split = scriptlet.split('\n')
214 if scriptlet_split[0].startswith("#!"):
215 scriptlet = scriptlet_split[0] + "\nset -e\n" + "\n".join(scriptlet_split[1:])
216 else:
217 scriptlet = "set -e\n" + "\n".join(scriptlet_split[0:])
218 d.setVar('%s:%s' % (scriptlet_name, pkg), scriptlet)
219
220 def write_if_exists(f, pkg, var):
221 def encode(str):
222 import codecs
223 c = codecs.getencoder("unicode_escape")
224 return c(str)[0].decode("latin1")
225
226 val = d.getVar('%s:%s' % (var, pkg))
227 if val:
228 f.write('%s:%s: %s\n' % (var, pkg, encode(val)))
229 return val
230 val = d.getVar('%s' % (var))
231 if val:
232 f.write('%s: %s\n' % (var, encode(val)))
233 return val
234
235 def write_extra_pkgs(variants, pn, packages, pkgdatadir):
236 for variant in variants:
237 with open("%s/%s-%s" % (pkgdatadir, variant, pn), 'w') as fd:
238 fd.write("PACKAGES: %s\n" % ' '.join(
239 map(lambda pkg: '%s-%s' % (variant, pkg), packages.split())))
240
241 def write_extra_runtime_pkgs(variants, packages, pkgdatadir):
242 for variant in variants:
243 for pkg in packages.split():
244 ml_pkg = "%s-%s" % (variant, pkg)
245 subdata_file = "%s/runtime/%s" % (pkgdatadir, ml_pkg)
246 with open(subdata_file, 'w') as fd:
247 fd.write("PKG:%s: %s" % (ml_pkg, pkg))
248
249 packages = d.getVar('PACKAGES')
250 pkgdest = d.getVar('PKGDEST')
251 pkgdatadir = d.getVar('PKGDESTWORK')
252
253 data_file = pkgdatadir + d.expand("/${PN}")
254 with open(data_file, 'w') as fd:
255 fd.write("PACKAGES: %s\n" % packages)
256
257 pkgdebugsource = d.getVar("PKGDEBUGSOURCES") or []
258
259 pn = d.getVar('PN')
260 global_variants = (d.getVar('MULTILIB_GLOBAL_VARIANTS') or "").split()
261 variants = (d.getVar('MULTILIB_VARIANTS') or "").split()
262
263 if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
264 write_extra_pkgs(variants, pn, packages, pkgdatadir)
265
266 if bb.data.inherits_class('allarch', d) and not variants \
267 and not bb.data.inherits_class('packagegroup', d):
268 write_extra_pkgs(global_variants, pn, packages, pkgdatadir)
269
270 workdir = d.getVar('WORKDIR')
271
272 for pkg in packages.split():
273 pkgval = d.getVar('PKG:%s' % pkg)
274 if pkgval is None:
275 pkgval = pkg
276 d.setVar('PKG:%s' % pkg, pkg)
277
278 extended_data = {
279 "files_info": {}
280 }
281
282 pkgdestpkg = os.path.join(pkgdest, pkg)
283 files = {}
284 files_extra = {}
285 total_size = 0
286 seen = set()
287 for f in pkgfiles[pkg]:
288 fpath = os.sep + os.path.relpath(f, pkgdestpkg)
289
290 fstat = os.lstat(f)
291 files[fpath] = fstat.st_size
292
293 extended_data["files_info"].setdefault(fpath, {})
294 extended_data["files_info"][fpath]['size'] = fstat.st_size
295
296 if fstat.st_ino not in seen:
297 seen.add(fstat.st_ino)
298 total_size += fstat.st_size
299
300 if fpath in pkgdebugsource:
301 extended_data["files_info"][fpath]['debugsrc'] = pkgdebugsource[fpath]
302 del pkgdebugsource[fpath]
303
304 d.setVar('FILES_INFO:' + pkg , json.dumps(files, sort_keys=True))
305
306 process_postinst_on_target(pkg, d.getVar("MLPREFIX"))
307 add_set_e_to_scriptlets(pkg)
308
309 subdata_file = pkgdatadir + "/runtime/%s" % pkg
310 with open(subdata_file, 'w') as sf:
311 for var in (d.getVar('PKGDATA_VARS') or "").split():
312 val = write_if_exists(sf, pkg, var)
313
314 write_if_exists(sf, pkg, 'FILERPROVIDESFLIST')
315 for dfile in sorted((d.getVar('FILERPROVIDESFLIST:' + pkg) or "").split()):
316 write_if_exists(sf, pkg, 'FILERPROVIDES:' + dfile)
317
318 write_if_exists(sf, pkg, 'FILERDEPENDSFLIST')
319 for dfile in sorted((d.getVar('FILERDEPENDSFLIST:' + pkg) or "").split()):
320 write_if_exists(sf, pkg, 'FILERDEPENDS:' + dfile)
321
322 sf.write('%s:%s: %d\n' % ('PKGSIZE', pkg, total_size))
323
324 subdata_extended_file = pkgdatadir + "/extended/%s.json.zstd" % pkg
325 num_threads = int(d.getVar("BB_NUMBER_THREADS"))
326 with bb.compress.zstd.open(subdata_extended_file, "wt", encoding="utf-8", num_threads=num_threads) as f:
327 json.dump(extended_data, f, sort_keys=True, separators=(",", ":"))
328
329 # Symlinks needed for rprovides lookup
330 rprov = d.getVar('RPROVIDES:%s' % pkg) or d.getVar('RPROVIDES')
331 if rprov:
332 for p in bb.utils.explode_deps(rprov):
333 subdata_sym = pkgdatadir + "/runtime-rprovides/%s/%s" % (p, pkg)
334 bb.utils.mkdirhier(os.path.dirname(subdata_sym))
335 oe.path.relsymlink(subdata_file, subdata_sym, True)
336
337 allow_empty = d.getVar('ALLOW_EMPTY:%s' % pkg)
338 if not allow_empty:
339 allow_empty = d.getVar('ALLOW_EMPTY')
340 root = "%s/%s" % (pkgdest, pkg)
341 os.chdir(root)
342 g = glob('*')
343 if g or allow_empty == "1":
344 # Symlinks needed for reverse lookups (from the final package name)
345 subdata_sym = pkgdatadir + "/runtime-reverse/%s" % pkgval
346 oe.path.relsymlink(subdata_file, subdata_sym, True)
347
348 packagedfile = pkgdatadir + '/runtime/%s.packaged' % pkg
349 open(packagedfile, 'w').close()
350
351 if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
352 write_extra_runtime_pkgs(variants, packages, pkgdatadir)
353
354 if bb.data.inherits_class('allarch', d) and not variants \
355 and not bb.data.inherits_class('packagegroup', d):
356 write_extra_runtime_pkgs(global_variants, packages, pkgdatadir)
357
358def mapping_rename_hook(d):
359 """
360 Rewrite variables to account for package renaming in things
361 like debian.bbclass or manual PKG variable name changes
362 """
363 pkg = d.getVar("PKG")
364 oe.packagedata.runtime_mapping_rename("RDEPENDS", pkg, d)
365 oe.packagedata.runtime_mapping_rename("RRECOMMENDS", pkg, d)
366 oe.packagedata.runtime_mapping_rename("RSUGGESTS", pkg, d)
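
emit_pkgdata() above lays out three lookup trees under PKGDESTWORK: runtime/<pkg> (flat key: value files), runtime-reverse/<final-name> symlinks, and runtime-rprovides/<provided>/<pkg> symlinks. foreach_runtime_provider_pkgdata() walks the last tree: given a runtime dependency it yields each package whose RPROVIDES satisfies it. A filesystem-only sketch against a made-up layout (the real helper also reads each provider's pkgdata):

    import os

    # Hypothetical PKGDATA_DIR layout:
    #   runtime-rprovides/update-alternatives/opkg
    #   (i.e. the opkg package RPROVIDES update-alternatives)
    pkgdata_dir = "/tmp/pkgdata-demo"
    os.makedirs(os.path.join(pkgdata_dir, "runtime-rprovides",
                             "update-alternatives"), exist_ok=True)
    open(os.path.join(pkgdata_dir, "runtime-rprovides",
                      "update-alternatives", "opkg"), "w").close()

    def providers(pkgdata_dir, rdep, include_rdep=False):
        possibles = set()
        try:
            possibles |= set(os.listdir("%s/runtime-rprovides/%s/"
                                        % (pkgdata_dir, rdep)))
        except OSError:
            pass                      # no provider tree: nothing rprovides rdep
        if include_rdep:
            possibles.add(rdep)
        yield from sorted(possibles)  # real code also yields each pkg's pkgdata

    print(list(providers(pkgdata_dir, "update-alternatives", include_rdep=True)))
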
diff --git a/meta/lib/oe/packagegroup.py b/meta/lib/oe/packagegroup.py
index 8fcaecde82..7b7594751a 100644
--- a/meta/lib/oe/packagegroup.py
+++ b/meta/lib/oe/packagegroup.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
diff --git a/meta/lib/oe/patch.py b/meta/lib/oe/patch.py
index fccbedb519..58c6e34fe8 100644
--- a/meta/lib/oe/patch.py
+++ b/meta/lib/oe/patch.py
@@ -1,7 +1,12 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
7import os
8import shlex
9import subprocess
5import oe.path 10import oe.path
6import oe.types 11import oe.types
7 12
@@ -24,9 +29,6 @@ class CmdError(bb.BBHandledException):
24 29
25 30
26def runcmd(args, dir = None): 31def runcmd(args, dir = None):
27 import pipes
28 import subprocess
29
30 if dir: 32 if dir:
31 olddir = os.path.abspath(os.curdir) 33 olddir = os.path.abspath(os.curdir)
32 if not os.path.exists(dir): 34 if not os.path.exists(dir):
@@ -35,7 +37,7 @@ def runcmd(args, dir = None):
35 # print("cwd: %s -> %s" % (olddir, dir)) 37 # print("cwd: %s -> %s" % (olddir, dir))
36 38
37 try: 39 try:
38 args = [ pipes.quote(str(arg)) for arg in args ] 40 args = [ shlex.quote(str(arg)) for arg in args ]
39 cmd = " ".join(args) 41 cmd = " ".join(args)
40 # print("cmd: %s" % cmd) 42 # print("cmd: %s" % cmd)
41 proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) 43 proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
@@ -56,6 +58,7 @@ def runcmd(args, dir = None):
56 if dir: 58 if dir:
57 os.chdir(olddir) 59 os.chdir(olddir)
58 60
61
59class PatchError(Exception): 62class PatchError(Exception):
60 def __init__(self, msg): 63 def __init__(self, msg):
61 self.msg = msg 64 self.msg = msg
@@ -214,7 +217,7 @@ class PatchTree(PatchSet):
214 with open(self.seriespath, 'w') as f: 217 with open(self.seriespath, 'w') as f:
215 for p in patches: 218 for p in patches:
216 f.write(p) 219 f.write(p)
217 220
218 def Import(self, patch, force = None): 221 def Import(self, patch, force = None):
219 """""" 222 """"""
220 PatchSet.Import(self, patch, force) 223 PatchSet.Import(self, patch, force)
@@ -291,13 +294,32 @@ class PatchTree(PatchSet):
291 self.Pop(all=True) 294 self.Pop(all=True)
292 295
293class GitApplyTree(PatchTree): 296class GitApplyTree(PatchTree):
294 patch_line_prefix = '%% original patch' 297 notes_ref = "refs/notes/devtool"
295 ignore_commit_prefix = '%% ignore' 298 original_patch = 'original patch'
299 ignore_commit = 'ignore'
296 300
297 def __init__(self, dir, d): 301 def __init__(self, dir, d):
298 PatchTree.__init__(self, dir, d) 302 PatchTree.__init__(self, dir, d)
299 self.commituser = d.getVar('PATCH_GIT_USER_NAME') 303 self.commituser = d.getVar('PATCH_GIT_USER_NAME')
300 self.commitemail = d.getVar('PATCH_GIT_USER_EMAIL') 304 self.commitemail = d.getVar('PATCH_GIT_USER_EMAIL')
305 if not self._isInitialized(d):
306 self._initRepo()
307
308 def _isInitialized(self, d):
309 cmd = "git rev-parse --show-toplevel"
310 try:
311 output = runcmd(cmd.split(), self.dir).strip()
312 except CmdError as err:
313 ## runcmd returned non-zero, which most likely means 128:
314 ## not a git directory
315 return False
316 ## Make sure the repo root is the build dir itself, or under WORKDIR, so we don't break top-level git repos
317 return os.path.samefile(output, self.dir) or oe.path.is_path_parent(d.getVar('WORKDIR'), output)
318
319 def _initRepo(self):
320 runcmd("git init".split(), self.dir)
321 runcmd("git add .".split(), self.dir)
322 runcmd("git commit -a --allow-empty -m bitbake_patching_started".split(), self.dir)
301 323
302 @staticmethod 324 @staticmethod
303 def extractPatchHeader(patchfile): 325 def extractPatchHeader(patchfile):
@@ -431,7 +453,7 @@ class GitApplyTree(PatchTree):
431 # Prepare git command 453 # Prepare git command
432 cmd = ["git"] 454 cmd = ["git"]
433 GitApplyTree.gitCommandUserOptions(cmd, commituser, commitemail) 455 GitApplyTree.gitCommandUserOptions(cmd, commituser, commitemail)
434 cmd += ["commit", "-F", tmpfile] 456 cmd += ["commit", "-F", tmpfile, "--no-verify"]
435 # git doesn't like plain email addresses as authors 457 # git doesn't like plain email addresses as authors
436 if author and '<' in author: 458 if author and '<' in author:
437 cmd.append('--author="%s"' % author) 459 cmd.append('--author="%s"' % author)
@@ -440,44 +462,131 @@ class GitApplyTree(PatchTree):
440 return (tmpfile, cmd) 462 return (tmpfile, cmd)
441 463
442 @staticmethod 464 @staticmethod
443 def extractPatches(tree, startcommit, outdir, paths=None): 465 def addNote(repo, ref, key, value=None):
466 note = key + (": %s" % value if value else "")
467 notes_ref = GitApplyTree.notes_ref
468 runcmd(["git", "config", "notes.rewriteMode", "ignore"], repo)
469 runcmd(["git", "config", "notes.displayRef", notes_ref, notes_ref], repo)
470 runcmd(["git", "config", "notes.rewriteRef", notes_ref, notes_ref], repo)
471 runcmd(["git", "notes", "--ref", notes_ref, "append", "-m", note, ref], repo)
472
473 @staticmethod
474 def removeNote(repo, ref, key):
475 notes = GitApplyTree.getNotes(repo, ref)
476 notes = {k: v for k, v in notes.items() if k != key and not k.startswith(key + ":")}
477 runcmd(["git", "notes", "--ref", GitApplyTree.notes_ref, "remove", "--ignore-missing", ref], repo)
478 for note, value in notes.items():
479 GitApplyTree.addNote(repo, ref, note, value)
480
481 @staticmethod
482 def getNotes(repo, ref):
483 import re
484
485 note = None
486 try:
487 note = runcmd(["git", "notes", "--ref", GitApplyTree.notes_ref, "show", ref], repo)
488 prefix = ""
489 except CmdError:
490 note = runcmd(['git', 'show', '-s', '--format=%B', ref], repo)
491 prefix = "%% "
492
493 note_re = re.compile(r'^%s(.*?)(?::\s*(.*))?$' % prefix)
494 notes = dict()
495 for line in note.splitlines():
496 m = note_re.match(line)
497 if m:
498 notes[m.group(1)] = m.group(2)
499
500 return notes
501
502 @staticmethod
503 def commitIgnored(subject, dir=None, files=None, d=None):
504 if files:
505 runcmd(['git', 'add'] + files, dir)
506 cmd = ["git"]
507 GitApplyTree.gitCommandUserOptions(cmd, d=d)
508 cmd += ["commit", "-m", subject, "--no-verify"]
509 runcmd(cmd, dir)
510 GitApplyTree.addNote(dir, "HEAD", GitApplyTree.ignore_commit)
511
512 @staticmethod
513 def extractPatches(tree, startcommits, outdir, paths=None):
444 import tempfile 514 import tempfile
445 import shutil 515 import shutil
446 tempdir = tempfile.mkdtemp(prefix='oepatch') 516 tempdir = tempfile.mkdtemp(prefix='oepatch')
447 try: 517 try:
448 shellcmd = ["git", "format-patch", "--no-signature", "--no-numbered", startcommit, "-o", tempdir] 518 for name, rev in startcommits.items():
449 if paths: 519 shellcmd = ["git", "format-patch", "--no-signature", "--no-numbered", rev, "-o", tempdir]
450 shellcmd.append('--') 520 if paths:
451 shellcmd.extend(paths) 521 shellcmd.append('--')
452 out = runcmd(["sh", "-c", " ".join(shellcmd)], tree) 522 shellcmd.extend(paths)
453 if out: 523 out = runcmd(["sh", "-c", " ".join(shellcmd)], os.path.join(tree, name))
454 for srcfile in out.split(): 524 if out:
455 for encoding in ['utf-8', 'latin-1']: 525 for srcfile in out.split():
456 patchlines = [] 526 # This loop, which is used to remove any line that
457 outfile = None 527 # starts with "%% original patch", is kept for backwards
458 try: 528 # compatibility. If/when that compatibility is dropped,
459 with open(srcfile, 'r', encoding=encoding) as f: 529 # it can be replaced with code to just read the first
460 for line in f: 530 # line of the patch file to get the SHA-1, and the code
461 if line.startswith(GitApplyTree.patch_line_prefix): 531 # below that writes the modified patch file can be
462 outfile = line.split()[-1].strip() 532 # replaced with a simple file move.
463 continue 533 for encoding in ['utf-8', 'latin-1']:
464 if line.startswith(GitApplyTree.ignore_commit_prefix): 534 patchlines = []
465 continue 535 try:
466 patchlines.append(line) 536 with open(srcfile, 'r', encoding=encoding, newline='') as f:
467 except UnicodeDecodeError: 537 for line in f:
538 if line.startswith("%% " + GitApplyTree.original_patch):
539 continue
540 patchlines.append(line)
541 except UnicodeDecodeError:
542 continue
543 break
544 else:
545 raise PatchError('Unable to find a character encoding to decode %s' % srcfile)
546
547 sha1 = patchlines[0].split()[1]
548 notes = GitApplyTree.getNotes(os.path.join(tree, name), sha1)
549 if GitApplyTree.ignore_commit in notes:
468 continue 550 continue
469 break 551 outfile = notes.get(GitApplyTree.original_patch, os.path.basename(srcfile))
470 else: 552
471 raise PatchError('Unable to find a character encoding to decode %s' % srcfile) 553 bb.utils.mkdirhier(os.path.join(outdir, name))
472 554 with open(os.path.join(outdir, name, outfile), 'w') as of:
473 if not outfile: 555 for line in patchlines:
474 outfile = os.path.basename(srcfile) 556 of.write(line)
475 with open(os.path.join(outdir, outfile), 'w') as of:
476 for line in patchlines:
477 of.write(line)
478 finally: 557 finally:
479 shutil.rmtree(tempdir) 558 shutil.rmtree(tempdir)
480 559
560 def _need_dirty_check(self):
561 fetch = bb.fetch2.Fetch([], self.d)
562 check_dirtyness = False
563 for url in fetch.urls:
564 url_data = fetch.ud[url]
565 parm = url_data.parm
566 # a git url with a subpath param will surely be dirty,
567 # since the git tree we clone from is emptied of all
568 # files that are not under the subpath
569 if url_data.type == 'git' and parm.get('subpath'):
570 check_dirtyness = True
571 return check_dirtyness
572
573 def _commitpatch(self, patch, patchfilevar):
574 output = ""
575 # Add all files
576 shellcmd = ["git", "add", "-f", "-A", "."]
577 output += runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)
578 # Exclude the patches directory
579 shellcmd = ["git", "reset", "HEAD", self.patchdir]
580 output += runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)
581 # Commit the result
582 (tmpfile, shellcmd) = self.prepareCommit(patch['file'], self.commituser, self.commitemail)
583 try:
584 shellcmd.insert(0, patchfilevar)
585 output += runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)
586 finally:
587 os.remove(tmpfile)
588 return output
589
481 def _applypatch(self, patch, force = False, reverse = False, run = True): 590 def _applypatch(self, patch, force = False, reverse = False, run = True):
482 import shutil 591 import shutil
483 592
@@ -492,27 +601,26 @@ class GitApplyTree(PatchTree):
492 601
493 return runcmd(["sh", "-c", " ".join(shellcmd)], self.dir) 602 return runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)
494 603
495 # Add hooks which add a pointer to the original patch file name in the commit message
496 reporoot = (runcmd("git rev-parse --show-toplevel".split(), self.dir) or '').strip() 604 reporoot = (runcmd("git rev-parse --show-toplevel".split(), self.dir) or '').strip()
497 if not reporoot: 605 if not reporoot:
498 raise Exception("Cannot get repository root for directory %s" % self.dir) 606 raise Exception("Cannot get repository root for directory %s" % self.dir)
499 hooks_dir = os.path.join(reporoot, '.git', 'hooks') 607
500 hooks_dir_backup = hooks_dir + '.devtool-orig' 608 patch_applied = True
501 if os.path.lexists(hooks_dir_backup):
502 raise Exception("Git hooks backup directory already exists: %s" % hooks_dir_backup)
503 if os.path.lexists(hooks_dir):
504 shutil.move(hooks_dir, hooks_dir_backup)
505 os.mkdir(hooks_dir)
506 commithook = os.path.join(hooks_dir, 'commit-msg')
507 applyhook = os.path.join(hooks_dir, 'applypatch-msg')
508 with open(commithook, 'w') as f:
509 # NOTE: the formatting here is significant; if you change it you'll also need to
510 # change other places which read it back
511 f.write('echo "\n%s: $PATCHFILE" >> $1' % GitApplyTree.patch_line_prefix)
512 os.chmod(commithook, 0o755)
513 shutil.copy2(commithook, applyhook)
514 try: 609 try:
515 patchfilevar = 'PATCHFILE="%s"' % os.path.basename(patch['file']) 610 patchfilevar = 'PATCHFILE="%s"' % os.path.basename(patch['file'])
611 if self._need_dirty_check():
612 # Check dirtyness of the tree
613 try:
614 output = runcmd(["git", "--work-tree=%s" % reporoot, "status", "--short"])
615 except CmdError:
616 pass
617 else:
618 if output:
619 # The tree is dirty, no need to try to apply patches with git anymore
620 # since they fail, fallback directly to patch
621 output = PatchTree._applypatch(self, patch, force, reverse, run)
622 output += self._commitpatch(patch, patchfilevar)
623 return output
516 try: 624 try:
517 shellcmd = [patchfilevar, "git", "--work-tree=%s" % reporoot] 625 shellcmd = [patchfilevar, "git", "--work-tree=%s" % reporoot]
518 self.gitCommandUserOptions(shellcmd, self.commituser, self.commitemail) 626 self.gitCommandUserOptions(shellcmd, self.commituser, self.commitemail)
@@ -539,24 +647,14 @@ class GitApplyTree(PatchTree):
539 except CmdError: 647 except CmdError:
540 # Fall back to patch 648 # Fall back to patch
541 output = PatchTree._applypatch(self, patch, force, reverse, run) 649 output = PatchTree._applypatch(self, patch, force, reverse, run)
542 # Add all files 650 output += self._commitpatch(patch, patchfilevar)
543 shellcmd = ["git", "add", "-f", "-A", "."]
544 output += runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)
545 # Exclude the patches directory
546 shellcmd = ["git", "reset", "HEAD", self.patchdir]
547 output += runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)
548 # Commit the result
549 (tmpfile, shellcmd) = self.prepareCommit(patch['file'], self.commituser, self.commitemail)
550 try:
551 shellcmd.insert(0, patchfilevar)
552 output += runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)
553 finally:
554 os.remove(tmpfile)
555 return output 651 return output
652 except:
653 patch_applied = False
654 raise
556 finally: 655 finally:
557 shutil.rmtree(hooks_dir) 656 if patch_applied:
558 if os.path.lexists(hooks_dir_backup): 657 GitApplyTree.addNote(self.dir, "HEAD", GitApplyTree.original_patch, os.path.basename(patch['file']))
559 shutil.move(hooks_dir_backup, hooks_dir)
560 658
561 659
562class QuiltTree(PatchSet): 660class QuiltTree(PatchSet):
@@ -579,6 +677,8 @@ class QuiltTree(PatchSet):
579 677
580 def Clean(self): 678 def Clean(self):
581 try: 679 try:
680 # make sure that patches/series file exists before quilt pop to keep quilt-0.67 happy
681 open(os.path.join(self.dir, "patches","series"), 'a').close()
582 self._runcmd(["pop", "-a", "-f"]) 682 self._runcmd(["pop", "-a", "-f"])
583 oe.path.remove(os.path.join(self.dir, "patches","series")) 683 oe.path.remove(os.path.join(self.dir, "patches","series"))
584 except Exception: 684 except Exception:
@@ -715,8 +815,9 @@ class NOOPResolver(Resolver):
715 self.patchset.Push() 815 self.patchset.Push()
716 except Exception: 816 except Exception:
717 import sys 817 import sys
718 os.chdir(olddir)
719 raise 818 raise
819 finally:
820 os.chdir(olddir)
720 821
721# Patch resolver which relies on the user doing all the work involved in the 822# Patch resolver which relies on the user doing all the work involved in the
722# resolution, with the exception of refreshing the remote copy of the patch 823# resolution, with the exception of refreshing the remote copy of the patch
@@ -776,12 +877,12 @@ class UserResolver(Resolver):
776 # User did not fix the problem. Abort. 877 # User did not fix the problem. Abort.
777 raise PatchError("Patch application failed, and user did not fix and refresh the patch.") 878 raise PatchError("Patch application failed, and user did not fix and refresh the patch.")
778 except Exception: 879 except Exception:
779 os.chdir(olddir)
780 raise 880 raise
781 os.chdir(olddir) 881 finally:
882 os.chdir(olddir)
782 883
783 884
784def patch_path(url, fetch, workdir, expand=True): 885def patch_path(url, fetch, unpackdir, expand=True):
785 """Return the local path of a patch, or return nothing if this isn't a patch""" 886 """Return the local path of a patch, or return nothing if this isn't a patch"""
786 887
787 local = fetch.localpath(url) 888 local = fetch.localpath(url)
@@ -790,7 +891,7 @@ def patch_path(url, fetch, workdir, expand=True):
790 base, ext = os.path.splitext(os.path.basename(local)) 891 base, ext = os.path.splitext(os.path.basename(local))
791 if ext in ('.gz', '.bz2', '.xz', '.Z'): 892 if ext in ('.gz', '.bz2', '.xz', '.Z'):
792 if expand: 893 if expand:
793 local = os.path.join(workdir, base) 894 local = os.path.join(unpackdir, base)
794 ext = os.path.splitext(base)[1] 895 ext = os.path.splitext(base)[1]
795 896
796 urldata = fetch.ud[url] 897 urldata = fetch.ud[url]
@@ -804,12 +905,12 @@ def patch_path(url, fetch, workdir, expand=True):
804 return local 905 return local
805 906
806def src_patches(d, all=False, expand=True): 907def src_patches(d, all=False, expand=True):
807 workdir = d.getVar('WORKDIR') 908 unpackdir = d.getVar('UNPACKDIR')
808 fetch = bb.fetch2.Fetch([], d) 909 fetch = bb.fetch2.Fetch([], d)
809 patches = [] 910 patches = []
810 sources = [] 911 sources = []
811 for url in fetch.urls: 912 for url in fetch.urls:
812 local = patch_path(url, fetch, workdir, expand) 913 local = patch_path(url, fetch, unpackdir, expand)
813 if not local: 914 if not local:
814 if all: 915 if all:
815 local = fetch.localpath(url) 916 local = fetch.localpath(url)
@@ -898,4 +999,3 @@ def should_apply(parm, d):
898 return False, "applies to later version" 999 return False, "applies to later version"
899 1000
900 return True, None 1001 return True, None
901
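
A note on the GitApplyTree rework above: instead of installing temporary commit-msg/applypatch-msg hooks that append a patch-name line to every commit message, the applied patch's original file name is now recorded as a git note against HEAD, and extractPatches() reads that note back to name the exported patch. A minimal sketch of the underlying git-notes pattern, assuming an illustrative notes ref; the helper names here are stand-ins, not the exact oe.patch API:

    import subprocess

    NOTES_REF = "refs/notes/devtool"   # assumed ref name, for illustration only

    def add_note(repo, rev, key, value=None):
        note = key if value is None else "%s: %s" % (key, value)
        subprocess.check_call(
            ["git", "notes", "--ref", NOTES_REF, "append", "-m", note, rev],
            cwd=repo)

    def get_notes(repo, rev):
        notes = {}
        try:
            out = subprocess.check_output(
                ["git", "notes", "--ref", NOTES_REF, "show", rev],
                cwd=repo, text=True, stderr=subprocess.DEVNULL)
        except subprocess.CalledProcessError:
            return notes   # no note attached to this commit
        for line in out.splitlines():
            if ": " in line:
                key, value = line.split(": ", 1)
                notes[key] = value
            elif line:
                notes[line] = None
        return notes
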
diff --git a/meta/lib/oe/path.py b/meta/lib/oe/path.py
index c8d8ad05b9..5d21cdcbdf 100644
--- a/meta/lib/oe/path.py
+++ b/meta/lib/oe/path.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -123,7 +125,8 @@ def copyhardlinktree(src, dst):
123 if os.path.isdir(src): 125 if os.path.isdir(src):
124 if len(glob.glob('%s/.??*' % src)) > 0: 126 if len(glob.glob('%s/.??*' % src)) > 0:
125 source = './.??* ' 127 source = './.??* '
126 source += './*' 128 if len(glob.glob('%s/**' % src)) > 0:
129 source += './*'
127 s_dir = src 130 s_dir = src
128 else: 131 else:
129 source = src 132 source = src
@@ -169,6 +172,9 @@ def symlink(source, destination, force=False):
169 if e.errno != errno.EEXIST or os.readlink(destination) != source: 172 if e.errno != errno.EEXIST or os.readlink(destination) != source:
170 raise 173 raise
171 174
175def relsymlink(target, name, force=False):
176 symlink(os.path.relpath(target, os.path.dirname(name)), name, force=force)
177
172def find(dir, **walkoptions): 178def find(dir, **walkoptions):
173 """ Given a directory, recurses into that directory, 179 """ Given a directory, recurses into that directory,
174 returning all files as absolute paths. """ 180 returning all files as absolute paths. """
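
The new relsymlink() helper converts the target into a path relative to the link's own directory before delegating to symlink(), so the link stays valid if the enclosing tree is relocated. A quick illustration of the path arithmetic (the paths are invented for the example):

    import os

    target = "/deploy/images/core-image.ext4"   # hypothetical file to link to
    name = "/deploy/latest/rootfs.ext4"         # hypothetical link location
    # relsymlink(target, name) would create the link with this relative value:
    print(os.path.relpath(target, os.path.dirname(name)))  # ../images/core-image.ext4
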
diff --git a/meta/lib/oe/prservice.py b/meta/lib/oe/prservice.py
index fcdbe66c19..c41242c878 100644
--- a/meta/lib/oe/prservice.py
+++ b/meta/lib/oe/prservice.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -7,11 +9,10 @@ def prserv_make_conn(d, check = False):
7 host_params = list([_f for _f in (d.getVar("PRSERV_HOST") or '').split(':') if _f]) 9 host_params = list([_f for _f in (d.getVar("PRSERV_HOST") or '').split(':') if _f])
8 try: 10 try:
9 conn = None 11 conn = None
10 conn = prserv.serv.PRServerConnection(host_params[0], int(host_params[1])) 12 conn = prserv.serv.connect(host_params[0], int(host_params[1]))
11 if check: 13 if check:
12 if not conn.ping(): 14 if not conn.ping():
13 raise Exception('service not available') 15 raise Exception('service not available')
14 d.setVar("__PRSERV_CONN",conn)
15 except Exception as exc: 16 except Exception as exc:
16 bb.fatal("Connecting to PR service %s:%s failed: %s" % (host_params[0], host_params[1], str(exc))) 17 bb.fatal("Connecting to PR service %s:%s failed: %s" % (host_params[0], host_params[1], str(exc)))
17 18
@@ -22,31 +23,29 @@ def prserv_dump_db(d):
22 bb.error("Not using network based PR service") 23 bb.error("Not using network based PR service")
23 return None 24 return None
24 25
25 conn = d.getVar("__PRSERV_CONN") 26 conn = prserv_make_conn(d)
26 if conn is None: 27 if conn is None:
27 conn = prserv_make_conn(d) 28 bb.error("Making connection failed to remote PR service")
28 if conn is None: 29 return None
29 bb.error("Making connection failed to remote PR service")
30 return None
31 30
32 #dump db 31 #dump db
33 opt_version = d.getVar('PRSERV_DUMPOPT_VERSION') 32 opt_version = d.getVar('PRSERV_DUMPOPT_VERSION')
34 opt_pkgarch = d.getVar('PRSERV_DUMPOPT_PKGARCH') 33 opt_pkgarch = d.getVar('PRSERV_DUMPOPT_PKGARCH')
35 opt_checksum = d.getVar('PRSERV_DUMPOPT_CHECKSUM') 34 opt_checksum = d.getVar('PRSERV_DUMPOPT_CHECKSUM')
36 opt_col = ("1" == d.getVar('PRSERV_DUMPOPT_COL')) 35 opt_col = ("1" == d.getVar('PRSERV_DUMPOPT_COL'))
37 return conn.export(opt_version, opt_pkgarch, opt_checksum, opt_col) 36 d = conn.export(opt_version, opt_pkgarch, opt_checksum, opt_col)
37 conn.close()
38 return d
38 39
39def prserv_import_db(d, filter_version=None, filter_pkgarch=None, filter_checksum=None): 40def prserv_import_db(d, filter_version=None, filter_pkgarch=None, filter_checksum=None):
40 if not d.getVar('PRSERV_HOST'): 41 if not d.getVar('PRSERV_HOST'):
41 bb.error("Not using network based PR service") 42 bb.error("Not using network based PR service")
42 return None 43 return None
43 44
44 conn = d.getVar("__PRSERV_CONN") 45 conn = prserv_make_conn(d)
45 if conn is None: 46 if conn is None:
46 conn = prserv_make_conn(d) 47 bb.error("Making connection failed to remote PR service")
47 if conn is None: 48 return None
48 bb.error("Making connection failed to remote PR service")
49 return None
50 #get the entry values 49 #get the entry values
51 imported = [] 50 imported = []
52 prefix = "PRAUTO$" 51 prefix = "PRAUTO$"
@@ -70,6 +69,7 @@ def prserv_import_db(d, filter_version=None, filter_pkgarch=None, filter_checksu
70 bb.error("importing(%s,%s,%s,%d) failed. DB may have larger value %d" % (version,pkgarch,checksum,value,ret)) 69 bb.error("importing(%s,%s,%s,%d) failed. DB may have larger value %d" % (version,pkgarch,checksum,value,ret))
71 else: 70 else:
72 imported.append((version,pkgarch,checksum,value)) 71 imported.append((version,pkgarch,checksum,value))
72 conn.close()
73 return imported 73 return imported
74 74
75def prserv_export_tofile(d, metainfo, datainfo, lockdown, nomax=False): 75def prserv_export_tofile(d, metainfo, datainfo, lockdown, nomax=False):
@@ -78,8 +78,7 @@ def prserv_export_tofile(d, metainfo, datainfo, lockdown, nomax=False):
78 bb.utils.mkdirhier(d.getVar('PRSERV_DUMPDIR')) 78 bb.utils.mkdirhier(d.getVar('PRSERV_DUMPDIR'))
79 df = d.getVar('PRSERV_DUMPFILE') 79 df = d.getVar('PRSERV_DUMPFILE')
80 #write data 80 #write data
81 lf = bb.utils.lockfile("%s.lock" % df) 81 with open(df, "a") as f, bb.utils.fileslocked(["%s.lock" % df]) as locks:
82 with open(df, "a") as f:
83 if metainfo: 82 if metainfo:
84 #dump column info 83 #dump column info
85 f.write("#PR_core_ver = \"%s\"\n\n" % metainfo['core_ver']); 84 f.write("#PR_core_ver = \"%s\"\n\n" % metainfo['core_ver']);
@@ -113,7 +112,6 @@ def prserv_export_tofile(d, metainfo, datainfo, lockdown, nomax=False):
113 if not nomax: 112 if not nomax:
114 for i in idx: 113 for i in idx:
115 f.write("PRAUTO_%s_%s = \"%s\"\n" % (str(datainfo[idx[i]]['version']),str(datainfo[idx[i]]['pkgarch']),str(datainfo[idx[i]]['value']))) 114 f.write("PRAUTO_%s_%s = \"%s\"\n" % (str(datainfo[idx[i]]['version']),str(datainfo[idx[i]]['pkgarch']),str(datainfo[idx[i]]['value'])))
116 bb.utils.unlockfile(lf)
117 115
118def prserv_check_avail(d): 116def prserv_check_avail(d):
119 host_params = list([_f for _f in (d.getVar("PRSERV_HOST") or '').split(':') if _f]) 117 host_params = list([_f for _f in (d.getVar("PRSERV_HOST") or '').split(':') if _f])
@@ -125,4 +123,5 @@ def prserv_check_avail(d):
125 except TypeError: 123 except TypeError:
126 bb.fatal('Undefined/incorrect PRSERV_HOST value. Format: "host:port"') 124 bb.fatal('Undefined/incorrect PRSERV_HOST value. Format: "host:port"')
127 else: 125 else:
128 prserv_make_conn(d, True) 126 conn = prserv_make_conn(d, True)
127 conn.close()
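
With the cached __PRSERV_CONN datastore variable gone, each prservice helper now opens its own connection, uses it, and closes it. A sketch of that lifecycle as a context manager, assuming the object returned by prserv.serv.connect() exposes ping() and close() as the code above implies (the stand-in classes are illustrative):

    import contextlib

    class _FakeConn:
        # stand-in for the object prserv.serv.connect() returns
        def ping(self):
            return True
        def close(self):
            pass

    def connect(host, port):
        # stand-in for prserv.serv.connect(); assumed signature
        return _FakeConn()

    @contextlib.contextmanager
    def pr_connection(host, port):
        conn = connect(host, port)
        try:
            yield conn
        finally:
            conn.close()   # every helper now closes its own connection

    with pr_connection("localhost", 8585) as conn:
        assert conn.ping()
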
diff --git a/meta/lib/oe/qa.py b/meta/lib/oe/qa.py
index e8a854a302..f8ae3c743f 100644
--- a/meta/lib/oe/qa.py
+++ b/meta/lib/oe/qa.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -48,6 +50,9 @@ class ELFFile:
48 return self 50 return self
49 51
50 def __exit__(self, exc_type, exc_value, traceback): 52 def __exit__(self, exc_type, exc_value, traceback):
53 self.close()
54
55 def close(self):
51 if self.data: 56 if self.data:
52 self.data.close() 57 self.data.close()
53 58
@@ -128,6 +133,9 @@ class ELFFile:
128 """ 133 """
129 return self.getShort(ELFFile.E_MACHINE) 134 return self.getShort(ELFFile.E_MACHINE)
130 135
136 def set_objdump(self, cmd, output):
137 self.objdump_output[cmd] = output
138
131 def run_objdump(self, cmd, d): 139 def run_objdump(self, cmd, d):
132 import bb.process 140 import bb.process
133 import sys 141 import sys
@@ -171,6 +179,57 @@ def elf_machine_to_string(machine):
171 except: 179 except:
172 return "Unknown (%s)" % repr(machine) 180 return "Unknown (%s)" % repr(machine)
173 181
182def write_error(type, error, d):
183 logfile = d.getVar('QA_LOGFILE')
184 if logfile:
185 p = d.getVar('P')
186 with open(logfile, "a+") as f:
187 f.write("%s: %s [%s]\n" % (p, error, type))
188
189def handle_error(error_class, error_msg, d):
190 if error_class in (d.getVar("ERROR_QA") or "").split():
191 write_error(error_class, error_msg, d)
192 bb.error("QA Issue: %s [%s]" % (error_msg, error_class))
193 d.setVar("QA_ERRORS_FOUND", "True")
194 return False
195 elif error_class in (d.getVar("WARN_QA") or "").split():
196 write_error(error_class, error_msg, d)
197 bb.warn("QA Issue: %s [%s]" % (error_msg, error_class))
198 else:
199 bb.note("QA Issue: %s [%s]" % (error_msg, error_class))
200 return True
201
202def add_message(messages, section, new_msg):
203 if section not in messages:
204 messages[section] = new_msg
205 else:
206 messages[section] = messages[section] + "\n" + new_msg
207
208def exit_with_message_if_errors(message, d):
209 qa_fatal_errors = bb.utils.to_boolean(d.getVar("QA_ERRORS_FOUND"), False)
210 if qa_fatal_errors:
211 bb.fatal(message)
212
213def exit_if_errors(d):
214 exit_with_message_if_errors("Fatal QA errors were found, failing task.", d)
215
216def check_upstream_status(fullpath):
217 import re
218 kinda_status_re = re.compile(r"^.*upstream.*status.*$", re.IGNORECASE | re.MULTILINE)
219 strict_status_re = re.compile(r"^Upstream-Status: (Pending|Submitted|Denied|Inappropriate|Backport|Inactive-Upstream)( .+)?$", re.MULTILINE)
220 guidelines = "https://docs.yoctoproject.org/contributor-guide/recipe-style-guide.html#patch-upstream-status"
221
222 with open(fullpath, encoding='utf-8', errors='ignore') as f:
223 file_content = f.read()
224 match_kinda = kinda_status_re.search(file_content)
225 match_strict = strict_status_re.search(file_content)
226
227 if not match_strict:
228 if match_kinda:
229 return "Malformed Upstream-Status in patch\n%s\nPlease correct according to %s :\n%s" % (fullpath, guidelines, match_kinda.group(0))
230 else:
231 return "Missing Upstream-Status in patch\n%s\nPlease add according to %s ." % (fullpath, guidelines)
232
174if __name__ == "__main__": 233if __name__ == "__main__":
175 import sys 234 import sys
176 235
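
check_upstream_status() above distinguishes three cases: a well-formed Upstream-Status header, a malformed one (something upstream/status-like is present but does not match the strict form), and a missing one. A self-contained sketch of that classification using the same two regexes:

    import re

    kinda = re.compile(r"^.*upstream.*status.*$", re.IGNORECASE | re.MULTILINE)
    strict = re.compile(r"^Upstream-Status: (Pending|Submitted|Denied|Inappropriate|Backport|Inactive-Upstream)( .+)?$", re.MULTILINE)

    for header in ["Upstream-Status: Backport",   # well-formed
                   "upstream status: backported", # malformed
                   "Fix a crash in foo"]:         # missing
        if strict.search(header):
            print("ok       :", header)
        elif kinda.search(header):
            print("malformed:", header)
        else:
            print("missing  :", header)
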
diff --git a/meta/lib/oe/recipeutils.py b/meta/lib/oe/recipeutils.py
index 407d168894..de1fbdd3a8 100644
--- a/meta/lib/oe/recipeutils.py
+++ b/meta/lib/oe/recipeutils.py
@@ -24,9 +24,9 @@ from collections import OrderedDict, defaultdict
24from bb.utils import vercmp_string 24from bb.utils import vercmp_string
25 25
26# Help us to find places to insert values 26# Help us to find places to insert values
27recipe_progression = ['SUMMARY', 'DESCRIPTION', 'AUTHOR', 'HOMEPAGE', 'BUGTRACKER', 'SECTION', 'LICENSE', 'LICENSE_FLAGS', 'LIC_FILES_CHKSUM', 'PROVIDES', 'DEPENDS', 'PR', 'PV', 'SRCREV', 'SRCPV', 'SRC_URI', 'S', 'do_fetch()', 'do_unpack()', 'do_patch()', 'EXTRA_OECONF', 'EXTRA_OECMAKE', 'EXTRA_OESCONS', 'do_configure()', 'EXTRA_OEMAKE', 'do_compile()', 'do_install()', 'do_populate_sysroot()', 'INITSCRIPT', 'USERADD', 'GROUPADD', 'PACKAGES', 'FILES', 'RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS', 'RPROVIDES', 'RREPLACES', 'RCONFLICTS', 'ALLOW_EMPTY', 'populate_packages()', 'do_package()', 'do_deploy()', 'BBCLASSEXTEND'] 27recipe_progression = ['SUMMARY', 'DESCRIPTION', 'HOMEPAGE', 'BUGTRACKER', 'SECTION', 'LICENSE', 'LICENSE_FLAGS', 'LIC_FILES_CHKSUM', 'PROVIDES', 'DEPENDS', 'PR', 'PV', 'SRCREV', 'SRC_URI', 'S', 'do_fetch()', 'do_unpack()', 'do_patch()', 'EXTRA_OECONF', 'EXTRA_OECMAKE', 'EXTRA_OESCONS', 'do_configure()', 'EXTRA_OEMAKE', 'do_compile()', 'do_install()', 'do_populate_sysroot()', 'INITSCRIPT', 'USERADD', 'GROUPADD', 'PACKAGES', 'FILES', 'RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS', 'RPROVIDES', 'RREPLACES', 'RCONFLICTS', 'ALLOW_EMPTY', 'populate_packages()', 'do_package()', 'do_deploy()', 'BBCLASSEXTEND']
28# Variables that sometimes are a bit long but shouldn't be wrapped 28# Variables that sometimes are a bit long but shouldn't be wrapped
29nowrap_vars = ['SUMMARY', 'HOMEPAGE', 'BUGTRACKER', r'SRC_URI\[(.+\.)?md5sum\]', r'SRC_URI\[(.+\.)?sha256sum\]'] 29nowrap_vars = ['SUMMARY', 'HOMEPAGE', 'BUGTRACKER', r'SRC_URI\[(.+\.)?md5sum\]', r'SRC_URI\[(.+\.)?sha[0-9]+sum\]']
30list_vars = ['SRC_URI', 'LIC_FILES_CHKSUM'] 30list_vars = ['SRC_URI', 'LIC_FILES_CHKSUM']
31meta_vars = ['SUMMARY', 'DESCRIPTION', 'HOMEPAGE', 'BUGTRACKER', 'SECTION'] 31meta_vars = ['SUMMARY', 'DESCRIPTION', 'HOMEPAGE', 'BUGTRACKER', 'SECTION']
32 32
@@ -47,7 +47,7 @@ def simplify_history(history, d):
47 continue 47 continue
48 has_set = True 48 has_set = True
49 elif event['op'] in ('append', 'prepend', 'postdot', 'predot'): 49 elif event['op'] in ('append', 'prepend', 'postdot', 'predot'):
50 # Reminder: "append" and "prepend" mean += and =+ respectively, NOT _append / _prepend 50 # Reminder: "append" and "prepend" mean += and =+ respectively, NOT :append / :prepend
51 if has_set: 51 if has_set:
52 continue 52 continue
53 ret_history.insert(0, event) 53 ret_history.insert(0, event)
@@ -342,7 +342,7 @@ def patch_recipe(d, fn, varvalues, patch=False, relpath='', redirect_output=None
342 def override_applicable(hevent): 342 def override_applicable(hevent):
343 op = hevent['op'] 343 op = hevent['op']
344 if '[' in op: 344 if '[' in op:
345 opoverrides = op.split('[')[1].split(']')[0].split('_') 345 opoverrides = op.split('[')[1].split(']')[0].split(':')
346 for opoverride in opoverrides: 346 for opoverride in opoverrides:
347 if not opoverride in overrides: 347 if not opoverride in overrides:
348 return False 348 return False
@@ -368,13 +368,13 @@ def patch_recipe(d, fn, varvalues, patch=False, relpath='', redirect_output=None
368 recipe_set = True 368 recipe_set = True
369 if not recipe_set: 369 if not recipe_set:
370 for event in history: 370 for event in history:
371 if event['op'].startswith('_remove'): 371 if event['op'].startswith(':remove'):
372 continue 372 continue
373 if not override_applicable(event): 373 if not override_applicable(event):
374 continue 374 continue
375 newvalue = value.replace(event['detail'], '') 375 newvalue = value.replace(event['detail'], '')
376 if newvalue == value and os.path.abspath(event['file']) == fn and event['op'].startswith('_'): 376 if newvalue == value and os.path.abspath(event['file']) == fn and event['op'].startswith(':'):
377 op = event['op'].replace('[', '_').replace(']', '') 377 op = event['op'].replace('[', ':').replace(']', '')
378 extravals[var + op] = None 378 extravals[var + op] = None
379 value = newvalue 379 value = newvalue
380 vals[var] = ('+=', value) 380 vals[var] = ('+=', value)
@@ -414,15 +414,13 @@ def copy_recipe_files(d, tgt_dir, whole_dir=False, download=True, all_variants=F
414 414
415 fetch_urls(d) 415 fetch_urls(d)
416 if all_variants: 416 if all_variants:
417 # Get files for other variants e.g. in the case of a SRC_URI_append 417 # Get files for other variants e.g. in the case of a SRC_URI:append
418 localdata = bb.data.createCopy(d) 418 localdata = bb.data.createCopy(d)
419 variants = (localdata.getVar('BBCLASSEXTEND') or '').split() 419 variants = (localdata.getVar('BBCLASSEXTEND') or '').split()
420 if variants: 420 if variants:
421 # Ensure we handle class-target if we're dealing with one of the variants 421 # Ensure we handle class-target if we're dealing with one of the variants
422 variants.append('target') 422 variants.append('target')
423 for variant in variants: 423 for variant in variants:
424 if variant.startswith("devupstream"):
425 localdata.setVar('SRCPV', 'git')
426 localdata.setVar('CLASSOVERRIDE', 'class-%s' % variant) 424 localdata.setVar('CLASSOVERRIDE', 'class-%s' % variant)
427 fetch_urls(localdata) 425 fetch_urls(localdata)
428 426
@@ -666,19 +664,23 @@ def get_bbappend_path(d, destlayerdir, wildcardver=False):
666 return (appendpath, pathok) 664 return (appendpath, pathok)
667 665
668 666
669def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False, machine=None, extralines=None, removevalues=None, redirect_output=None): 667def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False, machine=None, extralines=None, removevalues=None, redirect_output=None, params=None, update_original_recipe=False):
670 """ 668 """
671 Writes a bbappend file for a recipe 669 Writes a bbappend file for a recipe
672 Parameters: 670 Parameters:
673 rd: data dictionary for the recipe 671 rd: data dictionary for the recipe
674 destlayerdir: base directory of the layer to place the bbappend in 672 destlayerdir: base directory of the layer to place the bbappend in
675 (subdirectory path from there will be determined automatically) 673 (subdirectory path from there will be determined automatically)
676 srcfiles: dict of source files to add to SRC_URI, where the value 674 srcfiles: dict of source files to add to SRC_URI, where the key
677 is the full path to the file to be added, and the value is the 675 is the full path to the file to be added, and the value is a
678 original filename as it would appear in SRC_URI or None if it 676 dict with the following optional keys:
679 isn't already present. You may pass None for this parameter if 677 path: the original filename as it would appear in SRC_URI
680 you simply want to specify your own content via the extralines 678 or None if it isn't already present.
681 parameter. 679 patchdir: the patchdir parameter
680 newname: the name to give to the newly added file. None to use
681 the default value: basename(path)
682 You may pass None for this parameter if you simply want to specify
683 your own content via the extralines parameter.
682 install: dict mapping entries in srcfiles to a tuple of two elements: 684 install: dict mapping entries in srcfiles to a tuple of two elements:
683 install path (*without* ${D} prefix) and permission value (as a 685 install path (*without* ${D} prefix) and permission value (as a
684 string, e.g. '0644'). 686 string, e.g. '0644').
@@ -696,18 +698,32 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
696 redirect_output: 698 redirect_output:
697 If specified, redirects writing the output file to the 699 If specified, redirects writing the output file to the
698 specified directory (for dry-run purposes) 700 specified directory (for dry-run purposes)
701 params:
702 Parameters to use when adding entries to SRC_URI. If specified,
703 should be a list of dicts with the same length as srcfiles.
704 update_original_recipe:
705 Force updating the original recipe instead of creating/updating
706 a bbappend. destlayerdir must contain the original recipe
699 """ 707 """
700 708
701 if not removevalues: 709 if not removevalues:
702 removevalues = {} 710 removevalues = {}
703 711
704 # Determine how the bbappend should be named 712 recipefile = rd.getVar('FILE')
705 appendpath, pathok = get_bbappend_path(rd, destlayerdir, wildcardver) 713 if update_original_recipe:
706 if not appendpath: 714 if destlayerdir not in recipefile:
707 bb.error('Unable to determine layer directory containing %s' % recipefile) 715 bb.error("destlayerdir %s doesn't contain the original recipe (%s), cannot update it" % (destlayerdir, recipefile))
708 return (None, None) 716 return (None, None)
709 if not pathok: 717
710 bb.warn('Unable to determine correct subdirectory path for bbappend file - check that what %s adds to BBFILES also matches .bbappend files. Using %s for now, but until you fix this the bbappend will not be applied.' % (os.path.join(destlayerdir, 'conf', 'layer.conf'), os.path.dirname(appendpath))) 718 appendpath = recipefile
719 else:
720 # Determine how the bbappend should be named
721 appendpath, pathok = get_bbappend_path(rd, destlayerdir, wildcardver)
722 if not appendpath:
723 bb.error('Unable to determine layer directory containing %s' % recipefile)
724 return (None, None)
725 if not pathok:
726 bb.warn('Unable to determine correct subdirectory path for bbappend file - check that what %s adds to BBFILES also matches .bbappend files. Using %s for now, but until you fix this the bbappend will not be applied.' % (os.path.join(destlayerdir, 'conf', 'layer.conf'), os.path.dirname(appendpath)))
711 727
712 appenddir = os.path.dirname(appendpath) 728 appenddir = os.path.dirname(appendpath)
713 if not redirect_output: 729 if not redirect_output:
@@ -752,30 +768,48 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
752 bbappendlines.append((varname, op, value)) 768 bbappendlines.append((varname, op, value))
753 769
754 destsubdir = rd.getVar('PN') 770 destsubdir = rd.getVar('PN')
755 if srcfiles: 771 if not update_original_recipe and srcfiles:
756 bbappendlines.append(('FILESEXTRAPATHS_prepend', ':=', '${THISDIR}/${PN}:')) 772 bbappendlines.append(('FILESEXTRAPATHS:prepend', ':=', '${THISDIR}/${PN}:'))
757 773
758 appendoverride = '' 774 appendoverride = ''
759 if machine: 775 if machine:
760 bbappendlines.append(('PACKAGE_ARCH', '=', '${MACHINE_ARCH}')) 776 bbappendlines.append(('PACKAGE_ARCH', '=', '${MACHINE_ARCH}'))
761 appendoverride = '_%s' % machine 777 appendoverride = ':%s' % machine
762 copyfiles = {} 778 copyfiles = {}
763 if srcfiles: 779 if srcfiles:
764 instfunclines = [] 780 instfunclines = []
765 for newfile, origsrcfile in srcfiles.items(): 781 for i, (newfile, param) in enumerate(srcfiles.items()):
766 srcfile = origsrcfile
767 srcurientry = None 782 srcurientry = None
768 if not srcfile: 783 if not 'path' in param or not param['path']:
769 srcfile = os.path.basename(newfile) 784 if 'newname' in param and param['newname']:
785 srcfile = param['newname']
786 else:
787 srcfile = os.path.basename(newfile)
770 srcurientry = 'file://%s' % srcfile 788 srcurientry = 'file://%s' % srcfile
789 oldentry = None
790 for uri in rd.getVar('SRC_URI').split():
791 if srcurientry in uri:
792 oldentry = uri
793 if params and params[i]:
794 srcurientry = '%s;%s' % (srcurientry, ';'.join('%s=%s' % (k,v) for k,v in params[i].items()))
771 # Double-check it's not there already 795 # Double-check it's not there already
772 # FIXME do we care if the entry is added by another bbappend that might go away? 796 # FIXME do we care if the entry is added by another bbappend that might go away?
773 if not srcurientry in rd.getVar('SRC_URI').split(): 797 if not srcurientry in rd.getVar('SRC_URI').split():
774 if machine: 798 if machine:
775 appendline('SRC_URI_append%s' % appendoverride, '=', ' ' + srcurientry) 799 if oldentry:
800 appendline('SRC_URI:remove%s' % appendoverride, '=', ' ' + oldentry)
801 appendline('SRC_URI:append%s' % appendoverride, '=', ' ' + srcurientry)
776 else: 802 else:
803 if oldentry:
804 if update_original_recipe:
805 removevalues['SRC_URI'] = oldentry
806 else:
807 appendline('SRC_URI:remove', '=', oldentry)
777 appendline('SRC_URI', '+=', srcurientry) 808 appendline('SRC_URI', '+=', srcurientry)
778 copyfiles[newfile] = srcfile 809 param['path'] = srcfile
810 else:
811 srcfile = param['path']
812 copyfiles[newfile] = param
779 if install: 813 if install:
780 institem = install.pop(newfile, None) 814 institem = install.pop(newfile, None)
781 if institem: 815 if institem:
@@ -786,7 +820,7 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
786 instfunclines.append(instdirline) 820 instfunclines.append(instdirline)
787 instfunclines.append('install -m %s ${WORKDIR}/%s ${D}%s' % (perms, os.path.basename(srcfile), instdestpath)) 821 instfunclines.append('install -m %s ${WORKDIR}/%s ${D}%s' % (perms, os.path.basename(srcfile), instdestpath))
788 if instfunclines: 822 if instfunclines:
789 bbappendlines.append(('do_install_append%s()' % appendoverride, '', instfunclines)) 823 bbappendlines.append(('do_install:append%s()' % appendoverride, '', instfunclines))
790 824
791 if redirect_output: 825 if redirect_output:
792 bb.note('Writing append file %s (dry-run)' % appendpath) 826 bb.note('Writing append file %s (dry-run)' % appendpath)
@@ -795,6 +829,8 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
795 # multiple times per operation when we're handling overrides) 829 # multiple times per operation when we're handling overrides)
796 if os.path.exists(appendpath) and not os.path.exists(outfile): 830 if os.path.exists(appendpath) and not os.path.exists(outfile):
797 shutil.copy2(appendpath, outfile) 831 shutil.copy2(appendpath, outfile)
832 elif update_original_recipe:
833 outfile = recipefile
798 else: 834 else:
799 bb.note('Writing append file %s' % appendpath) 835 bb.note('Writing append file %s' % appendpath)
800 outfile = appendpath 836 outfile = appendpath
@@ -804,15 +840,15 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
804 extvars = {'destsubdir': destsubdir} 840 extvars = {'destsubdir': destsubdir}
805 841
806 def appendfile_varfunc(varname, origvalue, op, newlines): 842 def appendfile_varfunc(varname, origvalue, op, newlines):
807 if varname == 'FILESEXTRAPATHS_prepend': 843 if varname == 'FILESEXTRAPATHS:prepend':
808 if origvalue.startswith('${THISDIR}/'): 844 if origvalue.startswith('${THISDIR}/'):
809 popline('FILESEXTRAPATHS_prepend') 845 popline('FILESEXTRAPATHS:prepend')
810 extvars['destsubdir'] = rd.expand(origvalue.split('${THISDIR}/', 1)[1].rstrip(':')) 846 extvars['destsubdir'] = rd.expand(origvalue.split('${THISDIR}/', 1)[1].rstrip(':'))
811 elif varname == 'PACKAGE_ARCH': 847 elif varname == 'PACKAGE_ARCH':
812 if machine: 848 if machine:
813 popline('PACKAGE_ARCH') 849 popline('PACKAGE_ARCH')
814 return (machine, None, 4, False) 850 return (machine, None, 4, False)
815 elif varname.startswith('do_install_append'): 851 elif varname.startswith('do_install:append'):
816 func = popline(varname) 852 func = popline(varname)
817 if func: 853 if func:
818 instfunclines = [line.strip() for line in origvalue.strip('\n').splitlines()] 854 instfunclines = [line.strip() for line in origvalue.strip('\n').splitlines()]
@@ -824,7 +860,7 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
824 splitval = split_var_value(origvalue, assignment=False) 860 splitval = split_var_value(origvalue, assignment=False)
825 changed = False 861 changed = False
826 removevar = varname 862 removevar = varname
827 if varname in ['SRC_URI', 'SRC_URI_append%s' % appendoverride]: 863 if varname in ['SRC_URI', 'SRC_URI:append%s' % appendoverride]:
828 removevar = 'SRC_URI' 864 removevar = 'SRC_URI'
829 line = popline(varname) 865 line = popline(varname)
830 if line: 866 if line:
@@ -853,11 +889,11 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
853 newvalue = splitval 889 newvalue = splitval
854 if len(newvalue) == 1: 890 if len(newvalue) == 1:
855 # Ensure it's written out as one line 891 # Ensure it's written out as one line
856 if '_append' in varname: 892 if ':append' in varname:
857 newvalue = ' ' + newvalue[0] 893 newvalue = ' ' + newvalue[0]
858 else: 894 else:
859 newvalue = newvalue[0] 895 newvalue = newvalue[0]
860 if not newvalue and (op in ['+=', '.='] or '_append' in varname): 896 if not newvalue and (op in ['+=', '.='] or ':append' in varname):
861 # There's no point appending nothing 897 # There's no point appending nothing
862 newvalue = None 898 newvalue = None
863 if varname.endswith('()'): 899 if varname.endswith('()'):
@@ -898,7 +934,12 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
898 outdir = redirect_output 934 outdir = redirect_output
899 else: 935 else:
900 outdir = appenddir 936 outdir = appenddir
901 for newfile, srcfile in copyfiles.items(): 937 for newfile, param in copyfiles.items():
938 srcfile = param['path']
939 patchdir = param.get('patchdir', ".")
940
941 if patchdir != ".":
942 newfile = os.path.join(os.path.split(newfile)[0], patchdir, os.path.split(newfile)[1])
902 filedest = os.path.join(outdir, destsubdir, os.path.basename(srcfile)) 943 filedest = os.path.join(outdir, destsubdir, os.path.basename(srcfile))
903 if os.path.abspath(newfile) != os.path.abspath(filedest): 944 if os.path.abspath(newfile) != os.path.abspath(filedest):
904 if newfile.startswith(tempfile.gettempdir()): 945 if newfile.startswith(tempfile.gettempdir()):
@@ -942,10 +983,9 @@ def replace_dir_vars(path, d):
942 path = path.replace(dirpath, '${%s}' % dirvars[dirpath]) 983 path = path.replace(dirpath, '${%s}' % dirvars[dirpath])
943 return path 984 return path
944 985
945def get_recipe_pv_without_srcpv(pv, uri_type): 986def get_recipe_pv_with_pfx_sfx(pv, uri_type):
946 """ 987 """
947 Get PV without SRCPV common in SCM's for now only 988 Get PV separating prefix and suffix components.
948 support git.
949 989
950 Returns tuple with pv, prefix and suffix. 990 Returns tuple with pv, prefix and suffix.
951 """ 991 """
@@ -953,7 +993,7 @@ def get_recipe_pv_without_srcpv(pv, uri_type):
953 sfx = '' 993 sfx = ''
954 994
955 if uri_type == 'git': 995 if uri_type == 'git':
956 git_regex = re.compile(r"(?P<pfx>v?)(?P<ver>.*?)(?P<sfx>\+[^\+]*(git)?r?(AUTOINC\+))(?P<rev>.*)") 996 git_regex = re.compile(r"(?P<pfx>v?)(?P<ver>.*?)(?P<sfx>\+[^\+]*(git)?r?(AUTOINC\+)?)(?P<rev>.*)")
957 m = git_regex.match(pv) 997 m = git_regex.match(pv)
958 998
959 if m: 999 if m:
@@ -1005,7 +1045,7 @@ def get_recipe_upstream_version(rd):
1005 src_uri = src_uris.split()[0] 1045 src_uri = src_uris.split()[0]
1006 uri_type, _, _, _, _, _ = decodeurl(src_uri) 1046 uri_type, _, _, _, _, _ = decodeurl(src_uri)
1007 1047
1008 (pv, pfx, sfx) = get_recipe_pv_without_srcpv(rd.getVar('PV'), uri_type) 1048 (pv, pfx, sfx) = get_recipe_pv_with_pfx_sfx(rd.getVar('PV'), uri_type)
1009 ru['current_version'] = pv 1049 ru['current_version'] = pv
1010 1050
1011 manual_upstream_version = rd.getVar("RECIPE_UPSTREAM_VERSION") 1051 manual_upstream_version = rd.getVar("RECIPE_UPSTREAM_VERSION")
@@ -1029,10 +1069,11 @@ def get_recipe_upstream_version(rd):
1029 else: 1069 else:
1030 ud = bb.fetch2.FetchData(src_uri, rd) 1070 ud = bb.fetch2.FetchData(src_uri, rd)
1031 if rd.getVar("UPSTREAM_CHECK_COMMITS") == "1": 1071 if rd.getVar("UPSTREAM_CHECK_COMMITS") == "1":
1072 bb.fetch2.get_srcrev(rd)
1032 revision = ud.method.latest_revision(ud, rd, 'default') 1073 revision = ud.method.latest_revision(ud, rd, 'default')
1033 upversion = pv 1074 upversion = pv
1034 if revision != rd.getVar("SRCREV"): 1075 if revision != rd.getVar("SRCREV"):
1035 upversion = upversion + "-new-commits-available" 1076 upversion = upversion + "-new-commits-available"
1036 else: 1077 else:
1037 pupver = ud.method.latest_versionstring(ud, rd) 1078 pupver = ud.method.latest_versionstring(ud, rd)
1038 (upversion, revision) = pupver 1079 (upversion, revision) = pupver
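
With the new bbappend_recipe() signature, each srcfiles entry carries an options dict ('path', 'patchdir', 'newname') instead of a bare original filename, and optional per-entry SRC_URI parameters arrive via the parallel params list. A hedged sketch of how a caller might now assemble its arguments (file names and parameter values are invented):

    # each srcfiles key is the local file to copy; the value holds the options
    srcfiles = {
        "/tmp/devtool/0001-fix-build.patch": {   # invented path
            "path": None,        # not already present in SRC_URI
            "patchdir": ".",     # optional patchdir parameter for SRC_URI
            "newname": None,     # None keeps basename of the key
        },
    }
    # one optional parameter dict per srcfiles entry, in the same order
    params = [{"striplevel": "2"}]
    # bbappend_recipe(rd, destlayerdir, srcfiles, params=params,
    #                 update_original_recipe=False)
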
diff --git a/meta/lib/oe/reproducible.py b/meta/lib/oe/reproducible.py
index 204b9bd734..a9f717159e 100644
--- a/meta/lib/oe/reproducible.py
+++ b/meta/lib/oe/reproducible.py
@@ -1,10 +1,63 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4import os 6import os
5import subprocess 7import subprocess
6import bb 8import bb
7 9
10# For reproducible builds, this code sets the default SOURCE_DATE_EPOCH in each
11# component's build environment. The format is number of seconds since the
12# system epoch.
13#
14# Upstream components (generally) respect this environment variable,
15# using it in place of the "current" date and time.
16# See https://reproducible-builds.org/specs/source-date-epoch/
17#
18# The default value of SOURCE_DATE_EPOCH comes from the function
19# get_source_date_epoch_value which reads from the SDE_FILE, or if the file
20# is not available will use the fallback of SOURCE_DATE_EPOCH_FALLBACK.
21#
22# The SDE_FILE is normally constructed from the function
23# create_source_date_epoch_stamp which is typically added as a postfuncs to
24# the do_unpack task. If a recipe does NOT have do_unpack, it should be added
25# to a task that runs after the source is available and before the
26# do_deploy_source_date_epoch task is executed.
27#
28# If a recipe wishes to override the default behavior it should set its own
29# SOURCE_DATE_EPOCH or override the do_deploy_source_date_epoch_stamp task
30# with recipe-specific functionality to write the appropriate
31# SOURCE_DATE_EPOCH into the SDE_FILE.
32#
33# SOURCE_DATE_EPOCH is intended to be a reproducible value. This value should
34# be reproducible for anyone who builds the same revision from the same
35# sources.
36#
37# There are 5 ways the create_source_date_epoch_stamp function determines what
38# becomes SOURCE_DATE_EPOCH:
39#
40# 1. Use the value from __source_date_epoch.txt file if this file exists.
41# This file was most likely created in the previous build by one of the
42# following methods 2,3,4.
43# Alternatively, it can be provided by a recipe via SRC_URI.
44#
45# If the file does not exist:
46#
47# 2. If there is a git checkout, use the last git commit timestamp.
48# Git does not preserve file timestamps on checkout.
49#
50# 3. Use the mtime of "known" files such as NEWS, CHANGELOG, ...
51# This works for well-kept repositories distributed via tarball.
52#
53# 4. Use the modification time of the youngest file in the source tree, if
54# there is one.
55# This will be the newest file from the distribution tarball, if any.
56#
57# 5. Fall back to a fixed timestamp (SOURCE_DATE_EPOCH_FALLBACK).
58#
59# Once the value is determined, it is stored in the recipe's SDE_FILE.
60
8def get_source_date_epoch_from_known_files(d, sourcedir): 61def get_source_date_epoch_from_known_files(d, sourcedir):
9 source_date_epoch = None 62 source_date_epoch = None
10 newest_file = None 63 newest_file = None
@@ -41,7 +94,7 @@ def find_git_folder(d, sourcedir):
41 for root, dirs, files in os.walk(workdir, topdown=True): 94 for root, dirs, files in os.walk(workdir, topdown=True):
42 dirs[:] = [d for d in dirs if d not in exclude] 95 dirs[:] = [d for d in dirs if d not in exclude]
43 if '.git' in dirs: 96 if '.git' in dirs:
44 return root 97 return os.path.join(root, ".git")
45 98
46 bb.warn("Failed to find a git repository in WORKDIR: %s" % workdir) 99 bb.warn("Failed to find a git repository in WORKDIR: %s" % workdir)
47 return None 100 return None
@@ -62,11 +115,12 @@ def get_source_date_epoch_from_git(d, sourcedir):
62 return None 115 return None
63 116
64 bb.debug(1, "git repository: %s" % gitpath) 117 bb.debug(1, "git repository: %s" % gitpath)
65 p = subprocess.run(['git', '--git-dir', gitpath, 'log', '-1', '--pretty=%ct'], check=True, stdout=subprocess.PIPE) 118 p = subprocess.run(['git', '-c', 'log.showSignature=false', '--git-dir', gitpath, 'log', '-1', '--pretty=%ct'],
119 check=True, stdout=subprocess.PIPE)
66 return int(p.stdout.decode('utf-8')) 120 return int(p.stdout.decode('utf-8'))
67 121
68def get_source_date_epoch_from_youngest_file(d, sourcedir): 122def get_source_date_epoch_from_youngest_file(d, sourcedir):
69 if sourcedir == d.getVar('WORKDIR'): 123 if sourcedir == d.getVar('UNPACKDIR'):
70 # These sources are almost certainly not from a tarball 124 # These sources are almost certainly not from a tarball
71 return None 125 return None
72 126
@@ -77,6 +131,9 @@ def get_source_date_epoch_from_youngest_file(d, sourcedir):
77 files = [f for f in files if not f[0] == '.'] 131 files = [f for f in files if not f[0] == '.']
78 132
79 for fname in files: 133 for fname in files:
134 if fname == "singletask.lock":
135 # Ignore externalsrc/devtool lockfile [YOCTO #14921]
136 continue
80 filename = os.path.join(root, fname) 137 filename = os.path.join(root, fname)
81 try: 138 try:
82 mtime = int(os.lstat(filename).st_mtime) 139 mtime = int(os.lstat(filename).st_mtime)
@@ -101,8 +158,40 @@ def fixed_source_date_epoch(d):
101def get_source_date_epoch(d, sourcedir): 158def get_source_date_epoch(d, sourcedir):
102 return ( 159 return (
103 get_source_date_epoch_from_git(d, sourcedir) or 160 get_source_date_epoch_from_git(d, sourcedir) or
104 get_source_date_epoch_from_known_files(d, sourcedir) or
105 get_source_date_epoch_from_youngest_file(d, sourcedir) or 161 get_source_date_epoch_from_youngest_file(d, sourcedir) or
106 fixed_source_date_epoch(d) # Last resort 162 fixed_source_date_epoch(d) # Last resort
107 ) 163 )
108 164
165def epochfile_read(epochfile, d):
166 cached, efile = d.getVar('__CACHED_SOURCE_DATE_EPOCH') or (None, None)
167 if cached and efile == epochfile:
168 return cached
169
170 if cached and epochfile != efile:
171 bb.debug(1, "Epoch file changed from %s to %s" % (efile, epochfile))
172
173 source_date_epoch = int(d.getVar('SOURCE_DATE_EPOCH_FALLBACK'))
174 try:
175 with open(epochfile, 'r') as f:
176 s = f.read()
177 try:
178 source_date_epoch = int(s)
179 except ValueError:
180 bb.warn("SOURCE_DATE_EPOCH value '%s' is invalid. Reverting to SOURCE_DATE_EPOCH_FALLBACK" % s)
181 source_date_epoch = int(d.getVar('SOURCE_DATE_EPOCH_FALLBACK'))
182 bb.debug(1, "SOURCE_DATE_EPOCH: %d" % source_date_epoch)
183 except FileNotFoundError:
184 bb.debug(1, "Cannot find %s. SOURCE_DATE_EPOCH will default to %d" % (epochfile, source_date_epoch))
185
186 d.setVar('__CACHED_SOURCE_DATE_EPOCH', (str(source_date_epoch), epochfile))
187 return str(source_date_epoch)
188
189def epochfile_write(source_date_epoch, epochfile, d):
190
191 bb.debug(1, "SOURCE_DATE_EPOCH: %d" % source_date_epoch)
192 bb.utils.mkdirhier(os.path.dirname(epochfile))
193
194 tmp_file = "%s.new" % epochfile
195 with open(tmp_file, 'w') as f:
196 f.write(str(source_date_epoch))
197 os.rename(tmp_file, epochfile)
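
epochfile_write() uses the standard write-to-temporary-then-rename idiom so a concurrent reader of the SDE_FILE never observes a partially written value. The same pattern in isolation (the path is illustrative):

    import os

    def epoch_write(epochfile, source_date_epoch):
        tmp = "%s.new" % epochfile            # same suffix as the code above
        with open(tmp, "w") as f:
            f.write(str(source_date_epoch))
        os.rename(tmp, epochfile)             # atomic within one filesystem

    epoch_write("/tmp/__source_date_epoch.txt", 1640995200)
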
diff --git a/meta/lib/oe/rootfs.py b/meta/lib/oe/rootfs.py
index 249c685dcf..8cd48f9450 100644
--- a/meta/lib/oe/rootfs.py
+++ b/meta/lib/oe/rootfs.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4from abc import ABCMeta, abstractmethod 6from abc import ABCMeta, abstractmethod
@@ -104,7 +106,7 @@ class Rootfs(object, metaclass=ABCMeta):
104 def _cleanup(self): 106 def _cleanup(self):
105 pass 107 pass
106 108
107 def _setup_dbg_rootfs(self, dirs): 109 def _setup_dbg_rootfs(self, package_paths):
108 gen_debugfs = self.d.getVar('IMAGE_GEN_DEBUGFS') or '0' 110 gen_debugfs = self.d.getVar('IMAGE_GEN_DEBUGFS') or '0'
109 if gen_debugfs != '1': 111 if gen_debugfs != '1':
110 return 112 return
@@ -114,17 +116,18 @@ class Rootfs(object, metaclass=ABCMeta):
114 shutil.rmtree(self.image_rootfs + '-orig') 116 shutil.rmtree(self.image_rootfs + '-orig')
115 except: 117 except:
116 pass 118 pass
117 os.rename(self.image_rootfs, self.image_rootfs + '-orig') 119 bb.utils.rename(self.image_rootfs, self.image_rootfs + '-orig')
118 120
119 bb.note(" Creating debug rootfs...") 121 bb.note(" Creating debug rootfs...")
120 bb.utils.mkdirhier(self.image_rootfs) 122 bb.utils.mkdirhier(self.image_rootfs)
121 123
122 bb.note(" Copying back package database...") 124 bb.note(" Copying back package database...")
123 for dir in dirs: 125 for path in package_paths:
124 if not os.path.isdir(self.image_rootfs + '-orig' + dir): 126 bb.utils.mkdirhier(self.image_rootfs + os.path.dirname(path))
125 continue 127 if os.path.isdir(self.image_rootfs + '-orig' + path):
126 bb.utils.mkdirhier(self.image_rootfs + os.path.dirname(dir)) 128 shutil.copytree(self.image_rootfs + '-orig' + path, self.image_rootfs + path, symlinks=True)
127 shutil.copytree(self.image_rootfs + '-orig' + dir, self.image_rootfs + dir, symlinks=True) 129 elif os.path.isfile(self.image_rootfs + '-orig' + path):
130 shutil.copyfile(self.image_rootfs + '-orig' + path, self.image_rootfs + path)
128 131
129 # Copy files located in /usr/lib/debug or /usr/src/debug 132 # Copy files located in /usr/lib/debug or /usr/src/debug
130 for dir in ["/usr/lib/debug", "/usr/src/debug"]: 133 for dir in ["/usr/lib/debug", "/usr/src/debug"]:
@@ -160,25 +163,26 @@ class Rootfs(object, metaclass=ABCMeta):
160 bb.note(" Install extra debug packages...") 163 bb.note(" Install extra debug packages...")
161 self.pm.install(extra_debug_pkgs.split(), True) 164 self.pm.install(extra_debug_pkgs.split(), True)
162 165
166 bb.note(" Removing package database...")
167 for path in package_paths:
168 if os.path.isdir(self.image_rootfs + path):
169 shutil.rmtree(self.image_rootfs + path)
170 elif os.path.isfile(self.image_rootfs + path):
171 os.remove(self.image_rootfs + path)
172
163 bb.note(" Rename debug rootfs...") 173 bb.note(" Rename debug rootfs...")
164 try: 174 try:
165 shutil.rmtree(self.image_rootfs + '-dbg') 175 shutil.rmtree(self.image_rootfs + '-dbg')
166 except: 176 except:
167 pass 177 pass
168 os.rename(self.image_rootfs, self.image_rootfs + '-dbg') 178 bb.utils.rename(self.image_rootfs, self.image_rootfs + '-dbg')
169 179
170 bb.note(" Restoreing original rootfs...") 180 bb.note(" Restoring original rootfs...")
171 os.rename(self.image_rootfs + '-orig', self.image_rootfs) 181 bb.utils.rename(self.image_rootfs + '-orig', self.image_rootfs)
172 182
173 def _exec_shell_cmd(self, cmd): 183 def _exec_shell_cmd(self, cmd):
174 fakerootcmd = self.d.getVar('FAKEROOT')
175 if fakerootcmd is not None:
176 exec_cmd = [fakerootcmd, cmd]
177 else:
178 exec_cmd = cmd
179
180 try: 184 try:
181 subprocess.check_output(exec_cmd, stderr=subprocess.STDOUT) 185 subprocess.check_output(cmd, stderr=subprocess.STDOUT)
182 except subprocess.CalledProcessError as e: 186 except subprocess.CalledProcessError as e:
183 return("Command '%s' returned %d:\n%s" % (e.cmd, e.returncode, e.output)) 187 return("Command '%s' returned %d:\n%s" % (e.cmd, e.returncode, e.output))
184 188
@@ -190,9 +194,17 @@ class Rootfs(object, metaclass=ABCMeta):
190 post_process_cmds = self.d.getVar("ROOTFS_POSTPROCESS_COMMAND") 194 post_process_cmds = self.d.getVar("ROOTFS_POSTPROCESS_COMMAND")
191 rootfs_post_install_cmds = self.d.getVar('ROOTFS_POSTINSTALL_COMMAND') 195 rootfs_post_install_cmds = self.d.getVar('ROOTFS_POSTINSTALL_COMMAND')
192 196
193 bb.utils.mkdirhier(self.image_rootfs) 197 def make_last(command, commands):
198 commands = commands.split()
199 if command in commands:
200 commands.remove(command)
201 commands.append(command)
202 return " ".join(commands)
194 203
195 bb.utils.mkdirhier(self.deploydir) 204 # We want this to run as late as possible, in particular after
205 # systemd_sysusers_create and set_user_group. Using :append is not enough
206 post_process_cmds = make_last("tidy_shadowutils_files", post_process_cmds)
207 post_process_cmds = make_last("rootfs_reproducible", post_process_cmds)
196 208
197 execute_pre_post_process(self.d, pre_process_cmds) 209 execute_pre_post_process(self.d, pre_process_cmds)
198 210
@@ -250,13 +262,11 @@ class Rootfs(object, metaclass=ABCMeta):
250 262
251 263
252 def _uninstall_unneeded(self): 264 def _uninstall_unneeded(self):
253 # Remove unneeded init script symlinks 265 # Remove the run-postinsts package if no delayed postinsts are found
254 delayed_postinsts = self._get_delayed_postinsts() 266 delayed_postinsts = self._get_delayed_postinsts()
255 if delayed_postinsts is None: 267 if delayed_postinsts is None:
256 if os.path.exists(self.d.expand("${IMAGE_ROOTFS}${sysconfdir}/init.d/run-postinsts")): 268 if os.path.exists(self.d.expand("${IMAGE_ROOTFS}${sysconfdir}/init.d/run-postinsts")) or os.path.exists(self.d.expand("${IMAGE_ROOTFS}${systemd_system_unitdir}/run-postinsts.service")):
257 self._exec_shell_cmd(["update-rc.d", "-f", "-r", 269 self.pm.remove(["run-postinsts"])
258 self.d.getVar('IMAGE_ROOTFS'),
259 "run-postinsts", "remove"])
260 270
261 image_rorfs = bb.utils.contains("IMAGE_FEATURES", "read-only-rootfs", 271 image_rorfs = bb.utils.contains("IMAGE_FEATURES", "read-only-rootfs",
262 True, False, self.d) 272 True, False, self.d)
@@ -304,10 +314,20 @@ class Rootfs(object, metaclass=ABCMeta):
304 self._exec_shell_cmd(['ldconfig', '-r', self.image_rootfs, '-c', 314 self._exec_shell_cmd(['ldconfig', '-r', self.image_rootfs, '-c',
305 'new', '-v', '-X']) 315 'new', '-v', '-X'])
306 316
317 image_rorfs = bb.utils.contains("IMAGE_FEATURES", "read-only-rootfs",
318 True, False, self.d)
319 ldconfig_in_features = bb.utils.contains("DISTRO_FEATURES", "ldconfig",
320 True, False, self.d)
321 if image_rorfs or not ldconfig_in_features:
322 ldconfig_cache_dir = os.path.join(self.image_rootfs, "var/cache/ldconfig")
323 if os.path.exists(ldconfig_cache_dir):
324 bb.note("Removing ldconfig auxiliary cache...")
325 shutil.rmtree(ldconfig_cache_dir)
326
307 def _check_for_kernel_modules(self, modules_dir): 327 def _check_for_kernel_modules(self, modules_dir):
308 for root, dirs, files in os.walk(modules_dir, topdown=True): 328 for root, dirs, files in os.walk(modules_dir, topdown=True):
309 for name in files: 329 for name in files:
310 found_ko = name.endswith(".ko") 330 found_ko = name.endswith((".ko", ".ko.gz", ".ko.xz", ".ko.zst"))
311 if found_ko: 331 if found_ko:
312 return found_ko 332 return found_ko
313 return False 333 return False
@@ -319,17 +339,30 @@ class Rootfs(object, metaclass=ABCMeta):
319 bb.note("No Kernel Modules found, not running depmod") 339 bb.note("No Kernel Modules found, not running depmod")
320 return 340 return
321 341
322 kernel_abi_ver_file = oe.path.join(self.d.getVar('PKGDATA_DIR'), "kernel-depmod", 342 pkgdatadir = self.d.getVar('PKGDATA_DIR')
323 'kernel-abiversion') 343
324 if not os.path.exists(kernel_abi_ver_file): 344 # PKGDATA_DIR can include multiple kernels so we run depmod for each
325 bb.fatal("No kernel-abiversion file found (%s), cannot run depmod, aborting" % kernel_abi_ver_file) 345 # one of them.
346 for direntry in os.listdir(pkgdatadir):
347 match = re.match('(.*)-depmod', direntry)
348 if not match:
349 continue
350 kernel_package_name = match.group(1)
326 351
327 kernel_ver = open(kernel_abi_ver_file).read().strip(' \n') 352 kernel_abi_ver_file = oe.path.join(pkgdatadir, direntry, kernel_package_name + '-abiversion')
328 versioned_modules_dir = os.path.join(self.image_rootfs, modules_dir, kernel_ver) 353 if not os.path.exists(kernel_abi_ver_file):
354 bb.fatal("No kernel-abiversion file found (%s), cannot run depmod, aborting" % kernel_abi_ver_file)
329 355
330 bb.utils.mkdirhier(versioned_modules_dir) 356 with open(kernel_abi_ver_file) as f:
357 kernel_ver = f.read().strip(' \n')
331 358
332 self._exec_shell_cmd(['depmodwrapper', '-a', '-b', self.image_rootfs, kernel_ver]) 359 versioned_modules_dir = os.path.join(self.image_rootfs, modules_dir, kernel_ver)
360
361 bb.utils.mkdirhier(versioned_modules_dir)
362
363 bb.note("Running depmodwrapper for %s ..." % versioned_modules_dir)
364 if self._exec_shell_cmd(['depmodwrapper', '-a', '-b', self.image_rootfs, kernel_ver, kernel_package_name]):
365 bb.fatal("Kernel modules dependency generation failed")
333 366
334 """ 367 """
335 Create devfs: 368 Create devfs:
@@ -378,6 +411,10 @@ def create_rootfs(d, manifest_dir=None, progress_reporter=None, logcatcher=None)
378 411
379 412
380def image_list_installed_packages(d, rootfs_dir=None): 413def image_list_installed_packages(d, rootfs_dir=None):
414 # There's no rootfs for baremetal images
415 if bb.data.inherits_class('baremetal-image', d):
416 return ""
417
381 if not rootfs_dir: 418 if not rootfs_dir:
382 rootfs_dir = d.getVar('IMAGE_ROOTFS') 419 rootfs_dir = d.getVar('IMAGE_ROOTFS')
383 420
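
_generate_kernel_module_deps() now scans PKGDATA_DIR for every '<kernel-package>-depmod' directory rather than assuming a single kernel, then runs depmodwrapper per kernel ABI version. A standalone sketch of that discovery loop, mirroring the regex in the code above (the directory layout is inferred from it):

    import os
    import re

    def find_kernels(pkgdatadir):
        # yield (kernel package name, path to its abiversion file) for every
        # "<name>-depmod" entry found in PKGDATA_DIR
        for direntry in os.listdir(pkgdatadir):
            m = re.match(r"(.*)-depmod", direntry)
            if m:
                name = m.group(1)
                yield name, os.path.join(pkgdatadir, direntry, name + "-abiversion")
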
diff --git a/meta/lib/oe/rust.py b/meta/lib/oe/rust.py
new file mode 100644
index 0000000000..185553eeeb
--- /dev/null
+++ b/meta/lib/oe/rust.py
@@ -0,0 +1,13 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7# Handle mismatches between `uname -m`-style output and Rust's arch names
8def arch_to_rust_arch(arch):
9 if arch == "ppc64le":
10 return "powerpc64le"
11 if arch in ('riscv32', 'riscv64'):
12 return arch + 'gc'
13 return arch
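
A quick check of the new mapping; note that for RISC-V the Rust target name carries the "gc" ISA-extension suffix (the helper is restated so this snippet runs standalone):

    def arch_to_rust_arch(arch):
        # restatement of the helper above, for a self-contained example
        if arch == "ppc64le":
            return "powerpc64le"
        if arch in ("riscv32", "riscv64"):
            return arch + "gc"
        return arch

    for a in ("ppc64le", "riscv64", "x86_64"):
        print(a, "->", arch_to_rust_arch(a))
    # ppc64le -> powerpc64le, riscv64 -> riscv64gc, x86_64 -> x86_64
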
diff --git a/meta/lib/oe/sbom.py b/meta/lib/oe/sbom.py
new file mode 100644
index 0000000000..fd4b6895d8
--- /dev/null
+++ b/meta/lib/oe/sbom.py
@@ -0,0 +1,120 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7import collections
8
9DepRecipe = collections.namedtuple("DepRecipe", ("doc", "doc_sha1", "recipe"))
10DepSource = collections.namedtuple("DepSource", ("doc", "doc_sha1", "recipe", "file"))
11
12
13def get_recipe_spdxid(d):
14 return "SPDXRef-%s-%s" % ("Recipe", d.getVar("PN"))
15
16
17def get_download_spdxid(d, idx):
18 return "SPDXRef-Download-%s-%d" % (d.getVar("PN"), idx)
19
20
21def get_package_spdxid(pkg):
22 return "SPDXRef-Package-%s" % pkg
23
24
25def get_source_file_spdxid(d, idx):
26 return "SPDXRef-SourceFile-%s-%d" % (d.getVar("PN"), idx)
27
28
29def get_packaged_file_spdxid(pkg, idx):
30 return "SPDXRef-PackagedFile-%s-%d" % (pkg, idx)
31
32
33def get_image_spdxid(img):
34 return "SPDXRef-Image-%s" % img
35
36
37def get_sdk_spdxid(sdk):
38 return "SPDXRef-SDK-%s" % sdk
39
40
41def _doc_path_by_namespace(spdx_deploy, arch, doc_namespace):
42 return spdx_deploy / "by-namespace" / arch / doc_namespace.replace("/", "_")
43
44
45def doc_find_by_namespace(spdx_deploy, search_arches, doc_namespace):
46 for pkgarch in search_arches:
47 p = _doc_path_by_namespace(spdx_deploy, pkgarch, doc_namespace)
48 if os.path.exists(p):
49 return p
50 return None
51
52
53def _doc_path_by_hashfn(spdx_deploy, arch, doc_name, hashfn):
54 return (
55 spdx_deploy / "by-hash" / arch / hashfn.split()[1] / (doc_name + ".spdx.json")
56 )
57
58
59def doc_find_by_hashfn(spdx_deploy, search_arches, doc_name, hashfn):
60 for pkgarch in search_arches:
61 p = _doc_path_by_hashfn(spdx_deploy, pkgarch, doc_name, hashfn)
62 if os.path.exists(p):
63 return p
64 return None
65
66
67def doc_path(spdx_deploy, doc_name, arch, subdir):
68 return spdx_deploy / arch / subdir / (doc_name + ".spdx.json")
69
70
71def write_doc(d, spdx_doc, arch, subdir, spdx_deploy=None, indent=None):
72 from pathlib import Path
73
74 if spdx_deploy is None:
75 spdx_deploy = Path(d.getVar("SPDXDEPLOY"))
76
77 dest = doc_path(spdx_deploy, spdx_doc.name, arch, subdir)
78 dest.parent.mkdir(exist_ok=True, parents=True)
79 with dest.open("wb") as f:
80 doc_sha1 = spdx_doc.to_json(f, sort_keys=True, indent=indent)
81
82 l = _doc_path_by_namespace(spdx_deploy, arch, spdx_doc.documentNamespace)
83 l.parent.mkdir(exist_ok=True, parents=True)
84 l.symlink_to(os.path.relpath(dest, l.parent))
85
86 l = _doc_path_by_hashfn(
87 spdx_deploy, arch, spdx_doc.name, d.getVar("BB_HASHFILENAME")
88 )
89 l.parent.mkdir(exist_ok=True, parents=True)
90 l.symlink_to(os.path.relpath(dest, l.parent))
91
92 return doc_sha1
93
94
95def read_doc(fn):
96 import hashlib
97 import oe.spdx
98 import io
99 import contextlib
100
101 @contextlib.contextmanager
102 def get_file():
103 if isinstance(fn, io.IOBase):
104 yield fn
105 else:
106 with fn.open("rb") as f:
107 yield f
108
109 with get_file() as f:
110 sha1 = hashlib.sha1()
111 while True:
112 chunk = f.read(4096)
113 if not chunk:
114 break
115 sha1.update(chunk)
116
117 f.seek(0)
118 doc = oe.spdx.SPDXDocument.from_json(f)
119
120 return (doc, sha1.hexdigest())
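
A hedged sketch of how these helpers fit together, as they might be called from a python task where the BitBake datastore d is in scope (so SPDXDEPLOY and BB_HASHFILENAME are set, as they are when the create-spdx class is active); the document name, namespace, and use of SSTATE_PKGARCH below are illustrative:

import oe.sbom
import oe.spdx
from pathlib import Path

doc = oe.spdx.SPDXDocument(name="recipe-" + d.getVar("PN"))
doc.documentNamespace = "http://spdx.org/spdxdocs/example"  # illustrative

# write_doc() serializes under SPDXDEPLOY and creates the by-namespace
# and by-hash symlinks consumed by the doc_find_by_*() helpers above
doc_sha1 = oe.sbom.write_doc(d, doc, d.getVar("SSTATE_PKGARCH"), "recipes", indent=1)

# read_doc() accepts a pathlib.Path (or an open binary file) and returns
# the parsed document together with its SHA1
p = oe.sbom.doc_path(Path(d.getVar("SPDXDEPLOY")), doc.name, d.getVar("SSTATE_PKGARCH"), "recipes")
parsed, sha1 = oe.sbom.read_doc(p)
assert sha1 == doc_sha1
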
diff --git a/meta/lib/oe/sdk.py b/meta/lib/oe/sdk.py
index 37b59afd1a..3dc3672210 100644
--- a/meta/lib/oe/sdk.py
+++ b/meta/lib/oe/sdk.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -68,7 +70,7 @@ class Sdk(object, metaclass=ABCMeta):
68 #FIXME: using umbrella exc catching because bb.utils method raises it 70 #FIXME: using umbrella exc catching because bb.utils method raises it
69 except Exception as e: 71 except Exception as e:
70 bb.debug(1, "printing the stack trace\n %s" %traceback.format_exc()) 72 bb.debug(1, "printing the stack trace\n %s" %traceback.format_exc())
71 bb.error("unable to place %s in final SDK location" % sourcefile) 73 bb.fatal("unable to place %s in final SDK location" % sourcefile)
72 74
73 def mkdirhier(self, dirpath): 75 def mkdirhier(self, dirpath):
74 try: 76 try:
@@ -115,6 +117,10 @@ def sdk_list_installed_packages(d, target, rootfs_dir=None):
115 117
116 rootfs_dir = [sdk_output, os.path.join(sdk_output, target_path)][target is True] 118 rootfs_dir = [sdk_output, os.path.join(sdk_output, target_path)][target is True]
117 119
120 if target is False:
121 ipkgconf_sdk_target = d.getVar("IPKGCONF_SDK")
122 d.setVar("IPKGCONF_TARGET", ipkgconf_sdk_target)
123
118 img_type = d.getVar('IMAGE_PKGTYPE') 124 img_type = d.getVar('IMAGE_PKGTYPE')
119 import importlib 125 import importlib
120 cls = importlib.import_module('oe.package_manager.' + img_type) 126 cls = importlib.import_module('oe.package_manager.' + img_type)
diff --git a/meta/lib/oe/spdx.py b/meta/lib/oe/spdx.py
new file mode 100644
index 0000000000..7aaf2af5ed
--- /dev/null
+++ b/meta/lib/oe/spdx.py
@@ -0,0 +1,357 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7#
8# This library is intended to capture the JSON SPDX specification in a type
 9# safe manner. It is not intended to encode any particular OE-specific
 10# behaviors; see sbom.py for that.
11#
 12# The SPDX spec document doesn't cover the JSON syntax for every particular
 13# configuration, which can make it hard to determine what the JSON syntax
 14# should be. In practice it is much simpler to read the official SPDX JSON
 15# schema, which can be found in the https://github.com/spdx/spdx-spec
 16# repository under schemas/spdx-schema.json
17#
18
19import hashlib
20import itertools
21import json
22
23SPDX_VERSION = "2.2"
24
25
26#
 27# The following are the support classes used to implement the SPDX objects
28#
29
30class _Property(object):
31 """
32 A generic SPDX object property. The different types will derive from this
33 class
34 """
35
36 def __init__(self, *, default=None):
37 self.default = default
38
39 def setdefault(self, dest, name):
40 if self.default is not None:
41 dest.setdefault(name, self.default)
42
43
44class _String(_Property):
45 """
46 A scalar string property for an SPDX object
47 """
48
49 def __init__(self, **kwargs):
50 super().__init__(**kwargs)
51
52 def set_property(self, attrs, name):
53 def get_helper(obj):
54 return obj._spdx[name]
55
56 def set_helper(obj, value):
57 obj._spdx[name] = value
58
59 def del_helper(obj):
60 del obj._spdx[name]
61
62 attrs[name] = property(get_helper, set_helper, del_helper)
63
64 def init(self, source):
65 return source
66
67
68class _Object(_Property):
69 """
70 A scalar SPDX object property of a SPDX object
71 """
72
73 def __init__(self, cls, **kwargs):
74 super().__init__(**kwargs)
75 self.cls = cls
76
77 def set_property(self, attrs, name):
78 def get_helper(obj):
 79 if name not in obj._spdx:
80 obj._spdx[name] = self.cls()
81 return obj._spdx[name]
82
83 def set_helper(obj, value):
84 obj._spdx[name] = value
85
86 def del_helper(obj):
87 del obj._spdx[name]
88
 89 attrs[name] = property(get_helper, set_helper, del_helper)
90
91 def init(self, source):
92 return self.cls(**source)
93
94
95class _ListProperty(_Property):
96 """
97 A list of SPDX properties
98 """
99
100 def __init__(self, prop, **kwargs):
101 super().__init__(**kwargs)
102 self.prop = prop
103
104 def set_property(self, attrs, name):
105 def get_helper(obj):
 106 if name not in obj._spdx:
107 obj._spdx[name] = []
108 return obj._spdx[name]
109
110 def set_helper(obj, value):
111 obj._spdx[name] = list(value)
112
113 def del_helper(obj):
114 del obj._spdx[name]
115
116 attrs[name] = property(get_helper, set_helper, del_helper)
117
118 def init(self, source):
119 return [self.prop.init(o) for o in source]
120
121
122class _StringList(_ListProperty):
123 """
124 A list of strings as a property for an SPDX object
125 """
126
127 def __init__(self, **kwargs):
128 super().__init__(_String(), **kwargs)
129
130
131class _ObjectList(_ListProperty):
132 """
133 A list of SPDX objects as a property for an SPDX object
134 """
135
136 def __init__(self, cls, **kwargs):
137 super().__init__(_Object(cls), **kwargs)
138
139
140class MetaSPDXObject(type):
141 """
142 A metaclass that allows properties (anything derived from a _Property
 143 class) to be defined for an SPDX object
144 """
145 def __new__(mcls, name, bases, attrs):
146 attrs["_properties"] = {}
147
148 for key in attrs.keys():
149 if isinstance(attrs[key], _Property):
150 prop = attrs[key]
151 attrs["_properties"][key] = prop
152 prop.set_property(attrs, key)
153
154 return super().__new__(mcls, name, bases, attrs)
155
156
157class SPDXObject(metaclass=MetaSPDXObject):
158 """
159 The base SPDX object; all SPDX spec classes must derive from this class
160 """
161 def __init__(self, **d):
162 self._spdx = {}
163
164 for name, prop in self._properties.items():
165 prop.setdefault(self._spdx, name)
166 if name in d:
167 self._spdx[name] = prop.init(d[name])
168
169 def serializer(self):
170 return self._spdx
171
172 def __setattr__(self, name, value):
173 if name in self._properties or name == "_spdx":
174 super().__setattr__(name, value)
175 return
176 raise KeyError("%r is not a valid SPDX property" % name)
177
178#
179# These are the SPDX objects implemented from the spec. The *only* properties
180# that can be added to these objects are ones directly specified in the SPDX
 181# spec; however, you may add helper functions to make operations easier.
182#
183# Defaults should *only* be specified if the SPDX spec says there is a certain
184# required value for a field (e.g. dataLicense), or if the field is mandatory
 185# and has some sane "this field is unknown" value (e.g. "NOASSERTION")
186#
187
188class SPDXAnnotation(SPDXObject):
189 annotationDate = _String()
190 annotationType = _String()
191 annotator = _String()
192 comment = _String()
193
194class SPDXChecksum(SPDXObject):
195 algorithm = _String()
196 checksumValue = _String()
197
198
199class SPDXRelationship(SPDXObject):
200 spdxElementId = _String()
201 relatedSpdxElement = _String()
202 relationshipType = _String()
203 comment = _String()
204 annotations = _ObjectList(SPDXAnnotation)
205
206
207class SPDXExternalReference(SPDXObject):
208 referenceCategory = _String()
209 referenceType = _String()
210 referenceLocator = _String()
211
212
213class SPDXPackageVerificationCode(SPDXObject):
214 packageVerificationCodeValue = _String()
215 packageVerificationCodeExcludedFiles = _StringList()
216
217
218class SPDXPackage(SPDXObject):
219 ALLOWED_CHECKSUMS = [
220 "SHA1",
221 "SHA224",
222 "SHA256",
223 "SHA384",
224 "SHA512",
225 "MD2",
226 "MD4",
227 "MD5",
228 "MD6",
229 ]
230
231 name = _String()
232 SPDXID = _String()
233 versionInfo = _String()
234 downloadLocation = _String(default="NOASSERTION")
235 supplier = _String(default="NOASSERTION")
236 homepage = _String()
237 licenseConcluded = _String(default="NOASSERTION")
238 licenseDeclared = _String(default="NOASSERTION")
239 summary = _String()
240 description = _String()
241 sourceInfo = _String()
242 copyrightText = _String(default="NOASSERTION")
243 licenseInfoFromFiles = _StringList(default=["NOASSERTION"])
244 externalRefs = _ObjectList(SPDXExternalReference)
245 packageVerificationCode = _Object(SPDXPackageVerificationCode)
246 hasFiles = _StringList()
247 packageFileName = _String()
248 annotations = _ObjectList(SPDXAnnotation)
249 checksums = _ObjectList(SPDXChecksum)
250
251
252class SPDXFile(SPDXObject):
253 SPDXID = _String()
254 fileName = _String()
255 licenseConcluded = _String(default="NOASSERTION")
256 copyrightText = _String(default="NOASSERTION")
257 licenseInfoInFiles = _StringList(default=["NOASSERTION"])
258 checksums = _ObjectList(SPDXChecksum)
259 fileTypes = _StringList()
260
261
262class SPDXCreationInfo(SPDXObject):
263 created = _String()
264 licenseListVersion = _String()
265 comment = _String()
266 creators = _StringList()
267
268
269class SPDXExternalDocumentRef(SPDXObject):
270 externalDocumentId = _String()
271 spdxDocument = _String()
272 checksum = _Object(SPDXChecksum)
273
274
275class SPDXExtractedLicensingInfo(SPDXObject):
276 name = _String()
277 comment = _String()
278 licenseId = _String()
279 extractedText = _String()
280
281
282class SPDXDocument(SPDXObject):
283 spdxVersion = _String(default="SPDX-" + SPDX_VERSION)
284 dataLicense = _String(default="CC0-1.0")
285 SPDXID = _String(default="SPDXRef-DOCUMENT")
286 name = _String()
287 documentNamespace = _String()
288 creationInfo = _Object(SPDXCreationInfo)
289 packages = _ObjectList(SPDXPackage)
290 files = _ObjectList(SPDXFile)
291 relationships = _ObjectList(SPDXRelationship)
292 externalDocumentRefs = _ObjectList(SPDXExternalDocumentRef)
293 hasExtractedLicensingInfos = _ObjectList(SPDXExtractedLicensingInfo)
294
295 def __init__(self, **d):
296 super().__init__(**d)
297
298 def to_json(self, f, *, sort_keys=False, indent=None, separators=None):
299 class Encoder(json.JSONEncoder):
300 def default(self, o):
301 if isinstance(o, SPDXObject):
302 return o.serializer()
303
304 return super().default(o)
305
306 sha1 = hashlib.sha1()
307 for chunk in Encoder(
308 sort_keys=sort_keys,
309 indent=indent,
310 separators=separators,
311 ).iterencode(self):
312 chunk = chunk.encode("utf-8")
313 f.write(chunk)
314 sha1.update(chunk)
315
316 return sha1.hexdigest()
317
318 @classmethod
319 def from_json(cls, f):
320 return cls(**json.load(f))
321
322 def add_relationship(self, _from, relationship, _to, *, comment=None, annotation=None):
323 if isinstance(_from, SPDXObject):
324 from_spdxid = _from.SPDXID
325 else:
326 from_spdxid = _from
327
328 if isinstance(_to, SPDXObject):
329 to_spdxid = _to.SPDXID
330 else:
331 to_spdxid = _to
332
333 r = SPDXRelationship(
334 spdxElementId=from_spdxid,
335 relatedSpdxElement=to_spdxid,
336 relationshipType=relationship,
337 )
338
339 if comment is not None:
340 r.comment = comment
341
342 if annotation is not None:
343 r.annotations.append(annotation)
344
345 self.relationships.append(r)
346
347 def find_by_spdxid(self, spdxid):
348 for o in itertools.chain(self.packages, self.files):
349 if o.SPDXID == spdxid:
350 return o
351 return None
352
353 def find_external_document_ref(self, namespace):
354 for r in self.externalDocumentRefs:
355 if r.spdxDocument == namespace:
356 return r
357 return None
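
The classes above give a small declarative model: only spec-defined attributes can be set, defaults are filled in lazily, and to_json() both serializes and hashes the document. A standalone sketch (the document contents are illustrative; stdout is used here only for demonstration):

import sys
import oe.spdx

doc = oe.spdx.SPDXDocument(name="example")
doc.creationInfo.created = "2023-01-01T00:00:00Z"

pkg = oe.spdx.SPDXPackage(name="hello", SPDXID="SPDXRef-Package-hello")
doc.packages.append(pkg)

# The document DESCRIBES the package; add_relationship() accepts either
# SPDX objects or raw SPDXID strings
doc.add_relationship(doc, "DESCRIBES", pkg)

try:
    pkg.nosuchfield = "x"   # rejected: not a spec-defined property
except KeyError:
    pass

# to_json() expects a binary file object and returns the stream's SHA1
doc_sha1 = doc.to_json(sys.stdout.buffer, sort_keys=True, indent=2)
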
diff --git a/meta/lib/oe/sstatesig.py b/meta/lib/oe/sstatesig.py
index 6cd6e11acc..a46e5502ab 100644
--- a/meta/lib/oe/sstatesig.py
+++ b/meta/lib/oe/sstatesig.py
@@ -1,9 +1,12 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4import bb.siggen 6import bb.siggen
5import bb.runqueue 7import bb.runqueue
6import oe 8import oe
9import netrc
7 10
8def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCaches): 11def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCaches):
9 # Return True if we should keep the dependency, False to drop it 12 # Return True if we should keep the dependency, False to drop it
@@ -28,6 +31,12 @@ def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCaches):
28 depmc, _, deptaskname, depmcfn = bb.runqueue.split_tid_mcfn(dep) 31 depmc, _, deptaskname, depmcfn = bb.runqueue.split_tid_mcfn(dep)
29 mc, _ = bb.runqueue.split_mc(fn) 32 mc, _ = bb.runqueue.split_mc(fn)
30 33
34 # We can skip the rm_work task signature to avoid running the task
 35 # when we remove some tasks from the dependency chain,
 36 # e.g. INHERIT:remove = "create-spdx" would otherwise trigger do_rm_work
37 if task == "do_rm_work":
38 return False
39
31 # (Almost) always include our own inter-task dependencies (unless it comes 40 # (Almost) always include our own inter-task dependencies (unless it comes
32 # from a mcdepends). The exception is the special 41 # from a mcdepends). The exception is the special
33 # do_kernel_configme->do_unpack_and_patch dependency from archiver.bbclass. 42 # do_kernel_configme->do_unpack_and_patch dependency from archiver.bbclass.
@@ -59,7 +68,7 @@ def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCaches):
59 return False 68 return False
60 69
61 # Kernel modules are well namespaced. We don't want to depend on the kernel's checksum 70 # Kernel modules are well namespaced. We don't want to depend on the kernel's checksum
62 # if we're just doing an RRECOMMENDS_xxx = "kernel-module-*", not least because the checksum 71 # if we're just doing an RRECOMMENDS:xxx = "kernel-module-*", not least because the checksum
63 # is machine specific. 72 # is machine specific.
64 # Therefore if we're not a kernel or a module recipe (inheriting the kernel classes) 73 # Therefore if we're not a kernel or a module recipe (inheriting the kernel classes)
 65 # and we recommend a kernel-module, we exclude the dependency. 74 # and we recommend a kernel-module, we exclude the dependency.
@@ -84,15 +93,6 @@ def sstate_lockedsigs(d):
84 sigs[pn][task] = [h, siggen_lockedsigs_var] 93 sigs[pn][task] = [h, siggen_lockedsigs_var]
85 return sigs 94 return sigs
86 95
87class SignatureGeneratorOEBasic(bb.siggen.SignatureGeneratorBasic):
88 name = "OEBasic"
89 def init_rundepcheck(self, data):
90 self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split()
91 self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split()
92 pass
93 def rundep_check(self, fn, recipename, task, dep, depname, dataCaches = None):
94 return sstate_rundepfilter(self, fn, recipename, task, dep, depname, dataCaches)
95
96class SignatureGeneratorOEBasicHashMixIn(object): 96class SignatureGeneratorOEBasicHashMixIn(object):
97 supports_multiconfig_datacaches = True 97 supports_multiconfig_datacaches = True
98 98
@@ -105,10 +105,11 @@ class SignatureGeneratorOEBasicHashMixIn(object):
105 self.lockedhashfn = {} 105 self.lockedhashfn = {}
106 self.machine = data.getVar("MACHINE") 106 self.machine = data.getVar("MACHINE")
107 self.mismatch_msgs = [] 107 self.mismatch_msgs = []
108 self.mismatch_number = 0
109 self.lockedsigs_msgs = ""
108 self.unlockedrecipes = (data.getVar("SIGGEN_UNLOCKED_RECIPES") or 110 self.unlockedrecipes = (data.getVar("SIGGEN_UNLOCKED_RECIPES") or
109 "").split() 111 "").split()
110 self.unlockedrecipes = { k: "" for k in self.unlockedrecipes } 112 self.unlockedrecipes = { k: "" for k in self.unlockedrecipes }
111 self.buildarch = data.getVar('BUILD_ARCH')
112 self._internal = False 113 self._internal = False
113 pass 114 pass
114 115
@@ -142,18 +143,12 @@ class SignatureGeneratorOEBasicHashMixIn(object):
142 super().set_taskdata(data[3:]) 143 super().set_taskdata(data[3:])
143 144
144 def dump_sigs(self, dataCache, options): 145 def dump_sigs(self, dataCache, options):
145 sigfile = os.getcwd() + "/locked-sigs.inc" 146 if 'lockedsigs' in options:
146 bb.plain("Writing locked sigs to %s" % sigfile) 147 sigfile = os.getcwd() + "/locked-sigs.inc"
147 self.dump_lockedsigs(sigfile) 148 bb.plain("Writing locked sigs to %s" % sigfile)
149 self.dump_lockedsigs(sigfile)
148 return super(bb.siggen.SignatureGeneratorBasicHash, self).dump_sigs(dataCache, options) 150 return super(bb.siggen.SignatureGeneratorBasicHash, self).dump_sigs(dataCache, options)
149 151
150 def prep_taskhash(self, tid, deps, dataCaches):
151 super().prep_taskhash(tid, deps, dataCaches)
152 if hasattr(self, "extramethod"):
153 (mc, _, _, fn) = bb.runqueue.split_tid_mcfn(tid)
154 inherits = " ".join(dataCaches[mc].inherits[fn])
155 if inherits.find("/native.bbclass") != -1 or inherits.find("/cross.bbclass") != -1:
156 self.extramethod[tid] = ":" + self.buildarch
157 152
158 def get_taskhash(self, tid, deps, dataCaches): 153 def get_taskhash(self, tid, deps, dataCaches):
159 if tid in self.lockedhashes: 154 if tid in self.lockedhashes:
@@ -196,6 +191,7 @@ class SignatureGeneratorOEBasicHashMixIn(object):
196 #bb.warn("Using %s %s %s" % (recipename, task, h)) 191 #bb.warn("Using %s %s %s" % (recipename, task, h))
197 192
198 if h != h_locked and h_locked != unihash: 193 if h != h_locked and h_locked != unihash:
194 self.mismatch_number += 1
199 self.mismatch_msgs.append('The %s:%s sig is computed to be %s, but the sig is locked to %s in %s' 195 self.mismatch_msgs.append('The %s:%s sig is computed to be %s, but the sig is locked to %s in %s'
200 % (recipename, task, h, h_locked, var)) 196 % (recipename, task, h, h_locked, var))
201 197
@@ -210,10 +206,10 @@ class SignatureGeneratorOEBasicHashMixIn(object):
210 return self.lockedhashes[tid] 206 return self.lockedhashes[tid]
211 return super().get_stampfile_hash(tid) 207 return super().get_stampfile_hash(tid)
212 208
213 def get_unihash(self, tid): 209 def get_cached_unihash(self, tid):
214 if tid in self.lockedhashes and self.lockedhashes[tid] and not self._internal: 210 if tid in self.lockedhashes and self.lockedhashes[tid] and not self._internal:
215 return self.lockedhashes[tid] 211 return self.lockedhashes[tid]
216 return super().get_unihash(tid) 212 return super().get_cached_unihash(tid)
217 213
218 def dump_sigtask(self, fn, task, stampbase, runtime): 214 def dump_sigtask(self, fn, task, stampbase, runtime):
219 tid = fn + ":" + task 215 tid = fn + ":" + task
@@ -224,6 +220,9 @@ class SignatureGeneratorOEBasicHashMixIn(object):
224 def dump_lockedsigs(self, sigfile, taskfilter=None): 220 def dump_lockedsigs(self, sigfile, taskfilter=None):
225 types = {} 221 types = {}
226 for tid in self.runtaskdeps: 222 for tid in self.runtaskdeps:
223 # Bitbake changed this to a tuple in newer versions
224 if isinstance(tid, tuple):
225 tid = tid[1]
227 if taskfilter: 226 if taskfilter:
228 if not tid in taskfilter: 227 if not tid in taskfilter:
229 continue 228 continue
@@ -246,15 +245,26 @@ class SignatureGeneratorOEBasicHashMixIn(object):
246 continue 245 continue
247 f.write(" " + self.lockedpnmap[fn] + ":" + task + ":" + self.get_unihash(tid) + " \\\n") 246 f.write(" " + self.lockedpnmap[fn] + ":" + task + ":" + self.get_unihash(tid) + " \\\n")
248 f.write(' "\n') 247 f.write(' "\n')
249 f.write('SIGGEN_LOCKEDSIGS_TYPES_%s = "%s"' % (self.machine, " ".join(l))) 248 f.write('SIGGEN_LOCKEDSIGS_TYPES:%s = "%s"' % (self.machine, " ".join(l)))
249
250 def dump_siglist(self, sigfile, path_prefix_strip=None):
251 def strip_fn(fn):
252 nonlocal path_prefix_strip
253 if not path_prefix_strip:
254 return fn
255
256 fn_exp = fn.split(":")
257 if fn_exp[-1].startswith(path_prefix_strip):
258 fn_exp[-1] = fn_exp[-1][len(path_prefix_strip):]
259
260 return ":".join(fn_exp)
250 261
251 def dump_siglist(self, sigfile):
252 with open(sigfile, "w") as f: 262 with open(sigfile, "w") as f:
253 tasks = [] 263 tasks = []
254 for taskitem in self.taskhash: 264 for taskitem in self.taskhash:
255 (fn, task) = taskitem.rsplit(":", 1) 265 (fn, task) = taskitem.rsplit(":", 1)
256 pn = self.lockedpnmap[fn] 266 pn = self.lockedpnmap[fn]
257 tasks.append((pn, task, fn, self.taskhash[taskitem])) 267 tasks.append((pn, task, strip_fn(fn), self.taskhash[taskitem]))
258 for (pn, task, fn, taskhash) in sorted(tasks): 268 for (pn, task, fn, taskhash) in sorted(tasks):
259 f.write('%s:%s %s %s\n' % (pn, task, fn, taskhash)) 269 f.write('%s:%s %s %s\n' % (pn, task, fn, taskhash))
260 270
@@ -262,6 +272,15 @@ class SignatureGeneratorOEBasicHashMixIn(object):
262 warn_msgs = [] 272 warn_msgs = []
263 error_msgs = [] 273 error_msgs = []
264 sstate_missing_msgs = [] 274 sstate_missing_msgs = []
275 info_msgs = None
276
277 if self.lockedsigs:
278 if len(self.lockedsigs) > 10:
 279 self.lockedsigs_msgs = "There are %s recipes with locked tasks (%s task(s) have non-matching signatures)" % (len(self.lockedsigs), self.mismatch_number)
280 else:
281 self.lockedsigs_msgs = "The following recipes have locked tasks:"
282 for pn in self.lockedsigs:
283 self.lockedsigs_msgs += " %s" % (pn)
265 284
266 for tid in sq_data['hash']: 285 for tid in sq_data['hash']:
267 if tid not in found: 286 if tid not in found:
@@ -274,7 +293,9 @@ class SignatureGeneratorOEBasicHashMixIn(object):
274 % (pn, taskname, sq_data['hash'][tid])) 293 % (pn, taskname, sq_data['hash'][tid]))
275 294
276 checklevel = d.getVar("SIGGEN_LOCKEDSIGS_TASKSIG_CHECK") 295 checklevel = d.getVar("SIGGEN_LOCKEDSIGS_TASKSIG_CHECK")
277 if checklevel == 'warn': 296 if checklevel == 'info':
297 info_msgs = self.lockedsigs_msgs
298 if checklevel == 'warn' or checklevel == 'info':
278 warn_msgs += self.mismatch_msgs 299 warn_msgs += self.mismatch_msgs
279 elif checklevel == 'error': 300 elif checklevel == 'error':
280 error_msgs += self.mismatch_msgs 301 error_msgs += self.mismatch_msgs
@@ -285,6 +306,8 @@ class SignatureGeneratorOEBasicHashMixIn(object):
285 elif checklevel == 'error': 306 elif checklevel == 'error':
286 error_msgs += sstate_missing_msgs 307 error_msgs += sstate_missing_msgs
287 308
309 if info_msgs:
310 bb.note(info_msgs)
288 if warn_msgs: 311 if warn_msgs:
289 bb.warn("\n".join(warn_msgs)) 312 bb.warn("\n".join(warn_msgs))
290 if error_msgs: 313 if error_msgs:
@@ -304,9 +327,21 @@ class SignatureGeneratorOEEquivHash(SignatureGeneratorOEBasicHashMixIn, bb.sigge
304 self.method = data.getVar('SSTATE_HASHEQUIV_METHOD') 327 self.method = data.getVar('SSTATE_HASHEQUIV_METHOD')
305 if not self.method: 328 if not self.method:
306 bb.fatal("OEEquivHash requires SSTATE_HASHEQUIV_METHOD to be set") 329 bb.fatal("OEEquivHash requires SSTATE_HASHEQUIV_METHOD to be set")
330 self.max_parallel = int(data.getVar('BB_HASHSERVE_MAX_PARALLEL') or 1)
331 self.username = data.getVar("BB_HASHSERVE_USERNAME")
332 self.password = data.getVar("BB_HASHSERVE_PASSWORD")
333 if not self.username or not self.password:
334 try:
335 n = netrc.netrc()
336 auth = n.authenticators(self.server)
337 if auth is not None:
338 self.username, _, self.password = auth
339 except FileNotFoundError:
340 pass
341 except netrc.NetrcParseError as e:
342 bb.warn("Error parsing %s:%s: %s" % (e.filename, str(e.lineno), e.msg))
307 343
308# Insert these classes into siggen's namespace so it can see and select them 344# Insert these classes into siggen's namespace so it can see and select them
309bb.siggen.SignatureGeneratorOEBasic = SignatureGeneratorOEBasic
310bb.siggen.SignatureGeneratorOEBasicHash = SignatureGeneratorOEBasicHash 345bb.siggen.SignatureGeneratorOEBasicHash = SignatureGeneratorOEBasicHash
311bb.siggen.SignatureGeneratorOEEquivHash = SignatureGeneratorOEEquivHash 346bb.siggen.SignatureGeneratorOEEquivHash = SignatureGeneratorOEEquivHash
312 347
@@ -320,14 +355,14 @@ def find_siginfo(pn, taskname, taskhashlist, d):
320 if not taskname: 355 if not taskname:
321 # We have to derive pn and taskname 356 # We have to derive pn and taskname
322 key = pn 357 key = pn
323 splitit = key.split('.bb:') 358 if key.startswith("mc:"):
324 taskname = splitit[1] 359 # mc:<mc>:<pn>:<task>
325 pn = os.path.basename(splitit[0]).split('_')[0] 360 _, _, pn, taskname = key.split(':', 3)
326 if key.startswith('virtual:native:'): 361 else:
327 pn = pn + '-native' 362 # <pn>:<task>
363 pn, taskname = key.split(':', 1)
328 364
329 hashfiles = {} 365 hashfiles = {}
330 filedates = {}
331 366
332 def get_hashval(siginfo): 367 def get_hashval(siginfo):
333 if siginfo.endswith('.siginfo'): 368 if siginfo.endswith('.siginfo'):
@@ -335,6 +370,9 @@ def find_siginfo(pn, taskname, taskhashlist, d):
335 else: 370 else:
336 return siginfo.rpartition('.')[2] 371 return siginfo.rpartition('.')[2]
337 372
373 def get_time(fullpath):
374 return os.stat(fullpath).st_mtime
375
338 # First search in stamps dir 376 # First search in stamps dir
339 localdata = d.createCopy() 377 localdata = d.createCopy()
340 localdata.setVar('MULTIMACH_TARGET_SYS', '*') 378 localdata.setVar('MULTIMACH_TARGET_SYS', '*')
@@ -350,24 +388,21 @@ def find_siginfo(pn, taskname, taskhashlist, d):
350 filespec = '%s.%s.sigdata.*' % (stamp, taskname) 388 filespec = '%s.%s.sigdata.*' % (stamp, taskname)
351 foundall = False 389 foundall = False
352 import glob 390 import glob
391 bb.debug(1, "Calling glob.glob on {}".format(filespec))
353 for fullpath in glob.glob(filespec): 392 for fullpath in glob.glob(filespec):
354 match = False 393 match = False
355 if taskhashlist: 394 if taskhashlist:
356 for taskhash in taskhashlist: 395 for taskhash in taskhashlist:
357 if fullpath.endswith('.%s' % taskhash): 396 if fullpath.endswith('.%s' % taskhash):
358 hashfiles[taskhash] = fullpath 397 hashfiles[taskhash] = {'path':fullpath, 'sstate':False, 'time':get_time(fullpath)}
359 if len(hashfiles) == len(taskhashlist): 398 if len(hashfiles) == len(taskhashlist):
360 foundall = True 399 foundall = True
361 break 400 break
362 else: 401 else:
363 try:
364 filedates[fullpath] = os.stat(fullpath).st_mtime
365 except OSError:
366 continue
367 hashval = get_hashval(fullpath) 402 hashval = get_hashval(fullpath)
368 hashfiles[hashval] = fullpath 403 hashfiles[hashval] = {'path':fullpath, 'sstate':False, 'time':get_time(fullpath)}
369 404
370 if not taskhashlist or (len(filedates) < 2 and not foundall): 405 if not taskhashlist or (len(hashfiles) < 2 and not foundall):
371 # That didn't work, look in sstate-cache 406 # That didn't work, look in sstate-cache
372 hashes = taskhashlist or ['?' * 64] 407 hashes = taskhashlist or ['?' * 64]
373 localdata = bb.data.createCopy(d) 408 localdata = bb.data.createCopy(d)
@@ -376,35 +411,32 @@ def find_siginfo(pn, taskname, taskhashlist, d):
376 localdata.setVar('TARGET_VENDOR', '*') 411 localdata.setVar('TARGET_VENDOR', '*')
377 localdata.setVar('TARGET_OS', '*') 412 localdata.setVar('TARGET_OS', '*')
378 localdata.setVar('PN', pn) 413 localdata.setVar('PN', pn)
414 # gcc-source is a special case, same as with local stamps above
415 if pn.startswith("gcc-source"):
416 localdata.setVar('PN', "gcc")
379 localdata.setVar('PV', '*') 417 localdata.setVar('PV', '*')
380 localdata.setVar('PR', '*') 418 localdata.setVar('PR', '*')
381 localdata.setVar('BB_TASKHASH', hashval) 419 localdata.setVar('BB_TASKHASH', hashval)
420 localdata.setVar('SSTATE_CURRTASK', taskname[3:])
382 swspec = localdata.getVar('SSTATE_SWSPEC') 421 swspec = localdata.getVar('SSTATE_SWSPEC')
383 if taskname in ['do_fetch', 'do_unpack', 'do_patch', 'do_populate_lic', 'do_preconfigure'] and swspec: 422 if taskname in ['do_fetch', 'do_unpack', 'do_patch', 'do_populate_lic', 'do_preconfigure'] and swspec:
384 localdata.setVar('SSTATE_PKGSPEC', '${SSTATE_SWSPEC}') 423 localdata.setVar('SSTATE_PKGSPEC', '${SSTATE_SWSPEC}')
385 elif pn.endswith('-native') or "-cross-" in pn or "-crosssdk-" in pn: 424 elif pn.endswith('-native') or "-cross-" in pn or "-crosssdk-" in pn:
386 localdata.setVar('SSTATE_EXTRAPATH', "${NATIVELSBSTRING}/") 425 localdata.setVar('SSTATE_EXTRAPATH', "${NATIVELSBSTRING}/")
387 sstatename = taskname[3:] 426 filespec = '%s.siginfo' % localdata.getVar('SSTATE_PKG')
388 filespec = '%s_%s.*.siginfo' % (localdata.getVar('SSTATE_PKG'), sstatename)
389 427
428 bb.debug(1, "Calling glob.glob on {}".format(filespec))
390 matchedfiles = glob.glob(filespec) 429 matchedfiles = glob.glob(filespec)
391 for fullpath in matchedfiles: 430 for fullpath in matchedfiles:
392 actual_hashval = get_hashval(fullpath) 431 actual_hashval = get_hashval(fullpath)
393 if actual_hashval in hashfiles: 432 if actual_hashval in hashfiles:
394 continue 433 continue
395 hashfiles[hashval] = fullpath 434 hashfiles[actual_hashval] = {'path':fullpath, 'sstate':True, 'time':get_time(fullpath)}
396 if not taskhashlist:
397 try:
398 filedates[fullpath] = os.stat(fullpath).st_mtime
399 except:
400 continue
401 435
402 if taskhashlist: 436 return hashfiles
403 return hashfiles
404 else:
405 return filedates
406 437
407bb.siggen.find_siginfo = find_siginfo 438bb.siggen.find_siginfo = find_siginfo
439bb.siggen.find_siginfo_version = 2
408 440
409 441
410def sstate_get_manifest_filename(task, d): 442def sstate_get_manifest_filename(task, d):
@@ -440,7 +472,7 @@ def find_sstate_manifest(taskdata, taskdata2, taskname, d, multilibcache):
440 elif "-cross-canadian" in taskdata: 472 elif "-cross-canadian" in taskdata:
441 pkgarchs = ["${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX}"] 473 pkgarchs = ["${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX}"]
442 elif "-cross-" in taskdata: 474 elif "-cross-" in taskdata:
443 pkgarchs = ["${BUILD_ARCH}_${TARGET_ARCH}"] 475 pkgarchs = ["${BUILD_ARCH}"]
444 elif "-crosssdk" in taskdata: 476 elif "-crosssdk" in taskdata:
445 pkgarchs = ["${BUILD_ARCH}_${SDK_ARCH}_${SDK_OS}"] 477 pkgarchs = ["${BUILD_ARCH}_${SDK_ARCH}_${SDK_OS}"]
446 else: 478 else:
@@ -449,11 +481,15 @@ def find_sstate_manifest(taskdata, taskdata2, taskname, d, multilibcache):
449 pkgarchs.append('allarch') 481 pkgarchs.append('allarch')
450 pkgarchs.append('${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX}') 482 pkgarchs.append('${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX}')
451 483
484 searched_manifests = []
485
452 for pkgarch in pkgarchs: 486 for pkgarch in pkgarchs:
453 manifest = d2.expand("${SSTATE_MANIFESTS}/manifest-%s-%s.%s" % (pkgarch, taskdata, taskname)) 487 manifest = d2.expand("${SSTATE_MANIFESTS}/manifest-%s-%s.%s" % (pkgarch, taskdata, taskname))
454 if os.path.exists(manifest): 488 if os.path.exists(manifest):
455 return manifest, d2 489 return manifest, d2
456 bb.error("Manifest %s not found in %s (variant '%s')?" % (manifest, d2.expand(" ".join(pkgarchs)), variant)) 490 searched_manifests.append(manifest)
491 bb.fatal("The sstate manifest for task '%s:%s' (multilib variant '%s') could not be found.\nThe pkgarchs considered were: %s.\nBut none of these manifests exists:\n %s"
492 % (taskdata, taskname, variant, d2.expand(", ".join(pkgarchs)),"\n ".join(searched_manifests)))
457 return None, d2 493 return None, d2
458 494
459def OEOuthashBasic(path, sigfile, task, d): 495def OEOuthashBasic(path, sigfile, task, d):
@@ -467,6 +503,8 @@ def OEOuthashBasic(path, sigfile, task, d):
467 import stat 503 import stat
468 import pwd 504 import pwd
469 import grp 505 import grp
506 import re
507 import fnmatch
470 508
471 def update_hash(s): 509 def update_hash(s):
472 s = s.encode('utf-8') 510 s = s.encode('utf-8')
@@ -476,20 +514,37 @@ def OEOuthashBasic(path, sigfile, task, d):
476 514
477 h = hashlib.sha256() 515 h = hashlib.sha256()
478 prev_dir = os.getcwd() 516 prev_dir = os.getcwd()
517 corebase = d.getVar("COREBASE")
518 tmpdir = d.getVar("TMPDIR")
479 include_owners = os.environ.get('PSEUDO_DISABLED') == '0' 519 include_owners = os.environ.get('PSEUDO_DISABLED') == '0'
480 if "package_write_" in task or task == "package_qa": 520 if "package_write_" in task or task == "package_qa":
481 include_owners = False 521 include_owners = False
482 include_timestamps = False 522 include_timestamps = False
523 include_root = True
483 if task == "package": 524 if task == "package":
484 include_timestamps = d.getVar('BUILD_REPRODUCIBLE_BINARIES') == '1' 525 include_timestamps = True
485 extra_content = d.getVar('HASHEQUIV_HASH_VERSION') 526 include_root = False
527 hash_version = d.getVar('HASHEQUIV_HASH_VERSION')
528 extra_sigdata = d.getVar("HASHEQUIV_EXTRA_SIGDATA")
529
530 filemaps = {}
531 for m in (d.getVar('SSTATE_HASHEQUIV_FILEMAP') or '').split():
532 entry = m.split(":")
533 if len(entry) != 3 or entry[0] != task:
534 continue
535 filemaps.setdefault(entry[1], [])
536 filemaps[entry[1]].append(entry[2])
486 537
487 try: 538 try:
488 os.chdir(path) 539 os.chdir(path)
540 basepath = os.path.normpath(path)
489 541
490 update_hash("OEOuthashBasic\n") 542 update_hash("OEOuthashBasic\n")
491 if extra_content: 543 if hash_version:
492 update_hash(extra_content + "\n") 544 update_hash(hash_version + "\n")
545
546 if extra_sigdata:
547 update_hash(extra_sigdata + "\n")
493 548
494 # It is only currently useful to get equivalent hashes for things that 549 # It is only currently useful to get equivalent hashes for things that
495 # can be restored from sstate. Since the sstate object is named using 550 # can be restored from sstate. Since the sstate object is named using
@@ -534,28 +589,29 @@ def OEOuthashBasic(path, sigfile, task, d):
534 else: 589 else:
535 add_perm(stat.S_IXUSR, 'x') 590 add_perm(stat.S_IXUSR, 'x')
536 591
537 add_perm(stat.S_IRGRP, 'r') 592 if include_owners:
538 add_perm(stat.S_IWGRP, 'w') 593 # Group/other permissions are only relevant in pseudo context
539 if stat.S_ISGID & s.st_mode: 594 add_perm(stat.S_IRGRP, 'r')
540 add_perm(stat.S_IXGRP, 's', 'S') 595 add_perm(stat.S_IWGRP, 'w')
541 else: 596 if stat.S_ISGID & s.st_mode:
542 add_perm(stat.S_IXGRP, 'x') 597 add_perm(stat.S_IXGRP, 's', 'S')
598 else:
599 add_perm(stat.S_IXGRP, 'x')
543 600
544 add_perm(stat.S_IROTH, 'r') 601 add_perm(stat.S_IROTH, 'r')
545 add_perm(stat.S_IWOTH, 'w') 602 add_perm(stat.S_IWOTH, 'w')
546 if stat.S_ISVTX & s.st_mode: 603 if stat.S_ISVTX & s.st_mode:
547 update_hash('t') 604 update_hash('t')
548 else: 605 else:
549 add_perm(stat.S_IXOTH, 'x') 606 add_perm(stat.S_IXOTH, 'x')
550 607
551 if include_owners:
552 try: 608 try:
553 update_hash(" %10s" % pwd.getpwuid(s.st_uid).pw_name) 609 update_hash(" %10s" % pwd.getpwuid(s.st_uid).pw_name)
554 update_hash(" %10s" % grp.getgrgid(s.st_gid).gr_name) 610 update_hash(" %10s" % grp.getgrgid(s.st_gid).gr_name)
555 except KeyError as e: 611 except KeyError as e:
556 bb.warn("KeyError in %s" % path)
557 msg = ("KeyError: %s\nPath %s is owned by uid %d, gid %d, which doesn't match " 612 msg = ("KeyError: %s\nPath %s is owned by uid %d, gid %d, which doesn't match "
558 "any user/group on target. This may be due to host contamination." % (e, path, s.st_uid, s.st_gid)) 613 "any user/group on target. This may be due to host contamination." %
614 (e, os.path.abspath(path), s.st_uid, s.st_gid))
559 raise Exception(msg).with_traceback(e.__traceback__) 615 raise Exception(msg).with_traceback(e.__traceback__)
560 616
561 if include_timestamps: 617 if include_timestamps:
@@ -567,8 +623,13 @@ def OEOuthashBasic(path, sigfile, task, d):
567 else: 623 else:
568 update_hash(" " * 9) 624 update_hash(" " * 9)
569 625
626 filterfile = False
627 for entry in filemaps:
628 if fnmatch.fnmatch(path, entry):
629 filterfile = True
630
570 update_hash(" ") 631 update_hash(" ")
571 if stat.S_ISREG(s.st_mode): 632 if stat.S_ISREG(s.st_mode) and not filterfile:
572 update_hash("%10d" % s.st_size) 633 update_hash("%10d" % s.st_size)
573 else: 634 else:
574 update_hash(" " * 10) 635 update_hash(" " * 10)
@@ -577,9 +638,24 @@ def OEOuthashBasic(path, sigfile, task, d):
577 fh = hashlib.sha256() 638 fh = hashlib.sha256()
578 if stat.S_ISREG(s.st_mode): 639 if stat.S_ISREG(s.st_mode):
579 # Hash file contents 640 # Hash file contents
580 with open(path, 'rb') as d: 641 if filterfile:
581 for chunk in iter(lambda: d.read(4096), b""): 642 # Need to ignore paths in crossscripts and postinst-useradd files.
643 with open(path, 'rb') as d:
644 chunk = d.read()
645 chunk = chunk.replace(bytes(basepath, encoding='utf8'), b'')
646 for entry in filemaps:
647 if not fnmatch.fnmatch(path, entry):
648 continue
649 for r in filemaps[entry]:
650 if r.startswith("regex-"):
651 chunk = re.sub(bytes(r[6:], encoding='utf8'), b'', chunk)
652 else:
653 chunk = chunk.replace(bytes(r, encoding='utf8'), b'')
582 fh.update(chunk) 654 fh.update(chunk)
655 else:
656 with open(path, 'rb') as d:
657 for chunk in iter(lambda: d.read(4096), b""):
658 fh.update(chunk)
583 update_hash(fh.hexdigest()) 659 update_hash(fh.hexdigest())
584 else: 660 else:
585 update_hash(" " * len(fh.hexdigest())) 661 update_hash(" " * len(fh.hexdigest()))
@@ -592,11 +668,16 @@ def OEOuthashBasic(path, sigfile, task, d):
592 update_hash("\n") 668 update_hash("\n")
593 669
594 # Process this directory and all its child files 670 # Process this directory and all its child files
595 process(root) 671 if include_root or root != ".":
672 process(root)
596 for f in files: 673 for f in files:
597 if f == 'fixmepath': 674 if f == 'fixmepath':
598 continue 675 continue
599 process(os.path.join(root, f)) 676 process(os.path.join(root, f))
677
678 for dir in dirs:
679 if os.path.islink(os.path.join(root, dir)):
680 process(os.path.join(root, dir))
600 finally: 681 finally:
601 os.chdir(prev_dir) 682 os.chdir(prev_dir)
602 683
diff --git a/meta/lib/oe/terminal.py b/meta/lib/oe/terminal.py
index 61c2687ef4..4412bc14c1 100644
--- a/meta/lib/oe/terminal.py
+++ b/meta/lib/oe/terminal.py
@@ -1,11 +1,12 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4import logging 6import logging
5import oe.classutils 7import oe.classutils
6import shlex 8import shlex
7from bb.process import Popen, ExecutionError 9from bb.process import Popen, ExecutionError
8from distutils.version import LooseVersion
9 10
10logger = logging.getLogger('BitBake.OE.Terminal') 11logger = logging.getLogger('BitBake.OE.Terminal')
11 12
@@ -31,9 +32,10 @@ class Registry(oe.classutils.ClassRegistry):
31 32
32class Terminal(Popen, metaclass=Registry): 33class Terminal(Popen, metaclass=Registry):
33 def __init__(self, sh_cmd, title=None, env=None, d=None): 34 def __init__(self, sh_cmd, title=None, env=None, d=None):
35 from subprocess import STDOUT
34 fmt_sh_cmd = self.format_command(sh_cmd, title) 36 fmt_sh_cmd = self.format_command(sh_cmd, title)
35 try: 37 try:
36 Popen.__init__(self, fmt_sh_cmd, env=env) 38 Popen.__init__(self, fmt_sh_cmd, env=env, stderr=STDOUT)
37 except OSError as exc: 39 except OSError as exc:
38 import errno 40 import errno
39 if exc.errno == errno.ENOENT: 41 if exc.errno == errno.ENOENT:
@@ -86,10 +88,10 @@ class Konsole(XTerminal):
86 def __init__(self, sh_cmd, title=None, env=None, d=None): 88 def __init__(self, sh_cmd, title=None, env=None, d=None):
87 # Check version 89 # Check version
88 vernum = check_terminal_version("konsole") 90 vernum = check_terminal_version("konsole")
89 if vernum and LooseVersion(vernum) < '2.0.0': 91 if vernum and bb.utils.vercmp_string_op(vernum, "2.0.0", "<"):
90 # Konsole from KDE 3.x 92 # Konsole from KDE 3.x
91 self.command = 'konsole -T "{title}" -e {command}' 93 self.command = 'konsole -T "{title}" -e {command}'
92 elif vernum and LooseVersion(vernum) < '16.08.1': 94 elif vernum and bb.utils.vercmp_string_op(vernum, "16.08.1", "<"):
93 # Konsole pre 16.08.01 Has nofork 95 # Konsole pre 16.08.01 Has nofork
94 self.command = 'konsole --nofork --workdir . -p tabtitle="{title}" -e {command}' 96 self.command = 'konsole --nofork --workdir . -p tabtitle="{title}" -e {command}'
95 XTerminal.__init__(self, sh_cmd, title, env, d) 97 XTerminal.__init__(self, sh_cmd, title, env, d)
@@ -102,6 +104,10 @@ class Rxvt(XTerminal):
102 command = 'rxvt -T "{title}" -e {command}' 104 command = 'rxvt -T "{title}" -e {command}'
103 priority = 1 105 priority = 1
104 106
107class URxvt(XTerminal):
108 command = 'urxvt -T "{title}" -e {command}'
109 priority = 1
110
105class Screen(Terminal): 111class Screen(Terminal):
106 command = 'screen -D -m -t "{title}" -S devshell {command}' 112 command = 'screen -D -m -t "{title}" -S devshell {command}'
107 113
@@ -163,7 +169,12 @@ class Tmux(Terminal):
163 # devshells, if it's already there, add a new window to it. 169 # devshells, if it's already there, add a new window to it.
164 window_name = 'devshell-%i' % os.getpid() 170 window_name = 'devshell-%i' % os.getpid()
165 171
166 self.command = 'tmux new -c "{{cwd}}" -d -s {0} -n {0} "{{command}}"'.format(window_name) 172 self.command = 'tmux new -c "{{cwd}}" -d -s {0} -n {0} "{{command}}"'
173 if not check_tmux_version('1.9'):
174 # `tmux new-session -c` was added in 1.9;
175 # older versions fail with that flag
176 self.command = 'tmux new -d -s {0} -n {0} "{{command}}"'
177 self.command = self.command.format(window_name)
167 Terminal.__init__(self, sh_cmd, title, env, d) 178 Terminal.__init__(self, sh_cmd, title, env, d)
168 179
169 attach_cmd = 'tmux att -t {0}'.format(window_name) 180 attach_cmd = 'tmux att -t {0}'.format(window_name)
@@ -253,13 +264,18 @@ def spawn(name, sh_cmd, title=None, env=None, d=None):
253 except OSError: 264 except OSError:
254 return 265 return
255 266
267def check_tmux_version(desired):
268 vernum = check_terminal_version("tmux")
269 if vernum and bb.utils.vercmp_string_op(vernum, desired, "<"):
270 return False
271 return vernum
272
256def check_tmux_pane_size(tmux): 273def check_tmux_pane_size(tmux):
257 import subprocess as sub 274 import subprocess as sub
258 # On older tmux versions (<1.9), return false. The reason 275 # On older tmux versions (<1.9), return false. The reason
 259 # is that there is no easy way to get the height of the active pane 276 # is that there is no easy way to get the height of the active pane
 260 # on the current window without nested formats (available from version 1.9) 277 # on the current window without nested formats (available from version 1.9)
261 vernum = check_terminal_version("tmux") 278 if not check_tmux_version('1.9'):
262 if vernum and LooseVersion(vernum) < '1.9':
263 return False 279 return False
264 try: 280 try:
265 p = sub.Popen('%s list-panes -F "#{?pane_active,#{pane_height},}"' % tmux, 281 p = sub.Popen('%s list-panes -F "#{?pane_active,#{pane_height},}"' % tmux,
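
With distutils deprecated, the LooseVersion comparisons become bb.utils.vercmp_string_op(version, reference, operator) calls, as in the Konsole and tmux paths above. A minimal sketch of the same gating, assuming a BitBake environment where bb.utils is importable (the helper name is illustrative):

import bb.utils

def pick_konsole_command(vernum):
    # Mirrors the version gates above; returns None for modern konsole,
    # which uses the class default command
    if vernum and bb.utils.vercmp_string_op(vernum, "2.0.0", "<"):
        return 'konsole -T "{title}" -e {command}'
    if vernum and bb.utils.vercmp_string_op(vernum, "16.08.1", "<"):
        return 'konsole --nofork --workdir . -p tabtitle="{title}" -e {command}'
    return None

print(pick_konsole_command("15.12.3"))  # pre-16.08.1 -> --nofork variant
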
diff --git a/meta/lib/oe/types.py b/meta/lib/oe/types.py
index bbbabafbf6..b929afb1f3 100644
--- a/meta/lib/oe/types.py
+++ b/meta/lib/oe/types.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
diff --git a/meta/lib/oe/useradd.py b/meta/lib/oe/useradd.py
index 8fc77568ff..54aa86feb5 100644
--- a/meta/lib/oe/useradd.py
+++ b/meta/lib/oe/useradd.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4import argparse 6import argparse
@@ -45,7 +47,6 @@ def build_useradd_parser():
45 parser.add_argument("-N", "--no-user-group", dest="user_group", help="do not create a group with the same name as the user", action="store_const", const=False) 47 parser.add_argument("-N", "--no-user-group", dest="user_group", help="do not create a group with the same name as the user", action="store_const", const=False)
46 parser.add_argument("-o", "--non-unique", help="allow to create users with duplicate (non-unique UID)", action="store_true") 48 parser.add_argument("-o", "--non-unique", help="allow to create users with duplicate (non-unique UID)", action="store_true")
47 parser.add_argument("-p", "--password", metavar="PASSWORD", help="encrypted password of the new account") 49 parser.add_argument("-p", "--password", metavar="PASSWORD", help="encrypted password of the new account")
48 parser.add_argument("-P", "--clear-password", metavar="CLEAR_PASSWORD", help="use this clear password for the new account")
49 parser.add_argument("-R", "--root", metavar="CHROOT_DIR", help="directory to chroot into") 50 parser.add_argument("-R", "--root", metavar="CHROOT_DIR", help="directory to chroot into")
50 parser.add_argument("-r", "--system", help="create a system account", action="store_true") 51 parser.add_argument("-r", "--system", help="create a system account", action="store_true")
51 parser.add_argument("-s", "--shell", metavar="SHELL", help="login shell of the new account") 52 parser.add_argument("-s", "--shell", metavar="SHELL", help="login shell of the new account")
@@ -63,7 +64,6 @@ def build_groupadd_parser():
63 parser.add_argument("-K", "--key", metavar="KEY=VALUE", help="override /etc/login.defs defaults") 64 parser.add_argument("-K", "--key", metavar="KEY=VALUE", help="override /etc/login.defs defaults")
64 parser.add_argument("-o", "--non-unique", help="allow to create groups with duplicate (non-unique) GID", action="store_true") 65 parser.add_argument("-o", "--non-unique", help="allow to create groups with duplicate (non-unique) GID", action="store_true")
65 parser.add_argument("-p", "--password", metavar="PASSWORD", help="use this encrypted password for the new group") 66 parser.add_argument("-p", "--password", metavar="PASSWORD", help="use this encrypted password for the new group")
66 parser.add_argument("-P", "--clear-password", metavar="CLEAR_PASSWORD", help="use this clear password for the new group")
67 parser.add_argument("-R", "--root", metavar="CHROOT_DIR", help="directory to chroot into") 67 parser.add_argument("-R", "--root", metavar="CHROOT_DIR", help="directory to chroot into")
68 parser.add_argument("-r", "--system", help="create a system account", action="store_true") 68 parser.add_argument("-r", "--system", help="create a system account", action="store_true")
69 parser.add_argument("GROUP", help="Group name of the new group") 69 parser.add_argument("GROUP", help="Group name of the new group")
diff --git a/meta/lib/oe/utils.py b/meta/lib/oe/utils.py
index 9a2187e36f..14a7d07ef0 100644
--- a/meta/lib/oe/utils.py
+++ b/meta/lib/oe/utils.py
@@ -1,10 +1,13 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
5import subprocess 7import subprocess
6import multiprocessing 8import multiprocessing
7import traceback 9import traceback
10import errno
8 11
9def read_file(filename): 12def read_file(filename):
10 try: 13 try:
@@ -221,12 +224,12 @@ def packages_filter_out_system(d):
221 PN-dbg PN-doc PN-locale-eb-gb removed. 224 PN-dbg PN-doc PN-locale-eb-gb removed.
222 """ 225 """
223 pn = d.getVar('PN') 226 pn = d.getVar('PN')
224 blacklist = [pn + suffix for suffix in ('', '-dbg', '-dev', '-doc', '-locale', '-staticdev', '-src')] 227 pkgfilter = [pn + suffix for suffix in ('', '-dbg', '-dev', '-doc', '-locale', '-staticdev', '-src')]
225 localepkg = pn + "-locale-" 228 localepkg = pn + "-locale-"
226 pkgs = [] 229 pkgs = []
227 230
228 for pkg in d.getVar('PACKAGES').split(): 231 for pkg in d.getVar('PACKAGES').split():
229 if pkg not in blacklist and localepkg not in pkg: 232 if pkg not in pkgfilter and localepkg not in pkg:
230 pkgs.append(pkg) 233 pkgs.append(pkg)
231 return pkgs 234 return pkgs
232 235
@@ -248,24 +251,31 @@ def trim_version(version, num_parts=2):
248 trimmed = ".".join(parts[:num_parts]) 251 trimmed = ".".join(parts[:num_parts])
249 return trimmed 252 return trimmed
250 253
251def cpu_count(at_least=1): 254def cpu_count(at_least=1, at_most=64):
252 cpus = len(os.sched_getaffinity(0)) 255 cpus = len(os.sched_getaffinity(0))
253 return max(cpus, at_least) 256 return max(min(cpus, at_most), at_least)
254 257
255def execute_pre_post_process(d, cmds): 258def execute_pre_post_process(d, cmds):
256 if cmds is None: 259 if cmds is None:
257 return 260 return
258 261
259 for cmd in cmds.strip().split(';'): 262 cmds = cmds.replace(";", " ")
260 cmd = cmd.strip() 263
261 if cmd != '': 264 for cmd in cmds.split():
262 bb.note("Executing %s ..." % cmd) 265 bb.note("Executing %s ..." % cmd)
263 bb.build.exec_func(cmd, d) 266 bb.build.exec_func(cmd, d)
267
268def get_bb_number_threads(d):
269 return int(d.getVar("BB_NUMBER_THREADS") or os.cpu_count() or 1)
270
271def multiprocess_launch(target, items, d, extraargs=None):
272 max_process = get_bb_number_threads(d)
273 return multiprocess_launch_mp(target, items, max_process, extraargs)
264 274
265# For each item in items, call the function 'target' with item as the first 275# For each item in items, call the function 'target' with item as the first
266# argument, extraargs as the other arguments and handle any exceptions in the 276# argument, extraargs as the other arguments and handle any exceptions in the
267# parent thread 277# parent thread
268def multiprocess_launch(target, items, d, extraargs=None): 278def multiprocess_launch_mp(target, items, max_process, extraargs=None):
269 279
270 class ProcessLaunch(multiprocessing.Process): 280 class ProcessLaunch(multiprocessing.Process):
271 def __init__(self, *args, **kwargs): 281 def __init__(self, *args, **kwargs):
@@ -300,7 +310,6 @@ def multiprocess_launch(target, items, d, extraargs=None):
300 self.update() 310 self.update()
301 return self._result 311 return self._result
302 312
303 max_process = int(d.getVar("BB_NUMBER_THREADS") or os.cpu_count() or 1)
304 launched = [] 313 launched = []
305 errors = [] 314 errors = []
306 results = [] 315 results = []
@@ -344,7 +353,29 @@ def squashspaces(string):
344 import re 353 import re
345 return re.sub(r"\s+", " ", string).strip() 354 return re.sub(r"\s+", " ", string).strip()
346 355
347def format_pkg_list(pkg_dict, ret_format=None): 356def rprovides_map(pkgdata_dir, pkg_dict):
357 # Map file -> pkg provider
358 rprov_map = {}
359
360 for pkg in pkg_dict:
361 path_to_pkgfile = os.path.join(pkgdata_dir, 'runtime-reverse', pkg)
362 if not os.path.isfile(path_to_pkgfile):
363 continue
364 with open(path_to_pkgfile) as f:
365 for line in f:
366 if line.startswith('RPROVIDES') or line.startswith('FILERPROVIDES'):
367 # List all components provided by pkg.
368 # Exclude version strings, i.e. those starting with (
369 provides = [x for x in line.split()[1:] if not x.startswith('(')]
370 for prov in provides:
371 if prov in rprov_map:
372 rprov_map[prov].append(pkg)
373 else:
374 rprov_map[prov] = [pkg]
375
376 return rprov_map
377
378def format_pkg_list(pkg_dict, ret_format=None, pkgdata_dir=None):
348 output = [] 379 output = []
349 380
350 if ret_format == "arch": 381 if ret_format == "arch":
@@ -357,9 +388,15 @@ def format_pkg_list(pkg_dict, ret_format=None):
357 for pkg in sorted(pkg_dict): 388 for pkg in sorted(pkg_dict):
358 output.append("%s %s %s" % (pkg, pkg_dict[pkg]["arch"], pkg_dict[pkg]["ver"])) 389 output.append("%s %s %s" % (pkg, pkg_dict[pkg]["arch"], pkg_dict[pkg]["ver"]))
359 elif ret_format == "deps": 390 elif ret_format == "deps":
391 rprov_map = rprovides_map(pkgdata_dir, pkg_dict)
360 for pkg in sorted(pkg_dict): 392 for pkg in sorted(pkg_dict):
361 for dep in pkg_dict[pkg]["deps"]: 393 for dep in pkg_dict[pkg]["deps"]:
362 output.append("%s|%s" % (pkg, dep)) 394 if dep in rprov_map:
395 # There could be multiple providers within the image
396 for pkg_provider in rprov_map[dep]:
397 output.append("%s|%s * %s [RPROVIDES]" % (pkg, pkg_provider, dep))
398 else:
399 output.append("%s|%s" % (pkg, dep))
363 else: 400 else:
364 for pkg in sorted(pkg_dict): 401 for pkg in sorted(pkg_dict):
365 output.append(pkg) 402 output.append(pkg)
@@ -445,81 +482,6 @@ def get_multilib_datastore(variant, d):
445 localdata.setVar("MLPREFIX", "") 482 localdata.setVar("MLPREFIX", "")
446 return localdata 483 return localdata
447 484
448#
449# Python 2.7 doesn't have threaded pools (just multiprocessing)
450# so implement a version here
451#
452
453from queue import Queue
454from threading import Thread
455
456class ThreadedWorker(Thread):
457 """Thread executing tasks from a given tasks queue"""
458 def __init__(self, tasks, worker_init, worker_end):
459 Thread.__init__(self)
460 self.tasks = tasks
461 self.daemon = True
462
463 self.worker_init = worker_init
464 self.worker_end = worker_end
465
466 def run(self):
467 from queue import Empty
468
469 if self.worker_init is not None:
470 self.worker_init(self)
471
472 while True:
473 try:
474 func, args, kargs = self.tasks.get(block=False)
475 except Empty:
476 if self.worker_end is not None:
477 self.worker_end(self)
478 break
479
480 try:
481 func(self, *args, **kargs)
482 except Exception as e:
483 print(e)
484 finally:
485 self.tasks.task_done()
486
487class ThreadedPool:
488 """Pool of threads consuming tasks from a queue"""
489 def __init__(self, num_workers, num_tasks, worker_init=None,
490 worker_end=None):
491 self.tasks = Queue(num_tasks)
492 self.workers = []
493
494 for _ in range(num_workers):
495 worker = ThreadedWorker(self.tasks, worker_init, worker_end)
496 self.workers.append(worker)
497
498 def start(self):
499 for worker in self.workers:
500 worker.start()
501
502 def add_task(self, func, *args, **kargs):
503 """Add a task to the queue"""
504 self.tasks.put((func, args, kargs))
505
506 def wait_completion(self):
507 """Wait for completion of all the tasks in the queue"""
508 self.tasks.join()
509 for worker in self.workers:
510 worker.join()
511
512def write_ld_so_conf(d):
513 # Some utils like prelink may not have the correct target library paths
514 # so write an ld.so.conf to help them
515 ldsoconf = d.expand("${STAGING_DIR_TARGET}${sysconfdir}/ld.so.conf")
516 if os.path.exists(ldsoconf):
517 bb.utils.remove(ldsoconf)
518 bb.utils.mkdirhier(os.path.dirname(ldsoconf))
519 with open(ldsoconf, "w") as f:
520 f.write(d.getVar("base_libdir") + '\n')
521 f.write(d.getVar("libdir") + '\n')
522
523class ImageQAFailed(Exception): 485class ImageQAFailed(Exception):
524 def __init__(self, description, name=None, logfile=None): 486 def __init__(self, description, name=None, logfile=None):
525 self.description = description 487 self.description = description
@@ -536,3 +498,45 @@ class ImageQAFailed(Exception):
536def sh_quote(string): 498def sh_quote(string):
537 import shlex 499 import shlex
538 return shlex.quote(string) 500 return shlex.quote(string)
501
502def directory_size(root, blocksize=4096):
503 """
504 Calculate the size of the directory, taking into account hard links,
505 rounding up every size to multiples of the blocksize.
506 """
507 def roundup(size):
508 """
509 Round the size up to the nearest multiple of the block size.
510 """
511 import math
512 return math.ceil(size / blocksize) * blocksize
513
514 def getsize(filename):
515 """
516 Get the size of the filename, not following symlinks, taking into
517 account hard links.
518 """
519 stat = os.lstat(filename)
520 if stat.st_ino not in inodes:
521 inodes.add(stat.st_ino)
522 return stat.st_size
523 else:
524 return 0
525
526 inodes = set()
527 total = 0
528 for root, dirs, files in os.walk(root):
529 total += sum(roundup(getsize(os.path.join(root, name))) for name in files)
530 total += roundup(getsize(root))
531 return total
532
 533# Update the mtime of a file, skipping permission and read-only errors
534def touch(filename):
535 try:
536 os.utime(filename, None)
537 except PermissionError:
538 pass
539 except OSError as e:
540 # Handle read-only file systems gracefully
541 if e.errno != errno.EROFS:
542 raise e
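
A self-contained demo of the directory_size() semantics documented above: a hard-linked file is counted once, and every size is rounded up to the block size (the directory's own on-disk size is filesystem-dependent). This assumes meta/lib is on sys.path so oe.utils is importable:

import os
import tempfile
import oe.utils

with tempfile.TemporaryDirectory() as root:
    with open(os.path.join(root, "a"), "w") as f:
        f.write("x" * 5000)          # 5000 bytes round up to 8192
    # Second name for the same inode; contributes 0 to the total
    os.link(os.path.join(root, "a"), os.path.join(root, "b"))

    print(oe.utils.directory_size(root))
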