Diffstat (limited to 'meta/classes')
-rw-r--r--   meta/classes/create-spdx-2.2.bbclass     14
-rw-r--r--   meta/classes/devtool-source.bbclass      74
-rw-r--r--   meta/classes/multilib.bbclass            87
-rw-r--r--   meta/classes/multilib_global.bbclass     25
-rw-r--r--   meta/classes/siteconfig.bbclass          39
5 files changed, 77 insertions(+), 162 deletions(-)
diff --git a/meta/classes/create-spdx-2.2.bbclass b/meta/classes/create-spdx-2.2.bbclass
index 4ea91f6499..7c8a0b8b0f 100644
--- a/meta/classes/create-spdx-2.2.bbclass
+++ b/meta/classes/create-spdx-2.2.bbclass
@@ -357,7 +357,13 @@ def collect_dep_recipes(d, doc, spdx_recipe):
     with spdx_deps_file.open("r") as f:
         deps = json.load(f)
 
-    for dep_pn, dep_hashfn in deps:
+    for dep_pn, dep_hashfn, in_taskhash in deps:
+        # If this dependency is not calculated in the taskhash skip it.
+        # Otherwise, it can result in broken links since this task won't
+        # rebuild and see the new SPDX ID if the dependency changes
+        if not in_taskhash:
+            continue
+
         dep_recipe_path = oe.sbom.doc_find_by_hashfn(deploy_dir_spdx, package_archs, "recipe-" + dep_pn, dep_hashfn)
         if not dep_recipe_path:
             bb.fatal("Cannot find any SPDX file for recipe %s, %s" % (dep_pn, dep_hashfn))
@@ -478,7 +484,7 @@ def collect_direct_deps(d, dep_task):
             for dep_name in this_dep[3]:
                 dep_data = taskdepdata[dep_name]
                 if dep_data[1] == dep_task and dep_data[0] != pn:
-                    deps.add((dep_data[0], dep_data[7]))
+                    deps.add((dep_data[0], dep_data[7], dep_name in this_dep[8]))
 
     return sorted(deps)
 
@@ -721,9 +727,9 @@ def collect_package_providers(d):
     providers = {}
 
     deps = collect_direct_deps(d, "do_create_spdx")
-    deps.append((d.getVar("PN"), d.getVar("BB_HASHFILENAME")))
+    deps.append((d.getVar("PN"), d.getVar("BB_HASHFILENAME"), True))
 
-    for dep_pn, dep_hashfn in deps:
+    for dep_pn, dep_hashfn, _ in deps:
         localdata = d
         recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata)
         if not recipe_data:
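
Note: the net effect of the create-spdx-2.2.bbclass change is that each dependency entry now carries a third element recording whether the dependency contributes to the task hash, and collect_dep_recipes() skips entries that do not, so SPDX links are only created for dependencies whose changes would actually trigger a rebuild of this task. A minimal standalone sketch of that filtering pattern (the dependency data below is illustrative, not taken from the class):

    # Hypothetical entries mirroring the (pn, hashfn, in_taskhash) tuples
    deps = [
        ("zlib", "abc123", True),
        ("quilt-native", "def456", False),  # not in the taskhash, so skipped
    ]

    for dep_pn, dep_hashfn, in_taskhash in deps:
        if not in_taskhash:
            # Skipping avoids stale links: this task will not rebuild when
            # such a dependency changes, so its SPDX ID could be outdated.
            continue
        print("would resolve SPDX document for", dep_pn, dep_hashfn)
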
diff --git a/meta/classes/devtool-source.bbclass b/meta/classes/devtool-source.bbclass
index 4158c20c7e..3e24800dcb 100644
--- a/meta/classes/devtool-source.bbclass
+++ b/meta/classes/devtool-source.bbclass
@@ -26,8 +26,6 @@
 
 
 DEVTOOL_TEMPDIR ?= ""
-DEVTOOL_PATCH_SRCDIR = "${DEVTOOL_TEMPDIR}/patchworkdir"
-
 
 python() {
     tempdir = d.getVar('DEVTOOL_TEMPDIR')
@@ -60,7 +58,6 @@ python() {
     else:
         unpacktask = 'do_unpack'
     d.appendVarFlag(unpacktask, 'postfuncs', ' devtool_post_unpack')
-    d.prependVarFlag('do_patch', 'prefuncs', ' devtool_pre_patch')
     d.appendVarFlag('do_patch', 'postfuncs', ' devtool_post_patch')
 
     # NOTE: in order for the patch stuff to be fully functional,
@@ -79,67 +76,23 @@ python devtool_post_unpack() {
 
     tempdir = d.getVar('DEVTOOL_TEMPDIR')
     workdir = d.getVar('WORKDIR')
+    unpackdir = d.getVar('UNPACKDIR')
     srcsubdir = d.getVar('S')
 
-    def _move_file(src, dst):
-        """Move a file. Creates all the directory components of destination path."""
-        dst_d = os.path.dirname(dst)
-        if dst_d:
-            bb.utils.mkdirhier(dst_d)
-        shutil.move(src, dst)
-
-    def _ls_tree(directory):
-        """Recursive listing of files in a directory"""
-        ret = []
-        for root, dirs, files in os.walk(directory):
-            ret.extend([os.path.relpath(os.path.join(root, fname), directory) for
-                        fname in files])
-        return ret
-
-    is_kernel_yocto = bb.data.inherits_class('kernel-yocto', d)
-    # Move local source files into separate subdir
-    recipe_patches = [os.path.basename(patch) for patch in
-                      oe.recipeutils.get_recipe_patches(d)]
+    # Add locally copied files to gitignore as we add back to the metadata directly
     local_files = oe.recipeutils.get_recipe_local_files(d)
-
-    if is_kernel_yocto:
-        for key in [f for f in local_files if f.endswith('scc')]:
-            with open(local_files[key], 'r') as sccfile:
-                for l in sccfile:
-                    line = l.split()
-                    if line and line[0] in ('kconf', 'patch'):
-                        cfg = os.path.join(os.path.dirname(local_files[key]), line[-1])
-                        if cfg not in local_files.values():
-                            local_files[line[-1]] = cfg
-                            shutil.copy2(cfg, workdir)
-
-    # Ignore local files with subdir={BP}
     srcabspath = os.path.abspath(srcsubdir)
     local_files = [fname for fname in local_files if
-                   os.path.exists(os.path.join(workdir, fname)) and
-                   (srcabspath == workdir or not
-                    os.path.join(workdir, fname).startswith(srcabspath +
-                        os.sep))]
+                   os.path.exists(os.path.join(unpackdir, fname)) and
+                   srcabspath == unpackdir]
     if local_files:
-        for fname in local_files:
-            _move_file(os.path.join(workdir, fname),
-                       os.path.join(tempdir, 'oe-local-files', fname))
-        with open(os.path.join(tempdir, 'oe-local-files', '.gitignore'),
-                  'w') as f:
-            f.write('# Ignore local files, by default. Remove this file '
-                    'if you want to commit the directory to Git\n*\n')
-
-    if srcsubdir == workdir:
-        # Find non-patch non-local sources that were "unpacked" to srctree
-        # directory
-        src_files = [fname for fname in _ls_tree(workdir) if
-                     os.path.basename(fname) not in recipe_patches]
-        srcsubdir = d.getVar('DEVTOOL_PATCH_SRCDIR')
-        # Move source files to S
-        for path in src_files:
-            _move_file(os.path.join(workdir, path),
-                       os.path.join(srcsubdir, path))
-    elif os.path.dirname(srcsubdir) != workdir:
+        with open(os.path.join(tempdir, '.gitignore'), 'a+') as f:
+            f.write('# Ignore local files, by default. Remove following lines'
+                    'if you want to commit the directory to Git\n')
+            for fname in local_files:
+                f.write('%s\n' % fname)
+
+    if os.path.dirname(srcsubdir) != workdir:
         # Handle if S is set to a subdirectory of the source
         srcsubdir = os.path.join(workdir, os.path.relpath(srcsubdir, workdir).split(os.sep)[0])
 
@@ -164,11 +117,6 @@ python devtool_post_unpack() {
         f.write(srcsubdir)
 }
 
-python devtool_pre_patch() {
-    if d.getVar('S') == d.getVar('WORKDIR'):
-        d.setVar('S', '${DEVTOOL_PATCH_SRCDIR}')
-}
-
 python devtool_post_patch() {
     import shutil
     tempdir = d.getVar('DEVTOOL_TEMPDIR')
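
Note: with sources now unpacked into UNPACKDIR, devtool_post_unpack() no longer copies local files into an oe-local-files subdirectory; it only records their names in the srctree's .gitignore. A rough standalone sketch of that append pattern, with made-up paths and file names standing in for DEVTOOL_TEMPDIR and get_recipe_local_files():

    import os

    tempdir = "/tmp/devtool-tempdir"          # stand-in for DEVTOOL_TEMPDIR
    local_files = ["defconfig", "fix.cfg"]    # stand-in for the recipe's local files

    os.makedirs(tempdir, exist_ok=True)
    # 'a+' appends, so any ignore entries already present are preserved
    with open(os.path.join(tempdir, ".gitignore"), "a+") as f:
        f.write("# Ignore local files, by default. Remove following lines "
                "if you want to commit the directory to Git\n")
        for fname in local_files:
            f.write("%s\n" % fname)
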
diff --git a/meta/classes/multilib.bbclass b/meta/classes/multilib.bbclass
index b6c09969b1..a4151658a6 100644
--- a/meta/classes/multilib.bbclass
+++ b/meta/classes/multilib.bbclass
@@ -5,30 +5,30 @@
 #
 
 python multilib_virtclass_handler () {
-    cls = e.data.getVar("BBEXTENDCURR")
-    variant = e.data.getVar("BBEXTENDVARIANT")
+    cls = d.getVar("BBEXTENDCURR")
+    variant = d.getVar("BBEXTENDVARIANT")
     if cls != "multilib" or not variant:
         return
 
-    localdata = bb.data.createCopy(e.data)
+    localdata = bb.data.createCopy(d)
     localdata.delVar('TMPDIR')
-    e.data.setVar('STAGING_KERNEL_DIR', localdata.getVar('STAGING_KERNEL_DIR'))
+    d.setVar('STAGING_KERNEL_DIR', localdata.getVar('STAGING_KERNEL_DIR'))
 
     # There should only be one kernel in multilib configs
     # We also skip multilib setup for module packages.
-    provides = (e.data.getVar("PROVIDES") or "").split()
+    provides = (d.getVar("PROVIDES") or "").split()
     non_ml_recipes = d.getVar('NON_MULTILIB_RECIPES').split()
-    bpn = e.data.getVar("BPN")
-    if "virtual/kernel" in provides or \
-        bb.data.inherits_class('module-base', e.data) or \
-        bpn in non_ml_recipes:
+    bpn = d.getVar("BPN")
+    if ("virtual/kernel" in provides
+            or bb.data.inherits_class('module-base', d)
+            or bpn in non_ml_recipes):
         raise bb.parse.SkipRecipe("We shouldn't have multilib variants for %s" % bpn)
 
-    save_var_name=e.data.getVar("MULTILIB_SAVE_VARNAME") or ""
+    save_var_name = d.getVar("MULTILIB_SAVE_VARNAME") or ""
     for name in save_var_name.split():
-        val=e.data.getVar(name)
+        val = d.getVar(name)
         if val:
-            e.data.setVar(name + "_MULTILIB_ORIGINAL", val)
+            d.setVar(name + "_MULTILIB_ORIGINAL", val)
 
     # We nearly don't need this but dependencies on NON_MULTILIB_RECIPES don't work without it
     d.setVar("SSTATE_ARCHS_TUNEPKG", "${@all_multilib_tune_values(d, 'TUNE_PKGARCH')}")
@@ -36,66 +36,67 @@ python multilib_virtclass_handler () {
     overrides = e.data.getVar("OVERRIDES", False)
     pn = e.data.getVar("PN", False)
     overrides = overrides.replace("pn-${PN}", "pn-${PN}:pn-" + pn)
-    e.data.setVar("OVERRIDES", overrides)
+    d.setVar("OVERRIDES", overrides)
 
-    if bb.data.inherits_class('image', e.data):
-        e.data.setVar("MLPREFIX", variant + "-")
-        e.data.setVar("PN", variant + "-" + e.data.getVar("PN", False))
-        e.data.setVar('SDKTARGETSYSROOT', e.data.getVar('SDKTARGETSYSROOT'))
+    if bb.data.inherits_class('image', d):
+        d.setVar("MLPREFIX", variant + "-")
+        d.setVar("PN", variant + "-" + d.getVar("PN", False))
+        d.setVar('SDKTARGETSYSROOT', d.getVar('SDKTARGETSYSROOT'))
         override = ":virtclass-multilib-" + variant
-        e.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + override)
-        target_vendor = e.data.getVar("TARGET_VENDOR:" + "virtclass-multilib-" + variant, False)
+        d.setVar("OVERRIDES", d.getVar("OVERRIDES", False) + override)
+        target_vendor = d.getVar("TARGET_VENDOR:" + "virtclass-multilib-" + variant, False)
         if target_vendor:
-            e.data.setVar("TARGET_VENDOR", target_vendor)
+            d.setVar("TARGET_VENDOR", target_vendor)
         return
 
-    if bb.data.inherits_class('cross-canadian', e.data):
+    if bb.data.inherits_class('cross-canadian', d):
         # Multilib cross-candian should use the same nativesdk sysroot without MLPREFIX
-        e.data.setVar("RECIPE_SYSROOT", "${WORKDIR}/recipe-sysroot")
-        e.data.setVar("STAGING_DIR_TARGET", "${WORKDIR}/recipe-sysroot")
-        e.data.setVar("STAGING_DIR_HOST", "${WORKDIR}/recipe-sysroot")
-        e.data.setVar("RECIPE_SYSROOT_MANIFEST_SUBDIR", "nativesdk-" + variant)
-        e.data.setVar("MLPREFIX", variant + "-")
+        d.setVar("RECIPE_SYSROOT", "${WORKDIR}/recipe-sysroot")
+        d.setVar("STAGING_DIR_TARGET", "${WORKDIR}/recipe-sysroot")
+        d.setVar("STAGING_DIR_HOST", "${WORKDIR}/recipe-sysroot")
+        d.setVar("RECIPE_SYSROOT_MANIFEST_SUBDIR", "nativesdk-" + variant)
+        d.setVar("MLPREFIX", variant + "-")
         override = ":virtclass-multilib-" + variant
-        e.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + override)
+        d.setVar("OVERRIDES", d.getVar("OVERRIDES", False) + override)
         return
 
-    if bb.data.inherits_class('native', e.data):
+    if bb.data.inherits_class('native', d):
         raise bb.parse.SkipRecipe("We can't extend native recipes")
 
-    if bb.data.inherits_class('nativesdk', e.data) or bb.data.inherits_class('crosssdk', e.data):
+    if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('crosssdk', d):
         raise bb.parse.SkipRecipe("We can't extend nativesdk recipes")
 
-    if bb.data.inherits_class('allarch', e.data) and not d.getVar('MULTILIB_VARIANTS') \
-        and not bb.data.inherits_class('packagegroup', e.data):
+    if (bb.data.inherits_class('allarch', d)
+            and not d.getVar('MULTILIB_VARIANTS')
+            and not bb.data.inherits_class('packagegroup', d)):
         raise bb.parse.SkipRecipe("Don't extend allarch recipes which are not packagegroups")
 
     # Expand this since this won't work correctly once we set a multilib into place
-    e.data.setVar("ALL_MULTILIB_PACKAGE_ARCHS", e.data.getVar("ALL_MULTILIB_PACKAGE_ARCHS"))
+    d.setVar("ALL_MULTILIB_PACKAGE_ARCHS", d.getVar("ALL_MULTILIB_PACKAGE_ARCHS"))
 
     override = ":virtclass-multilib-" + variant
 
-    skip_msg = e.data.getVarFlag('SKIP_RECIPE', e.data.getVar('PN'))
+    skip_msg = d.getVarFlag('SKIP_RECIPE', d.getVar('PN'))
     if skip_msg:
-        pn_new = variant + "-" + e.data.getVar('PN')
-        if not e.data.getVarFlag('SKIP_RECIPE', pn_new):
-            e.data.setVarFlag('SKIP_RECIPE', pn_new, skip_msg)
+        pn_new = variant + "-" + d.getVar('PN')
+        if not d.getVarFlag('SKIP_RECIPE', pn_new):
+            d.setVarFlag('SKIP_RECIPE', pn_new, skip_msg)
 
-    e.data.setVar("MLPREFIX", variant + "-")
-    e.data.setVar("PN", variant + "-" + e.data.getVar("PN", False))
-    e.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + override)
+    d.setVar("MLPREFIX", variant + "-")
+    d.setVar("PN", variant + "-" + d.getVar("PN", False))
+    d.setVar("OVERRIDES", d.getVar("OVERRIDES", False) + override)
 
     # Expand INCOMPATIBLE_LICENSE_EXCEPTIONS with multilib prefix
-    pkgs = e.data.getVar("INCOMPATIBLE_LICENSE_EXCEPTIONS")
+    pkgs = d.getVar("INCOMPATIBLE_LICENSE_EXCEPTIONS")
     if pkgs:
         for pkg in pkgs.split():
             pkgs += " " + variant + "-" + pkg
-        e.data.setVar("INCOMPATIBLE_LICENSE_EXCEPTIONS", pkgs)
+        d.setVar("INCOMPATIBLE_LICENSE_EXCEPTIONS", pkgs)
 
     # DEFAULTTUNE can change TARGET_ARCH override so expand this now before update_data
-    newtune = e.data.getVar("DEFAULTTUNE:" + "virtclass-multilib-" + variant, False)
+    newtune = d.getVar("DEFAULTTUNE:" + "virtclass-multilib-" + variant, False)
     if newtune:
-        e.data.setVar("DEFAULTTUNE", newtune)
+        d.setVar("DEFAULTTUNE", newtune)
 }
 
 addhandler multilib_virtclass_handler
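
Note: the multilib.bbclass changes are largely mechanical: inside the event handler the datastore is now addressed as d rather than e.data (in current BitBake both names refer to the same datastore within event handlers), and the long skip conditions are reflowed into parenthesised form. Reduced to a hedged minimal example, the handler shape this converges on looks roughly like the following (the handler name and eventmask are illustrative, not taken from the class):

    python example_virtclass_handler () {
        variant = d.getVar("BBEXTENDVARIANT")
        if not variant:
            return
        # Operate on the datastore directly via d
        d.setVar("MLPREFIX", variant + "-")
        d.setVar("OVERRIDES", d.getVar("OVERRIDES", False) + ":virtclass-multilib-" + variant)
    }
    addhandler example_virtclass_handler
    example_virtclass_handler[eventmask] = "bb.event.RecipePreFinalise"
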
diff --git a/meta/classes/multilib_global.bbclass b/meta/classes/multilib_global.bbclass
index 6095d278dd..973ac9130b 100644
--- a/meta/classes/multilib_global.bbclass
+++ b/meta/classes/multilib_global.bbclass
@@ -171,24 +171,23 @@ def preferred_ml_updates(d):
     d.appendVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS", " " + " ".join(extras))
 
 python multilib_virtclass_handler_vendor () {
-    if isinstance(e, bb.event.ConfigParsed):
-        for v in e.data.getVar("MULTILIB_VARIANTS").split():
-            if e.data.getVar("TARGET_VENDOR:virtclass-multilib-" + v, False) is None:
-                e.data.setVar("TARGET_VENDOR:virtclass-multilib-" + v, e.data.getVar("TARGET_VENDOR", False) + "ml" + v)
-        preferred_ml_updates(e.data)
+    for v in d.getVar("MULTILIB_VARIANTS").split():
+        if d.getVar("TARGET_VENDOR:virtclass-multilib-" + v, False) is None:
+            d.setVar("TARGET_VENDOR:virtclass-multilib-" + v, d.getVar("TARGET_VENDOR", False) + "ml" + v)
+    preferred_ml_updates(d)
 }
 addhandler multilib_virtclass_handler_vendor
 multilib_virtclass_handler_vendor[eventmask] = "bb.event.ConfigParsed"
 
 python multilib_virtclass_handler_global () {
-    variant = e.data.getVar("BBEXTENDVARIANT")
+    variant = d.getVar("BBEXTENDVARIANT")
     if variant:
         return
 
     non_ml_recipes = d.getVar('NON_MULTILIB_RECIPES').split()
 
-    if bb.data.inherits_class('kernel', e.data) or \
-        bb.data.inherits_class('module-base', e.data) or \
+    if bb.data.inherits_class('kernel', d) or \
+        bb.data.inherits_class('module-base', d) or \
         d.getVar('BPN') in non_ml_recipes:
 
         # We need to avoid expanding KERNEL_VERSION which we can do by deleting it
@@ -197,7 +196,7 @@ python multilib_virtclass_handler_global () {
         localdata.delVar("KERNEL_VERSION")
         localdata.delVar("KERNEL_VERSION_PKG_NAME")
 
-    variants = (e.data.getVar("MULTILIB_VARIANTS") or "").split()
+    variants = (d.getVar("MULTILIB_VARIANTS") or "").split()
 
     import oe.classextend
     clsextends = []
@@ -208,22 +207,22 @@ python multilib_virtclass_handler_global () {
     origprovs = provs = localdata.getVar("PROVIDES") or ""
     for clsextend in clsextends:
         provs = provs + " " + clsextend.map_variable("PROVIDES", setvar=False)
-    e.data.setVar("PROVIDES", provs)
+    d.setVar("PROVIDES", provs)
 
     # Process RPROVIDES
     origrprovs = rprovs = localdata.getVar("RPROVIDES") or ""
     for clsextend in clsextends:
         rprovs = rprovs + " " + clsextend.map_variable("RPROVIDES", setvar=False)
     if rprovs.strip():
-        e.data.setVar("RPROVIDES", rprovs)
+        d.setVar("RPROVIDES", rprovs)
 
     # Process RPROVIDES:${PN}...
-    for pkg in (e.data.getVar("PACKAGES") or "").split():
+    for pkg in (d.getVar("PACKAGES") or "").split():
         origrprovs = rprovs = localdata.getVar("RPROVIDES:%s" % pkg) or ""
         for clsextend in clsextends:
             rprovs = rprovs + " " + clsextend.map_variable("RPROVIDES:%s" % pkg, setvar=False)
         rprovs = rprovs + " " + clsextend.extname + "-" + pkg
-        e.data.setVar("RPROVIDES:%s" % pkg, rprovs)
+        d.setVar("RPROVIDES:%s" % pkg, rprovs)
 }
 
 addhandler multilib_virtclass_handler_global
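
Note: in multilib_global.bbclass the vendor handler drops its isinstance(e, bb.event.ConfigParsed) guard; the handler's eventmask already limits delivery to that event, so the check was redundant. A minimal illustration of relying on the eventmask rather than an isinstance test (the handler name and note message are made up):

    python example_config_handler () {
        # Only ever invoked for bb.event.ConfigParsed because of the
        # eventmask below, so no isinstance() check is required here.
        for v in (d.getVar("MULTILIB_VARIANTS") or "").split():
            bb.note("multilib variant configured: %s" % v)
    }
    addhandler example_config_handler
    example_config_handler[eventmask] = "bb.event.ConfigParsed"
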
diff --git a/meta/classes/siteconfig.bbclass b/meta/classes/siteconfig.bbclass
deleted file mode 100644
index 953cafd285..0000000000
--- a/meta/classes/siteconfig.bbclass
+++ /dev/null
@@ -1,39 +0,0 @@
-#
-# Copyright OpenEmbedded Contributors
-#
-# SPDX-License-Identifier: MIT
-#
-
-python siteconfig_do_siteconfig () {
-    shared_state = sstate_state_fromvars(d)
-    if shared_state['task'] != 'populate_sysroot':
-        return
-    if not os.path.isdir(os.path.join(d.getVar('FILE_DIRNAME'), 'site_config')):
-        bb.debug(1, "No site_config directory, skipping do_siteconfig")
-        return
-    sstate_install(shared_state, d)
-    bb.build.exec_func('do_siteconfig_gencache', d)
-    sstate_clean(shared_state, d)
-}
-
-EXTRASITECONFIG ?= ""
-
-siteconfig_do_siteconfig_gencache () {
-    mkdir -p ${WORKDIR}/site_config_${MACHINE}
-    gen-site-config ${FILE_DIRNAME}/site_config \
-        >${WORKDIR}/site_config_${MACHINE}/configure.ac
-    cd ${WORKDIR}/site_config_${MACHINE}
-    autoconf
-    rm -f ${BPN}_cache
-    CONFIG_SITE="" ${EXTRASITECONFIG} ./configure ${CONFIGUREOPTS} --cache-file ${BPN}_cache
-    sed -n -e "/ac_cv_c_bigendian/p" -e "/ac_cv_sizeof_/p" \
-        -e "/ac_cv_type_/p" -e "/ac_cv_header_/p" -e "/ac_cv_func_/p" \
-        < ${BPN}_cache > ${BPN}_config
-    mkdir -p ${SYSROOT_DESTDIR}${datadir}/${TARGET_SYS}_config_site.d
-    cp ${BPN}_config ${SYSROOT_DESTDIR}${datadir}/${TARGET_SYS}_config_site.d
-
-}
-
-do_populate_sysroot[sstate-interceptfuncs] += "do_siteconfig "
-
-EXPORT_FUNCTIONS do_siteconfig do_siteconfig_gencache