Diffstat (limited to 'meta/lib/oe')
-rw-r--r--  meta/lib/oe/classextend.py      |  12
-rw-r--r--  meta/lib/oe/copy_buildsystem.py |  12
-rw-r--r--  meta/lib/oe/data.py             |   2
-rw-r--r--  meta/lib/oe/distro_check.py     |  20
-rw-r--r--  meta/lib/oe/gpg_sign.py         |   4
-rw-r--r--  meta/lib/oe/manifest.py         |  26
-rw-r--r--  meta/lib/oe/package.py          |   2
-rw-r--r--  meta/lib/oe/package_manager.py  | 174
-rw-r--r--  meta/lib/oe/packagedata.py      |   2
-rw-r--r--  meta/lib/oe/packagegroup.py     |   6
-rw-r--r--  meta/lib/oe/patch.py            |  30
-rw-r--r--  meta/lib/oe/path.py             |   2
-rw-r--r--  meta/lib/oe/prservice.py        |  26
-rw-r--r--  meta/lib/oe/qa.py               |   4
-rw-r--r--  meta/lib/oe/recipeutils.py      |  38
-rw-r--r--  meta/lib/oe/rootfs.py           | 114
-rw-r--r--  meta/lib/oe/sdk.py              |  76
-rw-r--r--  meta/lib/oe/sstatesig.py        |  30
-rw-r--r--  meta/lib/oe/terminal.py         |   2
-rw-r--r--  meta/lib/oe/utils.py            |  30
20 files changed, 306 insertions, 306 deletions
diff --git a/meta/lib/oe/classextend.py b/meta/lib/oe/classextend.py
index 4c8a00070c..d2eeaf0e5c 100644
--- a/meta/lib/oe/classextend.py
+++ b/meta/lib/oe/classextend.py
@@ -25,7 +25,7 @@ class ClassExtender(object):
         return name
 
     def map_variable(self, varname, setvar = True):
-        var = self.d.getVar(varname, True)
+        var = self.d.getVar(varname)
         if not var:
             return ""
         var = var.split()
@@ -38,7 +38,7 @@ class ClassExtender(object):
         return newdata
 
     def map_regexp_variable(self, varname, setvar = True):
-        var = self.d.getVar(varname, True)
+        var = self.d.getVar(varname)
         if not var:
             return ""
         var = var.split()
@@ -60,7 +60,7 @@ class ClassExtender(object):
             return dep
         else:
             # Do not extend for that already have multilib prefix
-            var = self.d.getVar("MULTILIB_VARIANTS", True)
+            var = self.d.getVar("MULTILIB_VARIANTS")
             if var:
                 var = var.split()
                 for v in var:
@@ -74,7 +74,7 @@ class ClassExtender(object):
             varname = varname + "_" + suffix
         orig = self.d.getVar("EXTENDPKGV", False)
         self.d.setVar("EXTENDPKGV", "EXTENDPKGV")
-        deps = self.d.getVar(varname, True)
+        deps = self.d.getVar(varname)
         if not deps:
             self.d.setVar("EXTENDPKGV", orig)
             return
@@ -87,7 +87,7 @@ class ClassExtender(object):
         self.d.setVar("EXTENDPKGV", orig)
 
     def map_packagevars(self):
-        for pkg in (self.d.getVar("PACKAGES", True).split() + [""]):
+        for pkg in (self.d.getVar("PACKAGES").split() + [""]):
             self.map_depends_variable("RDEPENDS", pkg)
             self.map_depends_variable("RRECOMMENDS", pkg)
             self.map_depends_variable("RSUGGESTS", pkg)
@@ -97,7 +97,7 @@ class ClassExtender(object):
             self.map_depends_variable("PKG", pkg)
 
     def rename_packages(self):
-        for pkg in (self.d.getVar("PACKAGES", True) or "").split():
+        for pkg in (self.d.getVar("PACKAGES") or "").split():
             if pkg.startswith(self.extname):
                 self.pkgs_mapping.append([pkg.split(self.extname + "-")[1], pkg])
                 continue
diff --git a/meta/lib/oe/copy_buildsystem.py b/meta/lib/oe/copy_buildsystem.py
index 29ac6d418f..a372904183 100644
--- a/meta/lib/oe/copy_buildsystem.py
+++ b/meta/lib/oe/copy_buildsystem.py
@@ -21,8 +21,8 @@ class BuildSystem(object):
     def __init__(self, context, d):
         self.d = d
         self.context = context
-        self.layerdirs = [os.path.abspath(pth) for pth in d.getVar('BBLAYERS', True).split()]
-        self.layers_exclude = (d.getVar('SDK_LAYERS_EXCLUDE', True) or "").split()
+        self.layerdirs = [os.path.abspath(pth) for pth in d.getVar('BBLAYERS').split()]
+        self.layers_exclude = (d.getVar('SDK_LAYERS_EXCLUDE') or "").split()
 
     def copy_bitbake_and_layers(self, destdir, workspace_name=None):
         # Copy in all metadata layers + bitbake (as repositories)
@@ -30,7 +30,7 @@ class BuildSystem(object):
         bb.utils.mkdirhier(destdir)
         layers = list(self.layerdirs)
 
-        corebase = os.path.abspath(self.d.getVar('COREBASE', True))
+        corebase = os.path.abspath(self.d.getVar('COREBASE'))
         layers.append(corebase)
 
         # Exclude layers
@@ -46,7 +46,7 @@ class BuildSystem(object):
                 extranum += 1
                 workspace_newname = '%s-%d' % (workspace_name, extranum)
 
-        corebase_files = self.d.getVar('COREBASE_FILES', True).split()
+        corebase_files = self.d.getVar('COREBASE_FILES').split()
         corebase_files = [corebase + '/' +x for x in corebase_files]
         # Make sure bitbake goes in
         bitbake_dir = bb.__file__.rsplit('/', 3)[0]
@@ -100,7 +100,7 @@ class BuildSystem(object):
                 # Drop all bbappends except the one for the image the SDK is being built for
                 # (because of externalsrc, the workspace bbappends will interfere with the
                 # locked signatures if present, and we don't need them anyway)
-                image_bbappend = os.path.splitext(os.path.basename(self.d.getVar('FILE', True)))[0] + '.bbappend'
+                image_bbappend = os.path.splitext(os.path.basename(self.d.getVar('FILE')))[0] + '.bbappend'
                 appenddir = os.path.join(layerdestpath, 'appends')
                 if os.path.isdir(appenddir):
                     for fn in os.listdir(appenddir):
@@ -208,7 +208,7 @@ def create_locked_sstate_cache(lockedsigs, input_sstate_cache, output_sstate_cac
     import shutil
    bb.note('Generating sstate-cache...')
 
-    nativelsbstring = d.getVar('NATIVELSBSTRING', True)
+    nativelsbstring = d.getVar('NATIVELSBSTRING')
     bb.process.run("gen-lockedsig-cache %s %s %s %s %s" % (lockedsigs, input_sstate_cache, output_sstate_cache, nativelsbstring, filterfile or ''))
     if fixedlsbstring and nativelsbstring != fixedlsbstring:
         nativedir = output_sstate_cache + '/' + nativelsbstring
diff --git a/meta/lib/oe/data.py b/meta/lib/oe/data.py
index ee48950a82..032f68a847 100644
--- a/meta/lib/oe/data.py
+++ b/meta/lib/oe/data.py
@@ -12,6 +12,6 @@ def typed_value(key, d):
         flags = {}
 
     try:
-        return oe.maketype.create(d.getVar(key, True) or '', var_type, **flags)
+        return oe.maketype.create(d.getVar(key) or '', var_type, **flags)
     except (TypeError, ValueError) as exc:
         bb.msg.fatal("Data", "%s: %s" % (key, str(exc)))
diff --git a/meta/lib/oe/distro_check.py b/meta/lib/oe/distro_check.py
index c666ddc257..f54f4bb67d 100644
--- a/meta/lib/oe/distro_check.py
+++ b/meta/lib/oe/distro_check.py
@@ -224,37 +224,37 @@ def compare_in_distro_packages_list(distro_check_dir, d):
     localdata = bb.data.createCopy(d)
     pkglst_dir = os.path.join(distro_check_dir, "package_lists")
     matching_distros = []
-    pn = recipe_name = d.getVar('PN', True)
+    pn = recipe_name = d.getVar('PN')
     bb.note("Checking: %s" % pn)
 
     if pn.find("-native") != -1:
         pnstripped = pn.split("-native")
-        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
+        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
         bb.data.update_data(localdata)
         recipe_name = pnstripped[0]
 
     if pn.startswith("nativesdk-"):
         pnstripped = pn.split("nativesdk-")
-        localdata.setVar('OVERRIDES', "pn-" + pnstripped[1] + ":" + d.getVar('OVERRIDES', True))
+        localdata.setVar('OVERRIDES', "pn-" + pnstripped[1] + ":" + d.getVar('OVERRIDES'))
         bb.data.update_data(localdata)
         recipe_name = pnstripped[1]
 
     if pn.find("-cross") != -1:
         pnstripped = pn.split("-cross")
-        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
+        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
         bb.data.update_data(localdata)
         recipe_name = pnstripped[0]
 
     if pn.find("-initial") != -1:
         pnstripped = pn.split("-initial")
-        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
+        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
         bb.data.update_data(localdata)
         recipe_name = pnstripped[0]
 
     bb.note("Recipe: %s" % recipe_name)
 
     distro_exceptions = dict({"OE-Core":'OE-Core', "OpenedHand":'OpenedHand', "Intel":'Intel', "Upstream":'Upstream', "Windriver":'Windriver', "OSPDT":'OSPDT Approved', "Poky":'poky'})
-    tmp = localdata.getVar('DISTRO_PN_ALIAS', True) or ""
+    tmp = localdata.getVar('DISTRO_PN_ALIAS') or ""
     for str in tmp.split():
         if str and str.find("=") == -1 and distro_exceptions[str]:
             matching_distros.append(str)
@@ -286,10 +286,10 @@ def compare_in_distro_packages_list(distro_check_dir, d):
     return matching_distros
 
 def create_log_file(d, logname):
-    logpath = d.getVar('LOG_DIR', True)
+    logpath = d.getVar('LOG_DIR')
     bb.utils.mkdirhier(logpath)
     logfn, logsuffix = os.path.splitext(logname)
-    logfile = os.path.join(logpath, "%s.%s%s" % (logfn, d.getVar('DATETIME', True), logsuffix))
+    logfile = os.path.join(logpath, "%s.%s%s" % (logfn, d.getVar('DATETIME'), logsuffix))
     if not os.path.exists(logfile):
         slogfile = os.path.join(logpath, logname)
         if os.path.exists(slogfile):
@@ -301,8 +301,8 @@ def create_log_file(d, logname):
 
 
 def save_distro_check_result(result, datetime, result_file, d):
-    pn = d.getVar('PN', True)
-    logdir = d.getVar('LOG_DIR', True)
+    pn = d.getVar('PN')
+    logdir = d.getVar('LOG_DIR')
     if not logdir:
         bb.error("LOG_DIR variable is not defined, can't write the distro_check results")
         return
diff --git a/meta/lib/oe/gpg_sign.py b/meta/lib/oe/gpg_sign.py
index 38eb0cb137..dcd1990930 100644
--- a/meta/lib/oe/gpg_sign.py
+++ b/meta/lib/oe/gpg_sign.py
@@ -7,9 +7,9 @@ import oe.utils
 class LocalSigner(object):
     """Class for handling local (on the build host) signing"""
     def __init__(self, d):
-        self.gpg_bin = d.getVar('GPG_BIN', True) or \
+        self.gpg_bin = d.getVar('GPG_BIN') or \
                         bb.utils.which(os.getenv('PATH'), 'gpg')
-        self.gpg_path = d.getVar('GPG_PATH', True)
+        self.gpg_path = d.getVar('GPG_PATH')
         self.rpm_bin = bb.utils.which(os.getenv('PATH'), "rpm")
 
     def export_pubkey(self, output_file, keyid, armor=True):
diff --git a/meta/lib/oe/manifest.py b/meta/lib/oe/manifest.py
index 95f8eb2df3..6ec9b1af8b 100644
--- a/meta/lib/oe/manifest.py
+++ b/meta/lib/oe/manifest.py
@@ -59,9 +59,9 @@ class Manifest(object, metaclass=ABCMeta):
 
         if manifest_dir is None:
             if manifest_type != self.MANIFEST_TYPE_IMAGE:
-                self.manifest_dir = self.d.getVar('SDK_DIR', True)
+                self.manifest_dir = self.d.getVar('SDK_DIR')
             else:
-                self.manifest_dir = self.d.getVar('WORKDIR', True)
+                self.manifest_dir = self.d.getVar('WORKDIR')
         else:
             self.manifest_dir = manifest_dir
 
@@ -82,7 +82,7 @@ class Manifest(object, metaclass=ABCMeta):
     This will be used for testing until the class is implemented properly!
     """
     def _create_dummy_initial(self):
-        image_rootfs = self.d.getVar('IMAGE_ROOTFS', True)
+        image_rootfs = self.d.getVar('IMAGE_ROOTFS')
         pkg_list = dict()
         if image_rootfs.find("core-image-sato-sdk") > 0:
             pkg_list[self.PKG_TYPE_MUST_INSTALL] = \
@@ -195,7 +195,7 @@ class RpmManifest(Manifest):
         for pkg in pkg_list.split():
             pkg_type = self.PKG_TYPE_MUST_INSTALL
 
-            ml_variants = self.d.getVar('MULTILIB_VARIANTS', True).split()
+            ml_variants = self.d.getVar('MULTILIB_VARIANTS').split()
 
             for ml_variant in ml_variants:
                 if pkg.startswith(ml_variant + '-'):
@@ -216,13 +216,13 @@ class RpmManifest(Manifest):
 
         for var in self.var_maps[self.manifest_type]:
             if var in self.vars_to_split:
-                split_pkgs = self._split_multilib(self.d.getVar(var, True))
+                split_pkgs = self._split_multilib(self.d.getVar(var))
                 if split_pkgs is not None:
                     pkgs = dict(list(pkgs.items()) + list(split_pkgs.items()))
             else:
-                pkg_list = self.d.getVar(var, True)
+                pkg_list = self.d.getVar(var)
                 if pkg_list is not None:
-                    pkgs[self.var_maps[self.manifest_type][var]] = self.d.getVar(var, True)
+                    pkgs[self.var_maps[self.manifest_type][var]] = self.d.getVar(var)
 
         for pkg_type in pkgs:
             for pkg in pkgs[pkg_type].split():
@@ -245,7 +245,7 @@ class OpkgManifest(Manifest):
         for pkg in pkg_list.split():
             pkg_type = self.PKG_TYPE_MUST_INSTALL
 
-            ml_variants = self.d.getVar('MULTILIB_VARIANTS', True).split()
+            ml_variants = self.d.getVar('MULTILIB_VARIANTS').split()
 
             for ml_variant in ml_variants:
                 if pkg.startswith(ml_variant + '-'):
@@ -266,13 +266,13 @@ class OpkgManifest(Manifest):
 
         for var in self.var_maps[self.manifest_type]:
             if var in self.vars_to_split:
-                split_pkgs = self._split_multilib(self.d.getVar(var, True))
+                split_pkgs = self._split_multilib(self.d.getVar(var))
                 if split_pkgs is not None:
                     pkgs = dict(list(pkgs.items()) + list(split_pkgs.items()))
             else:
-                pkg_list = self.d.getVar(var, True)
+                pkg_list = self.d.getVar(var)
                 if pkg_list is not None:
-                    pkgs[self.var_maps[self.manifest_type][var]] = self.d.getVar(var, True)
+                    pkgs[self.var_maps[self.manifest_type][var]] = self.d.getVar(var)
 
         for pkg_type in pkgs:
             for pkg in pkgs[pkg_type].split():
@@ -310,7 +310,7 @@ class DpkgManifest(Manifest):
             manifest.write(self.initial_manifest_file_header)
 
             for var in self.var_maps[self.manifest_type]:
-                pkg_list = self.d.getVar(var, True)
+                pkg_list = self.d.getVar(var)
 
                 if pkg_list is None:
                     continue
@@ -332,7 +332,7 @@ def create_manifest(d, final_manifest=False, manifest_dir=None,
                     'ipk': OpkgManifest,
                     'deb': DpkgManifest}
 
-    manifest = manifest_map[d.getVar('IMAGE_PKGTYPE', True)](d, manifest_dir, manifest_type)
+    manifest = manifest_map[d.getVar('IMAGE_PKGTYPE')](d, manifest_dir, manifest_type)
 
     if final_manifest:
         manifest.create_final()
diff --git a/meta/lib/oe/package.py b/meta/lib/oe/package.py
index ae60a5843e..795389517f 100644
--- a/meta/lib/oe/package.py
+++ b/meta/lib/oe/package.py
@@ -104,7 +104,7 @@ def read_shlib_providers(d):
     import re
 
     shlib_provider = {}
-    shlibs_dirs = d.getVar('SHLIBSDIRS', True).split()
+    shlibs_dirs = d.getVar('SHLIBSDIRS').split()
     list_re = re.compile('^(.*)\.list$')
     # Go from least to most specific since the last one found wins
     for dir in reversed(shlibs_dirs):
diff --git a/meta/lib/oe/package_manager.py b/meta/lib/oe/package_manager.py
index e5e3c3b679..bb458691e3 100644
--- a/meta/lib/oe/package_manager.py
+++ b/meta/lib/oe/package_manager.py
@@ -107,16 +107,16 @@ class RpmIndexer(Indexer):
         target_os = collections.OrderedDict()
 
         if arch_var is not None and os_var is not None:
-            package_archs['default'] = self.d.getVar(arch_var, True).split()
+            package_archs['default'] = self.d.getVar(arch_var).split()
             package_archs['default'].reverse()
-            target_os['default'] = self.d.getVar(os_var, True).strip()
+            target_os['default'] = self.d.getVar(os_var).strip()
         else:
-            package_archs['default'] = self.d.getVar("PACKAGE_ARCHS", True).split()
+            package_archs['default'] = self.d.getVar("PACKAGE_ARCHS").split()
             # arch order is reversed. This ensures the -best- match is
             # listed first!
             package_archs['default'].reverse()
-            target_os['default'] = self.d.getVar("TARGET_OS", True).strip()
-        multilibs = self.d.getVar('MULTILIBS', True) or ""
+            target_os['default'] = self.d.getVar("TARGET_OS").strip()
+        multilibs = self.d.getVar('MULTILIBS') or ""
         for ext in multilibs.split():
             eext = ext.split(':')
             if len(eext) > 1 and eext[0] == 'multilib':
@@ -150,8 +150,8 @@ class RpmIndexer(Indexer):
         return (ml_prefix_list, target_os)
 
     def write_index(self):
-        sdk_pkg_archs = (self.d.getVar('SDK_PACKAGE_ARCHS', True) or "").replace('-', '_').split()
-        all_mlb_pkg_archs = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS', True) or "").replace('-', '_').split()
+        sdk_pkg_archs = (self.d.getVar('SDK_PACKAGE_ARCHS') or "").replace('-', '_').split()
+        all_mlb_pkg_archs = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS') or "").replace('-', '_').split()
 
         mlb_prefix_list = self.get_ml_prefix_and_os_list()[0]
 
@@ -165,15 +165,15 @@ class RpmIndexer(Indexer):
             archs = archs.union(set(sdk_pkg_archs))
 
         rpm_createrepo = bb.utils.which(os.getenv('PATH'), "createrepo")
-        if self.d.getVar('PACKAGE_FEED_SIGN', True) == '1':
-            signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND', True))
+        if self.d.getVar('PACKAGE_FEED_SIGN') == '1':
+            signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND'))
         else:
             signer = None
         index_cmds = []
         repomd_files = []
         rpm_dirs_found = False
         for arch in archs:
-            dbpath = os.path.join(self.d.getVar('WORKDIR', True), 'rpmdb', arch)
+            dbpath = os.path.join(self.d.getVar('WORKDIR'), 'rpmdb', arch)
             if os.path.exists(dbpath):
                 bb.utils.remove(dbpath, True)
             arch_dir = os.path.join(self.deploy_dir, arch)
@@ -197,11 +197,11 @@ class RpmIndexer(Indexer):
         # Sign repomd
         if signer:
             for repomd in repomd_files:
-                feed_sig_type = self.d.getVar('PACKAGE_FEED_GPG_SIGNATURE_TYPE', True)
+                feed_sig_type = self.d.getVar('PACKAGE_FEED_GPG_SIGNATURE_TYPE')
                 is_ascii_sig = (feed_sig_type.upper() != "BIN")
                 signer.detach_sign(repomd,
-                                   self.d.getVar('PACKAGE_FEED_GPG_NAME', True),
-                                   self.d.getVar('PACKAGE_FEED_GPG_PASSPHRASE_FILE', True),
+                                   self.d.getVar('PACKAGE_FEED_GPG_NAME'),
+                                   self.d.getVar('PACKAGE_FEED_GPG_PASSPHRASE_FILE'),
                                    armor=is_ascii_sig)
 
 
@@ -212,8 +212,8 @@ class OpkgIndexer(Indexer):
                      "MULTILIB_ARCHS"]
 
         opkg_index_cmd = bb.utils.which(os.getenv('PATH'), "opkg-make-index")
-        if self.d.getVar('PACKAGE_FEED_SIGN', True) == '1':
-            signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND', True))
+        if self.d.getVar('PACKAGE_FEED_SIGN') == '1':
+            signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND'))
         else:
             signer = None
 
@@ -223,7 +223,7 @@ class OpkgIndexer(Indexer):
         index_cmds = set()
         index_sign_files = set()
         for arch_var in arch_vars:
-            archs = self.d.getVar(arch_var, True)
+            archs = self.d.getVar(arch_var)
             if archs is None:
                 continue
 
@@ -251,12 +251,12 @@ class OpkgIndexer(Indexer):
             bb.fatal('%s' % ('\n'.join(result)))
 
         if signer:
-            feed_sig_type = self.d.getVar('PACKAGE_FEED_GPG_SIGNATURE_TYPE', True)
+            feed_sig_type = self.d.getVar('PACKAGE_FEED_GPG_SIGNATURE_TYPE')
             is_ascii_sig = (feed_sig_type.upper() != "BIN")
             for f in index_sign_files:
                 signer.detach_sign(f,
-                                   self.d.getVar('PACKAGE_FEED_GPG_NAME', True),
-                                   self.d.getVar('PACKAGE_FEED_GPG_PASSPHRASE_FILE', True),
+                                   self.d.getVar('PACKAGE_FEED_GPG_NAME'),
+                                   self.d.getVar('PACKAGE_FEED_GPG_PASSPHRASE_FILE'),
                                    armor=is_ascii_sig)
 
 
@@ -290,16 +290,16 @@ class DpkgIndexer(Indexer):
 
         os.environ['APT_CONFIG'] = self.apt_conf_file
 
-        pkg_archs = self.d.getVar('PACKAGE_ARCHS', True)
+        pkg_archs = self.d.getVar('PACKAGE_ARCHS')
         if pkg_archs is not None:
             arch_list = pkg_archs.split()
-        sdk_pkg_archs = self.d.getVar('SDK_PACKAGE_ARCHS', True)
+        sdk_pkg_archs = self.d.getVar('SDK_PACKAGE_ARCHS')
         if sdk_pkg_archs is not None:
             for a in sdk_pkg_archs.split():
                 if a not in pkg_archs:
                     arch_list.append(a)
 
-        all_mlb_pkg_arch_list = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS', True) or "").split()
+        all_mlb_pkg_arch_list = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS') or "").split()
         arch_list.extend(arch for arch in all_mlb_pkg_arch_list if arch not in arch_list)
 
         apt_ftparchive = bb.utils.which(os.getenv('PATH'), "apt-ftparchive")
@@ -332,7 +332,7 @@ class DpkgIndexer(Indexer):
         result = oe.utils.multiprocess_exec(index_cmds, create_index)
         if result:
             bb.fatal('%s' % ('\n'.join(result)))
-        if self.d.getVar('PACKAGE_FEED_SIGN', True) == '1':
+        if self.d.getVar('PACKAGE_FEED_SIGN') == '1':
             raise NotImplementedError('Package feed signing not implementd for dpkg')
 
 
@@ -386,7 +386,7 @@ class RpmPkgsList(PkgsList):
 
                     # Workaround for bug 3565. Simply look to see if we
                     # know of a package with that name, if not try again!
-                    filename = os.path.join(self.d.getVar('PKGDATA_DIR', True),
+                    filename = os.path.join(self.d.getVar('PKGDATA_DIR'),
                                             'runtime-reverse',
                                             new_pkg)
                     if os.path.exists(filename):
@@ -464,7 +464,7 @@ class OpkgPkgsList(PkgsList):
 
         self.opkg_cmd = bb.utils.which(os.getenv('PATH'), "opkg")
         self.opkg_args = "-f %s -o %s " % (config_file, rootfs_dir)
-        self.opkg_args += self.d.getVar("OPKG_ARGS", True)
+        self.opkg_args += self.d.getVar("OPKG_ARGS")
 
     def list_pkgs(self, format=None):
         cmd = "%s %s status" % (self.opkg_cmd, self.opkg_args)
@@ -512,9 +512,9 @@ class PackageManager(object, metaclass=ABCMeta):
         self.d = d
         self.deploy_dir = None
         self.deploy_lock = None
-        self.feed_uris = self.d.getVar('PACKAGE_FEED_URIS', True) or ""
-        self.feed_base_paths = self.d.getVar('PACKAGE_FEED_BASE_PATHS', True) or ""
-        self.feed_archs = self.d.getVar('PACKAGE_FEED_ARCHS', True)
+        self.feed_uris = self.d.getVar('PACKAGE_FEED_URIS') or ""
+        self.feed_base_paths = self.d.getVar('PACKAGE_FEED_BASE_PATHS') or ""
+        self.feed_archs = self.d.getVar('PACKAGE_FEED_ARCHS')
 
     """
     Update the package manager package database.
@@ -568,7 +568,7 @@ class PackageManager(object, metaclass=ABCMeta):
     def install_complementary(self, globs=None):
         # we need to write the list of installed packages to a file because the
         # oe-pkgdata-util reads it from a file
-        installed_pkgs_file = os.path.join(self.d.getVar('WORKDIR', True),
+        installed_pkgs_file = os.path.join(self.d.getVar('WORKDIR'),
                                            "installed_pkgs.txt")
         with open(installed_pkgs_file, "w+") as installed_pkgs:
             pkgs = self.list_installed()
@@ -576,10 +576,10 @@ class PackageManager(object, metaclass=ABCMeta):
             installed_pkgs.write(output)
 
         if globs is None:
-            globs = self.d.getVar('IMAGE_INSTALL_COMPLEMENTARY', True)
+            globs = self.d.getVar('IMAGE_INSTALL_COMPLEMENTARY')
             split_linguas = set()
 
-            for translation in self.d.getVar('IMAGE_LINGUAS', True).split():
+            for translation in self.d.getVar('IMAGE_LINGUAS').split():
                 split_linguas.add(translation)
                 split_linguas.add(translation.split('-')[0])
 
@@ -592,9 +592,9 @@ class PackageManager(object, metaclass=ABCMeta):
             return
 
         cmd = [bb.utils.which(os.getenv('PATH'), "oe-pkgdata-util"),
-               "-p", self.d.getVar('PKGDATA_DIR', True), "glob", installed_pkgs_file,
+               "-p", self.d.getVar('PKGDATA_DIR'), "glob", installed_pkgs_file,
                globs]
-        exclude = self.d.getVar('PACKAGE_EXCLUDE_COMPLEMENTARY', True)
+        exclude = self.d.getVar('PACKAGE_EXCLUDE_COMPLEMENTARY')
         if exclude:
             cmd.extend(['--exclude=' + '|'.join(exclude.split())])
         try:
@@ -659,7 +659,7 @@ class RpmPM(PackageManager):
         self.task_name = task_name
         self.providename = providename
         self.fullpkglist = list()
-        self.deploy_dir = self.d.getVar('DEPLOY_DIR_RPM', True)
+        self.deploy_dir = self.d.getVar('DEPLOY_DIR_RPM')
         self.etcrpm_dir = os.path.join(self.target_rootfs, "etc/rpm")
         self.install_dir_name = "oe_install"
         self.install_dir_path = os.path.join(self.target_rootfs, self.install_dir_name)
@@ -669,7 +669,7 @@ class RpmPM(PackageManager):
         # 1 = --log-level=info (includes information about executing scriptlets and their output)
         # 2 = --log-level=debug
         # 3 = --log-level=debug plus dumps of scriplet content and command invocation
-        self.debug_level = int(d.getVar('ROOTFS_RPM_DEBUG', True) or "0")
+        self.debug_level = int(d.getVar('ROOTFS_RPM_DEBUG') or "0")
         self.smart_opt = ["--log-level=%s" %
                           ("warning" if self.debug_level == 0 else
                            "info" if self.debug_level == 1 else
@@ -684,7 +684,7 @@ class RpmPM(PackageManager):
         if not os.path.exists(self.d.expand('${T}/saved')):
             bb.utils.mkdirhier(self.d.expand('${T}/saved'))
 
-        packageindex_dir = os.path.join(self.d.getVar('WORKDIR', True), 'rpms')
+        packageindex_dir = os.path.join(self.d.getVar('WORKDIR'), 'rpms')
         self.indexer = RpmIndexer(self.d, packageindex_dir)
         self.pkgs_list = RpmPkgsList(self.d, self.target_rootfs, arch_var, os_var)
 
@@ -702,7 +702,7 @@ class RpmPM(PackageManager):
         # List must be prefered to least preferred order
         default_platform_extra = list()
         platform_extra = list()
-        bbextendvariant = self.d.getVar('BBEXTENDVARIANT', True) or ""
+        bbextendvariant = self.d.getVar('BBEXTENDVARIANT') or ""
         for mlib in self.ml_os_list:
             for arch in self.ml_prefix_list[mlib]:
                 plt = arch.replace('-', '_') + '-.*-' + self.ml_os_list[mlib]
@@ -750,7 +750,7 @@ class RpmPM(PackageManager):
     Create configs for rpm and smart, and multilib is supported
     '''
     def create_configs(self):
-        target_arch = self.d.getVar('TARGET_ARCH', True)
+        target_arch = self.d.getVar('TARGET_ARCH')
         platform = '%s%s-%s' % (target_arch.replace('-', '_'),
                                 self.target_vendor,
                                 self.ml_os_list['default'])
@@ -758,7 +758,7 @@ class RpmPM(PackageManager):
         # List must be prefered to least preferred order
         default_platform_extra = list()
         platform_extra = list()
-        bbextendvariant = self.d.getVar('BBEXTENDVARIANT', True) or ""
+        bbextendvariant = self.d.getVar('BBEXTENDVARIANT') or ""
         for mlib in self.ml_os_list:
             for arch in self.ml_prefix_list[mlib]:
                 plt = arch.replace('-', '_') + '-.*-' + self.ml_os_list[mlib]
@@ -841,7 +841,7 @@ class RpmPM(PackageManager):
                 if not new_pkg:
                     # Failed to translate, package not found!
                     err_msg = '%s not found in the %s feeds (%s) in %s.' % \
-                              (pkg, mlib, " ".join(feed_archs), self.d.getVar('DEPLOY_DIR_RPM', True))
+                              (pkg, mlib, " ".join(feed_archs), self.d.getVar('DEPLOY_DIR_RPM'))
                     if not attempt_only:
                         bb.error(err_msg)
                         bb.fatal("This is often caused by an empty package declared " \
@@ -860,7 +860,7 @@ class RpmPM(PackageManager):
             new_pkg = self._search_pkg_name_in_feeds(pkg, default_archs)
             if not new_pkg:
                 err_msg = '%s not found in the feeds (%s) in %s.' % \
-                          (pkg, " ".join(default_archs), self.d.getVar('DEPLOY_DIR_RPM', True))
+                          (pkg, " ".join(default_archs), self.d.getVar('DEPLOY_DIR_RPM'))
                 if not attempt_only:
                     bb.error(err_msg)
                     bb.fatal("This is often caused by an empty package declared " \
@@ -887,7 +887,7 @@ class RpmPM(PackageManager):
 
         channel_priority = 5
         platform_dir = os.path.join(self.etcrpm_dir, "platform")
-        sdkos = self.d.getVar("SDK_OS", True)
+        sdkos = self.d.getVar("SDK_OS")
         with open(platform_dir, "w+") as platform_fd:
             platform_fd.write(platform + '\n')
             for pt in platform_extra:
@@ -957,8 +957,8 @@ class RpmPM(PackageManager):
             bb.fatal("Create rpm database failed. Command '%s' "
                      "returned %d:\n%s" % (' '.join(cmd), e.returncode, e.output.decode("utf-8")))
         # Import GPG key to RPM database of the target system
-        if self.d.getVar('RPM_SIGN_PACKAGES', True) == '1':
-            pubkey_path = self.d.getVar('RPM_GPG_PUBKEY', True)
+        if self.d.getVar('RPM_SIGN_PACKAGES') == '1':
+            pubkey_path = self.d.getVar('RPM_GPG_PUBKEY')
             cmd = [self.rpm_cmd, '--root', self.target_rootfs, '--dbpath', '/var/lib/rpm', '--import', pubkey_path]
             try:
                 subprocess.check_output(cmd, stderr=subprocess.STDOUT)
@@ -974,10 +974,10 @@ class RpmPM(PackageManager):
         self._invoke_smart(['config', '--set', 'rpm-root=%s' % self.target_rootfs])
         self._invoke_smart(['config', '--set', 'rpm-dbpath=/var/lib/rpm'])
         self._invoke_smart(['config', '--set', 'rpm-extra-macros._var=%s' %
-                           self.d.getVar('localstatedir', True)])
+                           self.d.getVar('localstatedir')])
         cmd = ["config", "--set", "rpm-extra-macros._tmppath=/%s/tmp" % self.install_dir_name]
 
-        prefer_color = self.d.getVar('RPM_PREFER_ELF_ARCH', True)
+        prefer_color = self.d.getVar('RPM_PREFER_ELF_ARCH')
         if prefer_color:
             if prefer_color not in ['0', '1', '2', '4']:
                 bb.fatal("Invalid RPM_PREFER_ELF_ARCH: %s, it should be one of:\n"
@@ -985,7 +985,7 @@ class RpmPM(PackageManager):
                          "\t2: ELF64 wins\n"
                          "\t4: ELF64 N32 wins (mips64 or mips64el only)" %
                          prefer_color)
-            if prefer_color == "4" and self.d.getVar("TUNE_ARCH", True) not in \
+            if prefer_color == "4" and self.d.getVar("TUNE_ARCH") not in \
                 ['mips64', 'mips64el']:
                 bb.fatal("RPM_PREFER_ELF_ARCH = \"4\" is for mips64 or mips64el "
                          "only.")
@@ -998,17 +998,17 @@ class RpmPM(PackageManager):
         # Write common configuration for host and target usage
         self._invoke_smart(['config', '--set', 'rpm-nolinktos=1'])
         self._invoke_smart(['config', '--set', 'rpm-noparentdirs=1'])
-        check_signature = self.d.getVar('RPM_CHECK_SIGNATURES', True)
+        check_signature = self.d.getVar('RPM_CHECK_SIGNATURES')
         if check_signature and check_signature.strip() == "0":
             self._invoke_smart(['config', '--set rpm-check-signatures=false'])
-        for i in self.d.getVar('BAD_RECOMMENDATIONS', True).split():
+        for i in self.d.getVar('BAD_RECOMMENDATIONS').split():
             self._invoke_smart(['flag', '--set', 'ignore-recommends', i])
 
         # Do the following configurations here, to avoid them being
         # saved for field upgrade
-        if self.d.getVar('NO_RECOMMENDATIONS', True).strip() == "1":
+        if self.d.getVar('NO_RECOMMENDATIONS').strip() == "1":
             self._invoke_smart(['config', '--set', 'ignore-all-recommends=1'])
-        pkg_exclude = self.d.getVar('PACKAGE_EXCLUDE', True) or ""
+        pkg_exclude = self.d.getVar('PACKAGE_EXCLUDE') or ""
         for i in pkg_exclude.split():
             self._invoke_smart(['flag', '--set', 'exclude-packages', i])
 
@@ -1019,13 +1019,13 @@ class RpmPM(PackageManager):
         ch_already_added = []
         for canonical_arch in platform_extra:
             arch = canonical_arch.split('-')[0]
-            arch_channel = os.path.join(self.d.getVar('WORKDIR', True), 'rpms', arch)
+            arch_channel = os.path.join(self.d.getVar('WORKDIR'), 'rpms', arch)
             oe.path.remove(arch_channel)
             deploy_arch_dir = os.path.join(self.deploy_dir, arch)
             if not os.path.exists(deploy_arch_dir):
                 continue
 
-            lockfilename = self.d.getVar('DEPLOY_DIR_RPM', True) + "/rpm.lock"
+            lockfilename = self.d.getVar('DEPLOY_DIR_RPM') + "/rpm.lock"
             lf = bb.utils.lockfile(lockfilename, False)
             oe.path.copyhardlinktree(deploy_arch_dir, arch_channel)
             bb.utils.unlockfile(lf)
@@ -1096,7 +1096,7 @@ class RpmPM(PackageManager):
                         "fi\n"
 
         intercept_dir = self.d.expand('${WORKDIR}/intercept_scripts')
-        native_root = self.d.getVar('STAGING_DIR_NATIVE', True)
+        native_root = self.d.getVar('STAGING_DIR_NATIVE')
         scriptlet_content = SCRIPTLET_FORMAT % (os.environ['PATH'],
                                                 self.target_rootfs,
                                                 intercept_dir,
@@ -1170,7 +1170,7 @@ class RpmPM(PackageManager):
         ml_pkgs = []
         non_ml_pkgs = pkgs[:]
         for pkg in pkgs:
-            for mlib in (self.d.getVar("MULTILIB_VARIANTS", True) or "").split():
+            for mlib in (self.d.getVar("MULTILIB_VARIANTS") or "").split():
                 if pkg.startswith(mlib + '-'):
                     ml_pkgs.append(pkg)
                     non_ml_pkgs.remove(pkg)
@@ -1184,7 +1184,7 @@ class RpmPM(PackageManager):
         # correctly.
         pkgs_new = []
         for pkg in non_ml_pkgs:
-            for mlib in (self.d.getVar("MULTILIB_VARIANTS", True) or "").split():
+            for mlib in (self.d.getVar("MULTILIB_VARIANTS") or "").split():
                 mlib_pkg = mlib + "-" + pkg
                 if mlib_pkg in ml_pkgs:
                     pkgs_new.append(pkg)
@@ -1401,7 +1401,7 @@ class RpmPM(PackageManager):
 
         self._invoke_smart(['config', '--set', 'rpm-nolinktos=1'])
         self._invoke_smart(['config', '--set', 'rpm-noparentdirs=1'])
-        for i in self.d.getVar('BAD_RECOMMENDATIONS', True).split():
+        for i in self.d.getVar('BAD_RECOMMENDATIONS').split():
             self._invoke_smart(['flag', '--set', 'ignore-recommends', i])
         self._invoke_smart(['channel', '--add', 'rpmsys', 'type=rpm-sys', '-y'])
 
@@ -1575,13 +1575,13 @@ class OpkgPM(OpkgDpkgPM):
         self.pkg_archs = archs
         self.task_name = task_name
 
-        self.deploy_dir = self.d.getVar("DEPLOY_DIR_IPK", True)
+        self.deploy_dir = self.d.getVar("DEPLOY_DIR_IPK")
         self.deploy_lock_file = os.path.join(self.deploy_dir, "deploy.lock")
         self.opkg_cmd = bb.utils.which(os.getenv('PATH'), "opkg")
         self.opkg_args = "--volatile-cache -f %s -t %s -o %s " % (self.config_file, self.d.expand('${T}/ipktemp/') ,target_rootfs)
-        self.opkg_args += self.d.getVar("OPKG_ARGS", True)
+        self.opkg_args += self.d.getVar("OPKG_ARGS")
 
-        opkg_lib_dir = self.d.getVar('OPKGLIBDIR', True)
+        opkg_lib_dir = self.d.getVar('OPKGLIBDIR')
         if opkg_lib_dir[0] == "/":
             opkg_lib_dir = opkg_lib_dir[1:]
 
@@ -1593,7 +1593,7 @@ class OpkgPM(OpkgDpkgPM):
         if not os.path.exists(self.d.expand('${T}/saved')):
             bb.utils.mkdirhier(self.d.expand('${T}/saved'))
 
-        self.from_feeds = (self.d.getVar('BUILD_IMAGES_FROM_FEEDS', True) or "") == "1"
+        self.from_feeds = (self.d.getVar('BUILD_IMAGES_FROM_FEEDS') or "") == "1"
         if self.from_feeds:
             self._create_custom_config()
         else:
@@ -1638,7 +1638,7 @@ class OpkgPM(OpkgDpkgPM):
                 config_file.write("arch %s %d\n" % (arch, priority))
                 priority += 5
 
-            for line in (self.d.getVar('IPK_FEED_URIS', True) or "").split():
+            for line in (self.d.getVar('IPK_FEED_URIS') or "").split():
                 feed_match = re.match("^[ \t]*(.*)##([^ \t]*)[ \t]*$", line)
 
                 if feed_match is not None:
@@ -1655,17 +1655,17 @@ class OpkgPM(OpkgDpkgPM):
         specified as compatible for the current machine.
         NOTE: Development-helper feature, NOT a full-fledged feed.
         """
-        if (self.d.getVar('FEED_DEPLOYDIR_BASE_URI', True) or "") != "":
+        if (self.d.getVar('FEED_DEPLOYDIR_BASE_URI') or "") != "":
             for arch in self.pkg_archs.split():
                 cfg_file_name = os.path.join(self.target_rootfs,
-                                             self.d.getVar("sysconfdir", True),
+                                             self.d.getVar("sysconfdir"),
                                              "opkg",
                                              "local-%s-feed.conf" % arch)
 
                 with open(cfg_file_name, "w+") as cfg_file:
                     cfg_file.write("src/gz local-%s %s/%s" %
                                    (arch,
-                                    self.d.getVar('FEED_DEPLOYDIR_BASE_URI', True),
+                                    self.d.getVar('FEED_DEPLOYDIR_BASE_URI'),
                                     arch))
 
         if self.opkg_dir != '/var/lib/opkg':
@@ -1674,8 +1674,8 @@ class OpkgPM(OpkgDpkgPM):
             # the default value of "/var/lib" as defined in opkg:
             # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_INFO_DIR "/var/lib/opkg/info"
             # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_STATUS_FILE "/var/lib/opkg/status"
-            cfg_file.write("option info_dir %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR', True), 'opkg', 'info'))
-            cfg_file.write("option status_file %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR', True), 'opkg', 'status'))
+            cfg_file.write("option info_dir %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR'), 'opkg', 'info'))
+            cfg_file.write("option status_file %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR'), 'opkg', 'status'))
 
 
     def _create_config(self):
@@ -1699,8 +1699,8 @@ class OpkgPM(OpkgDpkgPM):
                 # the default value of "/var/lib" as defined in opkg:
                 # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_INFO_DIR "/var/lib/opkg/info"
                 # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_STATUS_FILE "/var/lib/opkg/status"
-                config_file.write("option info_dir %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR', True), 'opkg', 'info'))
-                config_file.write("option status_file %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR', True), 'opkg', 'status'))
+                config_file.write("option info_dir %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR'), 'opkg', 'info'))
+                config_file.write("option status_file %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR'), 'opkg', 'status'))
 
     def insert_feeds_uris(self):
         if self.feed_uris == "":
@@ -1755,9 +1755,9 @@ class OpkgPM(OpkgDpkgPM):
         os.environ['OFFLINE_ROOT'] = self.target_rootfs
         os.environ['IPKG_OFFLINE_ROOT'] = self.target_rootfs
         os.environ['OPKG_OFFLINE_ROOT'] = self.target_rootfs
-        os.environ['INTERCEPT_DIR'] = os.path.join(self.d.getVar('WORKDIR', True),
+        os.environ['INTERCEPT_DIR'] = os.path.join(self.d.getVar('WORKDIR'),
                                                    "intercept_scripts")
-        os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE', True)
+        os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE')
 
         try:
             bb.note("Installing the following packages: %s" % ' '.join(pkgs))
@@ -1808,7 +1808,7 @@ class OpkgPM(OpkgDpkgPM):
         return OpkgPkgsList(self.d, self.target_rootfs, self.config_file).list_pkgs()
 
     def handle_bad_recommendations(self):
-        bad_recommendations = self.d.getVar("BAD_RECOMMENDATIONS", True) or ""
+        bad_recommendations = self.d.getVar("BAD_RECOMMENDATIONS") or ""
         if bad_recommendations.strip() == "":
             return
 
@@ -1859,7 +1859,7 @@ class OpkgPM(OpkgDpkgPM):
         bb.utils.mkdirhier(temp_opkg_dir)
 
         opkg_args = "-f %s -o %s " % (self.config_file, temp_rootfs)
-        opkg_args += self.d.getVar("OPKG_ARGS", True)
+        opkg_args += self.d.getVar("OPKG_ARGS")
 
         cmd = "%s %s update" % (self.opkg_cmd, opkg_args)
         try:
@@ -1935,7 +1935,7 @@ class DpkgPM(OpkgDpkgPM):
     def __init__(self, d, target_rootfs, archs, base_archs, apt_conf_dir=None):
         super(DpkgPM, self).__init__(d)
         self.target_rootfs = target_rootfs
-        self.deploy_dir = self.d.getVar('DEPLOY_DIR_DEB', True)
+        self.deploy_dir = self.d.getVar('DEPLOY_DIR_DEB')
         if apt_conf_dir is None:
             self.apt_conf_dir = self.d.expand("${APTCONF_TARGET}/apt")
         else:
@@ -1944,10 +1944,10 @@ class DpkgPM(OpkgDpkgPM):
         self.apt_get_cmd = bb.utils.which(os.getenv('PATH'), "apt-get")
         self.apt_cache_cmd = bb.utils.which(os.getenv('PATH'), "apt-cache")
 
-        self.apt_args = d.getVar("APT_ARGS", True)
+        self.apt_args = d.getVar("APT_ARGS")
 
         self.all_arch_list = archs.split()
-        all_mlb_pkg_arch_list = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS', True) or "").split()
+        all_mlb_pkg_arch_list = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS') or "").split()
1951 | self.all_arch_list.extend(arch for arch in all_mlb_pkg_arch_list if arch not in self.all_arch_list) | 1951 | self.all_arch_list.extend(arch for arch in all_mlb_pkg_arch_list if arch not in self.all_arch_list) |
1952 | 1952 | ||
1953 | self._create_configs(archs, base_archs) | 1953 | self._create_configs(archs, base_archs) |
@@ -2008,9 +2008,9 @@ class DpkgPM(OpkgDpkgPM): | |||
2008 | os.environ['OFFLINE_ROOT'] = self.target_rootfs | 2008 | os.environ['OFFLINE_ROOT'] = self.target_rootfs |
2009 | os.environ['IPKG_OFFLINE_ROOT'] = self.target_rootfs | 2009 | os.environ['IPKG_OFFLINE_ROOT'] = self.target_rootfs |
2010 | os.environ['OPKG_OFFLINE_ROOT'] = self.target_rootfs | 2010 | os.environ['OPKG_OFFLINE_ROOT'] = self.target_rootfs |
2011 | os.environ['INTERCEPT_DIR'] = os.path.join(self.d.getVar('WORKDIR', True), | 2011 | os.environ['INTERCEPT_DIR'] = os.path.join(self.d.getVar('WORKDIR'), |
2012 | "intercept_scripts") | 2012 | "intercept_scripts") |
2013 | os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE', True) | 2013 | os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE') |
2014 | 2014 | ||
2015 | failed_pkgs = [] | 2015 | failed_pkgs = [] |
2016 | for pkg_name in installed_pkgs: | 2016 | for pkg_name in installed_pkgs: |
@@ -2161,7 +2161,7 @@ class DpkgPM(OpkgDpkgPM): | |||
2161 | 2161 | ||
2162 | priority += 5 | 2162 | priority += 5 |
2163 | 2163 | ||
2164 | pkg_exclude = self.d.getVar('PACKAGE_EXCLUDE', True) or "" | 2164 | pkg_exclude = self.d.getVar('PACKAGE_EXCLUDE') or "" |
2165 | for pkg in pkg_exclude.split(): | 2165 | for pkg in pkg_exclude.split(): |
2166 | prefs_file.write( | 2166 | prefs_file.write( |
2167 | "Package: %s\n" | 2167 | "Package: %s\n" |
@@ -2176,14 +2176,14 @@ class DpkgPM(OpkgDpkgPM): | |||
2176 | os.path.join(self.deploy_dir, arch)) | 2176 | os.path.join(self.deploy_dir, arch)) |
2177 | 2177 | ||
2178 | base_arch_list = base_archs.split() | 2178 | base_arch_list = base_archs.split() |
2179 | multilib_variants = self.d.getVar("MULTILIB_VARIANTS", True); | 2179 | multilib_variants = self.d.getVar("MULTILIB_VARIANTS"); |
2180 | for variant in multilib_variants.split(): | 2180 | for variant in multilib_variants.split(): |
2181 | localdata = bb.data.createCopy(self.d) | 2181 | localdata = bb.data.createCopy(self.d) |
2182 | variant_tune = localdata.getVar("DEFAULTTUNE_virtclass-multilib-" + variant, False) | 2182 | variant_tune = localdata.getVar("DEFAULTTUNE_virtclass-multilib-" + variant, False) |
2183 | orig_arch = localdata.getVar("DPKG_ARCH", True) | 2183 | orig_arch = localdata.getVar("DPKG_ARCH") |
2184 | localdata.setVar("DEFAULTTUNE", variant_tune) | 2184 | localdata.setVar("DEFAULTTUNE", variant_tune) |
2185 | bb.data.update_data(localdata) | 2185 | bb.data.update_data(localdata) |
2186 | variant_arch = localdata.getVar("DPKG_ARCH", True) | 2186 | variant_arch = localdata.getVar("DPKG_ARCH") |
2187 | if variant_arch not in base_arch_list: | 2187 | if variant_arch not in base_arch_list: |
2188 | base_arch_list.append(variant_arch) | 2188 | base_arch_list.append(variant_arch) |
2189 | 2189 | ||
@@ -2214,7 +2214,7 @@ class DpkgPM(OpkgDpkgPM): | |||
2214 | 2214 | ||
2215 | def remove_packaging_data(self): | 2215 | def remove_packaging_data(self): |
2216 | bb.utils.remove(os.path.join(self.target_rootfs, | 2216 | bb.utils.remove(os.path.join(self.target_rootfs, |
2217 | self.d.getVar('opkglibdir', True)), True) | 2217 | self.d.getVar('opkglibdir')), True) |
2218 | bb.utils.remove(self.target_rootfs + "/var/lib/dpkg/", True) | 2218 | bb.utils.remove(self.target_rootfs + "/var/lib/dpkg/", True) |
2219 | 2219 | ||
2220 | def fix_broken_dependencies(self): | 2220 | def fix_broken_dependencies(self): |
@@ -2262,12 +2262,12 @@ class DpkgPM(OpkgDpkgPM): | |||
2262 | return tmp_dir | 2262 | return tmp_dir |
2263 | 2263 | ||
2264 | def generate_index_files(d): | 2264 | def generate_index_files(d): |
2265 | classes = d.getVar('PACKAGE_CLASSES', True).replace("package_", "").split() | 2265 | classes = d.getVar('PACKAGE_CLASSES').replace("package_", "").split() |
2266 | 2266 | ||
2267 | indexer_map = { | 2267 | indexer_map = { |
2268 | "rpm": (RpmIndexer, d.getVar('DEPLOY_DIR_RPM', True)), | 2268 | "rpm": (RpmIndexer, d.getVar('DEPLOY_DIR_RPM')), |
2269 | "ipk": (OpkgIndexer, d.getVar('DEPLOY_DIR_IPK', True)), | 2269 | "ipk": (OpkgIndexer, d.getVar('DEPLOY_DIR_IPK')), |
2270 | "deb": (DpkgIndexer, d.getVar('DEPLOY_DIR_DEB', True)) | 2270 | "deb": (DpkgIndexer, d.getVar('DEPLOY_DIR_DEB')) |
2271 | } | 2271 | } |
2272 | 2272 | ||
2273 | result = None | 2273 | result = None |
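As a side note on the idiom used in generate_index_files() above: PACKAGE_CLASSES is read with the bare d.getVar(name) form (expansion is the default), unset variables come back as None, and the per-backend DEPLOY_DIR_* paths are gathered into a dict. The sketch below isolates that lookup-and-dispatch pattern; demo_index_deploy_dirs is an invented helper for illustration only and assumes a populated BitBake datastore d.

    def demo_index_deploy_dirs(d):
        # PACKAGE_CLASSES is e.g. "package_rpm package_ipk"; strip the prefix
        # to get the backend names, as generate_index_files() does.
        classes = (d.getVar('PACKAGE_CLASSES') or "").replace("package_", "").split()

        # getVar() returns None for unset variables, so filter before use.
        deploy_map = {
            "rpm": d.getVar('DEPLOY_DIR_RPM'),
            "ipk": d.getVar('DEPLOY_DIR_IPK'),
            "deb": d.getVar('DEPLOY_DIR_DEB'),
        }
        return [(c, deploy_map[c]) for c in classes if deploy_map.get(c)]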
diff --git a/meta/lib/oe/packagedata.py b/meta/lib/oe/packagedata.py index 21d4de914f..32e5c82a94 100644 --- a/meta/lib/oe/packagedata.py +++ b/meta/lib/oe/packagedata.py | |||
@@ -57,7 +57,7 @@ def read_subpkgdata_dict(pkg, d): | |||
57 | def _pkgmap(d): | 57 | def _pkgmap(d): |
58 | """Return a dictionary mapping package to recipe name.""" | 58 | """Return a dictionary mapping package to recipe name.""" |
59 | 59 | ||
60 | pkgdatadir = d.getVar("PKGDATA_DIR", True) | 60 | pkgdatadir = d.getVar("PKGDATA_DIR") |
61 | 61 | ||
62 | pkgmap = {} | 62 | pkgmap = {} |
63 | try: | 63 | try: |
diff --git a/meta/lib/oe/packagegroup.py b/meta/lib/oe/packagegroup.py index 97819279b7..d68e5d322b 100644 --- a/meta/lib/oe/packagegroup.py +++ b/meta/lib/oe/packagegroup.py | |||
@@ -1,7 +1,7 @@ | |||
1 | import itertools | 1 | import itertools |
2 | 2 | ||
3 | def is_optional(feature, d): | 3 | def is_optional(feature, d): |
4 | packages = d.getVar("FEATURE_PACKAGES_%s" % feature, True) | 4 | packages = d.getVar("FEATURE_PACKAGES_%s" % feature) |
5 | if packages: | 5 | if packages: |
6 | return bool(d.getVarFlag("FEATURE_PACKAGES_%s" % feature, "optional", True)) | 6 | return bool(d.getVarFlag("FEATURE_PACKAGES_%s" % feature, "optional", True)) |
7 | else: | 7 | else: |
@@ -9,9 +9,9 @@ def is_optional(feature, d): | |||
9 | 9 | ||
10 | def packages(features, d): | 10 | def packages(features, d): |
11 | for feature in features: | 11 | for feature in features: |
12 | packages = d.getVar("FEATURE_PACKAGES_%s" % feature, True) | 12 | packages = d.getVar("FEATURE_PACKAGES_%s" % feature) |
13 | if not packages: | 13 | if not packages: |
14 | packages = d.getVar("PACKAGE_GROUP_%s" % feature, True) | 14 | packages = d.getVar("PACKAGE_GROUP_%s" % feature) |
15 | for pkg in (packages or "").split(): | 15 | for pkg in (packages or "").split(): |
16 | yield pkg | 16 | yield pkg |
17 | 17 | ||
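The two helpers above resolve a feature name to a package list: FEATURE_PACKAGES_&lt;feature&gt; is preferred, PACKAGE_GROUP_&lt;feature&gt; is the fallback, and the "optional" var flag is still fetched with an explicit expand argument via getVarFlag(). A minimal usage sketch, assuming a populated BitBake datastore d and that oe.packagegroup is importable; the feature names are examples only.

    import oe.packagegroup

    def demo_feature_packages(d, features=("ssh-server-openssh", "tools-debug")):
        # Hypothetical helper: resolves each feature name through the two
        # functions defined above and prints the result.
        for feature in features:
            optional = oe.packagegroup.is_optional(feature, d)
            pkgs = list(oe.packagegroup.packages([feature], d))
            print(feature, optional, pkgs)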
diff --git a/meta/lib/oe/patch.py b/meta/lib/oe/patch.py index 456ee70f7d..95674b3706 100644 --- a/meta/lib/oe/patch.py +++ b/meta/lib/oe/patch.py | |||
@@ -281,8 +281,8 @@ class GitApplyTree(PatchTree): | |||
281 | 281 | ||
282 | def __init__(self, dir, d): | 282 | def __init__(self, dir, d): |
283 | PatchTree.__init__(self, dir, d) | 283 | PatchTree.__init__(self, dir, d) |
284 | self.commituser = d.getVar('PATCH_GIT_USER_NAME', True) | 284 | self.commituser = d.getVar('PATCH_GIT_USER_NAME') |
285 | self.commitemail = d.getVar('PATCH_GIT_USER_EMAIL', True) | 285 | self.commitemail = d.getVar('PATCH_GIT_USER_EMAIL') |
286 | 286 | ||
287 | @staticmethod | 287 | @staticmethod |
288 | def extractPatchHeader(patchfile): | 288 | def extractPatchHeader(patchfile): |
@@ -371,8 +371,8 @@ class GitApplyTree(PatchTree): | |||
371 | @staticmethod | 371 | @staticmethod |
372 | def gitCommandUserOptions(cmd, commituser=None, commitemail=None, d=None): | 372 | def gitCommandUserOptions(cmd, commituser=None, commitemail=None, d=None): |
373 | if d: | 373 | if d: |
374 | commituser = d.getVar('PATCH_GIT_USER_NAME', True) | 374 | commituser = d.getVar('PATCH_GIT_USER_NAME') |
375 | commitemail = d.getVar('PATCH_GIT_USER_EMAIL', True) | 375 | commitemail = d.getVar('PATCH_GIT_USER_EMAIL') |
376 | if commituser: | 376 | if commituser: |
377 | cmd += ['-c', 'user.name="%s"' % commituser] | 377 | cmd += ['-c', 'user.name="%s"' % commituser] |
378 | if commitemail: | 378 | if commitemail: |
@@ -551,7 +551,7 @@ class GitApplyTree(PatchTree): | |||
551 | 551 | ||
552 | class QuiltTree(PatchSet): | 552 | class QuiltTree(PatchSet): |
553 | def _runcmd(self, args, run = True): | 553 | def _runcmd(self, args, run = True): |
554 | quiltrc = self.d.getVar('QUILTRCFILE', True) | 554 | quiltrc = self.d.getVar('QUILTRCFILE') |
555 | if not run: | 555 | if not run: |
556 | return ["quilt"] + ["--quiltrc"] + [quiltrc] + args | 556 | return ["quilt"] + ["--quiltrc"] + [quiltrc] + args |
557 | runcmd(["quilt"] + ["--quiltrc"] + [quiltrc] + args, self.dir) | 557 | runcmd(["quilt"] + ["--quiltrc"] + [quiltrc] + args, self.dir) |
@@ -727,7 +727,7 @@ class UserResolver(Resolver): | |||
727 | # Patch application failed | 727 | # Patch application failed |
728 | patchcmd = self.patchset.Push(True, False, False) | 728 | patchcmd = self.patchset.Push(True, False, False) |
729 | 729 | ||
730 | t = self.patchset.d.getVar('T', True) | 730 | t = self.patchset.d.getVar('T') |
731 | if not t: | 731 | if not t: |
732 | bb.msg.fatal("Build", "T not set") | 732 | bb.msg.fatal("Build", "T not set") |
733 | bb.utils.mkdirhier(t) | 733 | bb.utils.mkdirhier(t) |
@@ -792,7 +792,7 @@ def patch_path(url, fetch, workdir, expand=True): | |||
792 | return local | 792 | return local |
793 | 793 | ||
794 | def src_patches(d, all=False, expand=True): | 794 | def src_patches(d, all=False, expand=True): |
795 | workdir = d.getVar('WORKDIR', True) | 795 | workdir = d.getVar('WORKDIR') |
796 | fetch = bb.fetch2.Fetch([], d) | 796 | fetch = bb.fetch2.Fetch([], d) |
797 | patches = [] | 797 | patches = [] |
798 | sources = [] | 798 | sources = [] |
@@ -839,13 +839,13 @@ def src_patches(d, all=False, expand=True): | |||
839 | 839 | ||
840 | def should_apply(parm, d): | 840 | def should_apply(parm, d): |
841 | if "mindate" in parm or "maxdate" in parm: | 841 | if "mindate" in parm or "maxdate" in parm: |
842 | pn = d.getVar('PN', True) | 842 | pn = d.getVar('PN') |
843 | srcdate = d.getVar('SRCDATE_%s' % pn, True) | 843 | srcdate = d.getVar('SRCDATE_%s' % pn) |
844 | if not srcdate: | 844 | if not srcdate: |
845 | srcdate = d.getVar('SRCDATE', True) | 845 | srcdate = d.getVar('SRCDATE') |
846 | 846 | ||
847 | if srcdate == "now": | 847 | if srcdate == "now": |
848 | srcdate = d.getVar('DATE', True) | 848 | srcdate = d.getVar('DATE') |
849 | 849 | ||
850 | if "maxdate" in parm and parm["maxdate"] < srcdate: | 850 | if "maxdate" in parm and parm["maxdate"] < srcdate: |
851 | return False, 'is outdated' | 851 | return False, 'is outdated' |
@@ -855,22 +855,22 @@ def should_apply(parm, d): | |||
855 | 855 | ||
856 | 856 | ||
857 | if "minrev" in parm: | 857 | if "minrev" in parm: |
858 | srcrev = d.getVar('SRCREV', True) | 858 | srcrev = d.getVar('SRCREV') |
859 | if srcrev and srcrev < parm["minrev"]: | 859 | if srcrev and srcrev < parm["minrev"]: |
860 | return False, 'applies to later revisions' | 860 | return False, 'applies to later revisions' |
861 | 861 | ||
862 | if "maxrev" in parm: | 862 | if "maxrev" in parm: |
863 | srcrev = d.getVar('SRCREV', True) | 863 | srcrev = d.getVar('SRCREV') |
864 | if srcrev and srcrev > parm["maxrev"]: | 864 | if srcrev and srcrev > parm["maxrev"]: |
865 | return False, 'applies to earlier revisions' | 865 | return False, 'applies to earlier revisions' |
866 | 866 | ||
867 | if "rev" in parm: | 867 | if "rev" in parm: |
868 | srcrev = d.getVar('SRCREV', True) | 868 | srcrev = d.getVar('SRCREV') |
869 | if srcrev and parm["rev"] not in srcrev: | 869 | if srcrev and parm["rev"] not in srcrev: |
870 | return False, "doesn't apply to revision" | 870 | return False, "doesn't apply to revision" |
871 | 871 | ||
872 | if "notrev" in parm: | 872 | if "notrev" in parm: |
873 | srcrev = d.getVar('SRCREV', True) | 873 | srcrev = d.getVar('SRCREV') |
874 | if srcrev and parm["notrev"] in srcrev: | 874 | if srcrev and parm["notrev"] in srcrev: |
875 | return False, "doesn't apply to revision" | 875 | return False, "doesn't apply to revision" |
876 | 876 | ||
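should_apply() above gates a patch on parameters decoded from its SRC_URI entry: mindate/maxdate are checked against SRCDATE, and minrev/maxrev/rev/notrev against SRCREV, with the same string comparisons used in the hunk. A minimal sketch of that convention, assuming a BitBake environment and datastore d; the URL and revision bound are invented for illustration.

    import bb.fetch

    def demo_should_apply_parm(d, url="file://fix-build.patch;patchdir=src;minrev=100"):
        # decodeurl() turns the ;key=value suffix into the parm dict that
        # should_apply() inspects (roughly {'patchdir': 'src', 'minrev': '100'}).
        _, _, patchfile, _, _, parm = bb.fetch.decodeurl(url)
        srcrev = d.getVar('SRCREV')
        if "minrev" in parm and srcrev and srcrev < parm["minrev"]:
            return False, 'applies to later revisions'
        return True, None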
diff --git a/meta/lib/oe/path.py b/meta/lib/oe/path.py index f73fd4ac07..804ecd5fea 100644 --- a/meta/lib/oe/path.py +++ b/meta/lib/oe/path.py | |||
@@ -52,7 +52,7 @@ def make_relative_symlink(path): | |||
52 | 52 | ||
53 | def format_display(path, metadata): | 53 | def format_display(path, metadata): |
54 | """ Prepare a path for display to the user. """ | 54 | """ Prepare a path for display to the user. """ |
55 | rel = relative(metadata.getVar("TOPDIR", True), path) | 55 | rel = relative(metadata.getVar("TOPDIR"), path) |
56 | if len(rel) > len(path): | 56 | if len(rel) > len(path): |
57 | return path | 57 | return path |
58 | else: | 58 | else: |
diff --git a/meta/lib/oe/prservice.py b/meta/lib/oe/prservice.py index 0054f954cc..32dfc15e88 100644 --- a/meta/lib/oe/prservice.py +++ b/meta/lib/oe/prservice.py | |||
@@ -1,7 +1,7 @@ | |||
1 | 1 | ||
2 | def prserv_make_conn(d, check = False): | 2 | def prserv_make_conn(d, check = False): |
3 | import prserv.serv | 3 | import prserv.serv |
4 | host_params = list([_f for _f in (d.getVar("PRSERV_HOST", True) or '').split(':') if _f]) | 4 | host_params = list([_f for _f in (d.getVar("PRSERV_HOST") or '').split(':') if _f]) |
5 | try: | 5 | try: |
6 | conn = None | 6 | conn = None |
7 | conn = prserv.serv.PRServerConnection(host_params[0], int(host_params[1])) | 7 | conn = prserv.serv.PRServerConnection(host_params[0], int(host_params[1])) |
@@ -15,11 +15,11 @@ def prserv_make_conn(d, check = False): | |||
15 | return conn | 15 | return conn |
16 | 16 | ||
17 | def prserv_dump_db(d): | 17 | def prserv_dump_db(d): |
18 | if not d.getVar('PRSERV_HOST', True): | 18 | if not d.getVar('PRSERV_HOST'): |
19 | bb.error("Not using network based PR service") | 19 | bb.error("Not using network based PR service") |
20 | return None | 20 | return None |
21 | 21 | ||
22 | conn = d.getVar("__PRSERV_CONN", True) | 22 | conn = d.getVar("__PRSERV_CONN") |
23 | if conn is None: | 23 | if conn is None: |
24 | conn = prserv_make_conn(d) | 24 | conn = prserv_make_conn(d) |
25 | if conn is None: | 25 | if conn is None: |
@@ -27,18 +27,18 @@ def prserv_dump_db(d): | |||
27 | return None | 27 | return None |
28 | 28 | ||
29 | #dump db | 29 | #dump db |
30 | opt_version = d.getVar('PRSERV_DUMPOPT_VERSION', True) | 30 | opt_version = d.getVar('PRSERV_DUMPOPT_VERSION') |
31 | opt_pkgarch = d.getVar('PRSERV_DUMPOPT_PKGARCH', True) | 31 | opt_pkgarch = d.getVar('PRSERV_DUMPOPT_PKGARCH') |
32 | opt_checksum = d.getVar('PRSERV_DUMPOPT_CHECKSUM', True) | 32 | opt_checksum = d.getVar('PRSERV_DUMPOPT_CHECKSUM') |
33 | opt_col = ("1" == d.getVar('PRSERV_DUMPOPT_COL', True)) | 33 | opt_col = ("1" == d.getVar('PRSERV_DUMPOPT_COL')) |
34 | return conn.export(opt_version, opt_pkgarch, opt_checksum, opt_col) | 34 | return conn.export(opt_version, opt_pkgarch, opt_checksum, opt_col) |
35 | 35 | ||
36 | def prserv_import_db(d, filter_version=None, filter_pkgarch=None, filter_checksum=None): | 36 | def prserv_import_db(d, filter_version=None, filter_pkgarch=None, filter_checksum=None): |
37 | if not d.getVar('PRSERV_HOST', True): | 37 | if not d.getVar('PRSERV_HOST'): |
38 | bb.error("Not using network based PR service") | 38 | bb.error("Not using network based PR service") |
39 | return None | 39 | return None |
40 | 40 | ||
41 | conn = d.getVar("__PRSERV_CONN", True) | 41 | conn = d.getVar("__PRSERV_CONN") |
42 | if conn is None: | 42 | if conn is None: |
43 | conn = prserv_make_conn(d) | 43 | conn = prserv_make_conn(d) |
44 | if conn is None: | 44 | if conn is None: |
@@ -58,7 +58,7 @@ def prserv_import_db(d, filter_version=None, filter_pkgarch=None, filter_checksu | |||
58 | (filter_checksum and filter_checksum != checksum): | 58 | (filter_checksum and filter_checksum != checksum): |
59 | continue | 59 | continue |
60 | try: | 60 | try: |
61 | value = int(d.getVar(remain + '$' + version + '$' + pkgarch + '$' + checksum, True)) | 61 | value = int(d.getVar(remain + '$' + version + '$' + pkgarch + '$' + checksum)) |
62 | except BaseException as exc: | 62 | except BaseException as exc: |
63 | bb.debug("Not valid value of %s:%s" % (v,str(exc))) | 63 | bb.debug("Not valid value of %s:%s" % (v,str(exc))) |
64 | continue | 64 | continue |
@@ -72,8 +72,8 @@ def prserv_import_db(d, filter_version=None, filter_pkgarch=None, filter_checksu | |||
72 | def prserv_export_tofile(d, metainfo, datainfo, lockdown, nomax=False): | 72 | def prserv_export_tofile(d, metainfo, datainfo, lockdown, nomax=False): |
73 | import bb.utils | 73 | import bb.utils |
74 | #initialize the output file | 74 | #initialize the output file | ||
75 | bb.utils.mkdirhier(d.getVar('PRSERV_DUMPDIR', True)) | 75 | bb.utils.mkdirhier(d.getVar('PRSERV_DUMPDIR')) |
76 | df = d.getVar('PRSERV_DUMPFILE', True) | 76 | df = d.getVar('PRSERV_DUMPFILE') |
77 | #write data | 77 | #write data |
78 | lf = bb.utils.lockfile("%s.lock" % df) | 78 | lf = bb.utils.lockfile("%s.lock" % df) |
79 | f = open(df, "a") | 79 | f = open(df, "a") |
@@ -114,7 +114,7 @@ def prserv_export_tofile(d, metainfo, datainfo, lockdown, nomax=False): | |||
114 | bb.utils.unlockfile(lf) | 114 | bb.utils.unlockfile(lf) |
115 | 115 | ||
116 | def prserv_check_avail(d): | 116 | def prserv_check_avail(d): |
117 | host_params = list([_f for _f in (d.getVar("PRSERV_HOST", True) or '').split(':') if _f]) | 117 | host_params = list([_f for _f in (d.getVar("PRSERV_HOST") or '').split(':') if _f]) |
118 | try: | 118 | try: |
119 | if len(host_params) != 2: | 119 | if len(host_params) != 2: |
120 | raise TypeError | 120 | raise TypeError |
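prserv_make_conn() and prserv_check_avail() above both split PRSERV_HOST into a host:port pair and insist on exactly two fields. A standalone sketch of that validation; the value is made up and stands in for d.getVar("PRSERV_HOST").

    prserv_host = "localhost:8585"        # stand-in for d.getVar("PRSERV_HOST") or ''
    host_params = [f for f in prserv_host.split(':') if f]

    try:
        if len(host_params) != 2:
            raise TypeError
        host, port = host_params[0], int(host_params[1])
    except (TypeError, ValueError):
        raise SystemExit("PRSERV_HOST must look like <host>:<port>, e.g. localhost:8585")
    print(host, port)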
diff --git a/meta/lib/oe/qa.py b/meta/lib/oe/qa.py index 22d76dcbcd..3231e60cea 100644 --- a/meta/lib/oe/qa.py +++ b/meta/lib/oe/qa.py | |||
@@ -129,11 +129,11 @@ class ELFFile: | |||
129 | if cmd in self.objdump_output: | 129 | if cmd in self.objdump_output: |
130 | return self.objdump_output[cmd] | 130 | return self.objdump_output[cmd] |
131 | 131 | ||
132 | objdump = d.getVar('OBJDUMP', True) | 132 | objdump = d.getVar('OBJDUMP') |
133 | 133 | ||
134 | env = os.environ.copy() | 134 | env = os.environ.copy() |
135 | env["LC_ALL"] = "C" | 135 | env["LC_ALL"] = "C" |
136 | env["PATH"] = d.getVar('PATH', True) | 136 | env["PATH"] = d.getVar('PATH') |
137 | 137 | ||
138 | try: | 138 | try: |
139 | bb.note("%s %s %s" % (objdump, cmd, self.name)) | 139 | bb.note("%s %s %s" % (objdump, cmd, self.name)) |
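The ELFFile hunk above prepares the objdump invocation with LC_ALL=C and the PATH taken from the datastore. Below is a hedged sketch of running a cross objdump that way; the subprocess call and the -p flag are illustrative assumptions, not necessarily the exact mode oe.qa uses, and a datastore d is assumed.

    import os
    import subprocess

    def demo_run_objdump(d, elf_path):
        # -p prints program headers; any objdump mode works the same way here.
        objdump = d.getVar('OBJDUMP')          # e.g. a cross objdump from the toolchain
        env = os.environ.copy()
        env["LC_ALL"] = "C"                    # force stable, parseable output
        env["PATH"] = d.getVar('PATH')
        return subprocess.check_output([objdump, "-p", elf_path],
                                       env=env).decode("utf-8")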
diff --git a/meta/lib/oe/recipeutils.py b/meta/lib/oe/recipeutils.py index 26c926f214..a7fdd36e40 100644 --- a/meta/lib/oe/recipeutils.py +++ b/meta/lib/oe/recipeutils.py | |||
@@ -328,16 +328,16 @@ def copy_recipe_files(d, tgt_dir, whole_dir=False, download=True): | |||
328 | 328 | ||
329 | # FIXME need a warning if the unexpanded SRC_URI value contains variable references | 329 | # FIXME need a warning if the unexpanded SRC_URI value contains variable references |
330 | 330 | ||
331 | uris = (d.getVar('SRC_URI', True) or "").split() | 331 | uris = (d.getVar('SRC_URI') or "").split() |
332 | fetch = bb.fetch2.Fetch(uris, d) | 332 | fetch = bb.fetch2.Fetch(uris, d) |
333 | if download: | 333 | if download: |
334 | fetch.download() | 334 | fetch.download() |
335 | 335 | ||
336 | # Copy local files to target directory and gather any remote files | 336 | # Copy local files to target directory and gather any remote files |
337 | bb_dir = os.path.dirname(d.getVar('FILE', True)) + os.sep | 337 | bb_dir = os.path.dirname(d.getVar('FILE')) + os.sep |
338 | remotes = [] | 338 | remotes = [] |
339 | copied = [] | 339 | copied = [] |
340 | includes = [path for path in d.getVar('BBINCLUDED', True).split() if | 340 | includes = [path for path in d.getVar('BBINCLUDED').split() if |
341 | path.startswith(bb_dir) and os.path.exists(path)] | 341 | path.startswith(bb_dir) and os.path.exists(path)] |
342 | for path in fetch.localpaths() + includes: | 342 | for path in fetch.localpaths() + includes: |
343 | # Only import files that are under the meta directory | 343 | # Only import files that are under the meta directory |
@@ -361,7 +361,7 @@ def copy_recipe_files(d, tgt_dir, whole_dir=False, download=True): | |||
361 | def get_recipe_local_files(d, patches=False, archives=False): | 361 | def get_recipe_local_files(d, patches=False, archives=False): |
362 | """Get a list of local files in SRC_URI within a recipe.""" | 362 | """Get a list of local files in SRC_URI within a recipe.""" |
363 | import oe.patch | 363 | import oe.patch |
364 | uris = (d.getVar('SRC_URI', True) or "").split() | 364 | uris = (d.getVar('SRC_URI') or "").split() |
365 | fetch = bb.fetch2.Fetch(uris, d) | 365 | fetch = bb.fetch2.Fetch(uris, d) |
366 | # FIXME this list should be factored out somewhere else (such as the | 366 | # FIXME this list should be factored out somewhere else (such as the |
367 | # fetcher) though note that this only encompasses actual container formats | 367 | # fetcher) though note that this only encompasses actual container formats |
@@ -421,7 +421,7 @@ def get_recipe_patched_files(d): | |||
421 | for patch in patches: | 421 | for patch in patches: |
422 | _, _, patchfile, _, _, parm = bb.fetch.decodeurl(patch) | 422 | _, _, patchfile, _, _, parm = bb.fetch.decodeurl(patch) |
423 | striplevel = int(parm['striplevel']) | 423 | striplevel = int(parm['striplevel']) |
424 | patchedfiles[patchfile] = oe.patch.PatchSet.getPatchedFiles(patchfile, striplevel, os.path.join(d.getVar('S', True), parm.get('patchdir', ''))) | 424 | patchedfiles[patchfile] = oe.patch.PatchSet.getPatchedFiles(patchfile, striplevel, os.path.join(d.getVar('S'), parm.get('patchdir', ''))) |
425 | return patchedfiles | 425 | return patchedfiles |
426 | 426 | ||
427 | 427 | ||
@@ -459,9 +459,9 @@ def get_bbfile_path(d, destdir, extrapathhint=None): | |||
459 | confdata.setVar('LAYERDIR', destlayerdir) | 459 | confdata.setVar('LAYERDIR', destlayerdir) |
460 | destlayerconf = os.path.join(destlayerdir, "conf", "layer.conf") | 460 | destlayerconf = os.path.join(destlayerdir, "conf", "layer.conf") |
461 | confdata = bb.cookerdata.parse_config_file(destlayerconf, confdata) | 461 | confdata = bb.cookerdata.parse_config_file(destlayerconf, confdata) |
462 | pn = d.getVar('PN', True) | 462 | pn = d.getVar('PN') |
463 | 463 | ||
464 | bbfilespecs = (confdata.getVar('BBFILES', True) or '').split() | 464 | bbfilespecs = (confdata.getVar('BBFILES') or '').split() |
465 | if destdir == destlayerdir: | 465 | if destdir == destlayerdir: |
466 | for bbfilespec in bbfilespecs: | 466 | for bbfilespec in bbfilespecs: |
467 | if not bbfilespec.endswith('.bbappend'): | 467 | if not bbfilespec.endswith('.bbappend'): |
@@ -474,8 +474,8 @@ def get_bbfile_path(d, destdir, extrapathhint=None): | |||
474 | 474 | ||
475 | # Try to make up a path that matches BBFILES | 475 | # Try to make up a path that matches BBFILES |
476 | # this is a little crude, but better than nothing | 476 | # this is a little crude, but better than nothing |
477 | bpn = d.getVar('BPN', True) | 477 | bpn = d.getVar('BPN') |
478 | recipefn = os.path.basename(d.getVar('FILE', True)) | 478 | recipefn = os.path.basename(d.getVar('FILE')) |
479 | pathoptions = [destdir] | 479 | pathoptions = [destdir] |
480 | if extrapathhint: | 480 | if extrapathhint: |
481 | pathoptions.append(os.path.join(destdir, extrapathhint)) | 481 | pathoptions.append(os.path.join(destdir, extrapathhint)) |
@@ -499,7 +499,7 @@ def get_bbappend_path(d, destlayerdir, wildcardver=False): | |||
499 | import bb.cookerdata | 499 | import bb.cookerdata |
500 | 500 | ||
501 | destlayerdir = os.path.abspath(destlayerdir) | 501 | destlayerdir = os.path.abspath(destlayerdir) |
502 | recipefile = d.getVar('FILE', True) | 502 | recipefile = d.getVar('FILE') |
503 | recipefn = os.path.splitext(os.path.basename(recipefile))[0] | 503 | recipefn = os.path.splitext(os.path.basename(recipefile))[0] |
504 | if wildcardver and '_' in recipefn: | 504 | if wildcardver and '_' in recipefn: |
505 | recipefn = recipefn.split('_', 1)[0] + '_%' | 505 | recipefn = recipefn.split('_', 1)[0] + '_%' |
@@ -519,7 +519,7 @@ def get_bbappend_path(d, destlayerdir, wildcardver=False): | |||
519 | appendpath = os.path.join(destlayerdir, os.path.relpath(os.path.dirname(recipefile), origlayerdir), appendfn) | 519 | appendpath = os.path.join(destlayerdir, os.path.relpath(os.path.dirname(recipefile), origlayerdir), appendfn) |
520 | closepath = '' | 520 | closepath = '' |
521 | pathok = True | 521 | pathok = True |
522 | for bbfilespec in confdata.getVar('BBFILES', True).split(): | 522 | for bbfilespec in confdata.getVar('BBFILES').split(): |
523 | if fnmatch.fnmatchcase(appendpath, bbfilespec): | 523 | if fnmatch.fnmatchcase(appendpath, bbfilespec): |
524 | # Our append path works, we're done | 524 | # Our append path works, we're done |
525 | break | 525 | break |
@@ -592,7 +592,7 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False, | |||
592 | 592 | ||
593 | # FIXME check if the bbappend doesn't get overridden by a higher priority layer? | 593 | # FIXME check if the bbappend doesn't get overridden by a higher priority layer? |
594 | 594 | ||
595 | layerdirs = [os.path.abspath(layerdir) for layerdir in rd.getVar('BBLAYERS', True).split()] | 595 | layerdirs = [os.path.abspath(layerdir) for layerdir in rd.getVar('BBLAYERS').split()] |
596 | if not os.path.abspath(destlayerdir) in layerdirs: | 596 | if not os.path.abspath(destlayerdir) in layerdirs: |
597 | bb.warn('Specified layer is not currently enabled in bblayers.conf, you will need to add it before this bbappend will be active') | 597 | bb.warn('Specified layer is not currently enabled in bblayers.conf, you will need to add it before this bbappend will be active') |
598 | 598 | ||
@@ -628,7 +628,7 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False, | |||
628 | else: | 628 | else: |
629 | bbappendlines.append((varname, op, value)) | 629 | bbappendlines.append((varname, op, value)) |
630 | 630 | ||
631 | destsubdir = rd.getVar('PN', True) | 631 | destsubdir = rd.getVar('PN') |
632 | if srcfiles: | 632 | if srcfiles: |
633 | bbappendlines.append(('FILESEXTRAPATHS_prepend', ':=', '${THISDIR}/${PN}:')) | 633 | bbappendlines.append(('FILESEXTRAPATHS_prepend', ':=', '${THISDIR}/${PN}:')) |
634 | 634 | ||
@@ -647,7 +647,7 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False, | |||
647 | srcurientry = 'file://%s' % srcfile | 647 | srcurientry = 'file://%s' % srcfile |
648 | # Double-check it's not there already | 648 | # Double-check it's not there already |
649 | # FIXME do we care if the entry is added by another bbappend that might go away? | 649 | # FIXME do we care if the entry is added by another bbappend that might go away? |
650 | if not srcurientry in rd.getVar('SRC_URI', True).split(): | 650 | if not srcurientry in rd.getVar('SRC_URI').split(): |
651 | if machine: | 651 | if machine: |
652 | appendline('SRC_URI_append%s' % appendoverride, '=', ' ' + srcurientry) | 652 | appendline('SRC_URI_append%s' % appendoverride, '=', ' ' + srcurientry) |
653 | else: | 653 | else: |
@@ -796,7 +796,7 @@ def replace_dir_vars(path, d): | |||
796 | # Sort by length so we get the variables we're interested in first | 796 | # Sort by length so we get the variables we're interested in first |
797 | for var in sorted(list(d.keys()), key=len): | 797 | for var in sorted(list(d.keys()), key=len): |
798 | if var.endswith('dir') and var.lower() == var: | 798 | if var.endswith('dir') and var.lower() == var: |
799 | value = d.getVar(var, True) | 799 | value = d.getVar(var) |
800 | if value.startswith('/') and not '\n' in value and value not in dirvars: | 800 | if value.startswith('/') and not '\n' in value and value not in dirvars: |
801 | dirvars[value] = var | 801 | dirvars[value] = var |
802 | for dirpath in sorted(list(dirvars.keys()), reverse=True): | 802 | for dirpath in sorted(list(dirvars.keys()), reverse=True): |
@@ -850,12 +850,12 @@ def get_recipe_upstream_version(rd): | |||
850 | ru['type'] = 'U' | 850 | ru['type'] = 'U' |
851 | ru['datetime'] = '' | 851 | ru['datetime'] = '' |
852 | 852 | ||
853 | pv = rd.getVar('PV', True) | 853 | pv = rd.getVar('PV') |
854 | 854 | ||
855 | # XXX: If there is no SRC_URI, there are no upstream sources, so | 855 | # XXX: If there is no SRC_URI, there are no upstream sources, so | ||
856 | # return the current recipe version so that the upstream version check | 856 | # return the current recipe version so that the upstream version check | ||
857 | # declares a match. | 857 | # declares a match. |
858 | src_uris = rd.getVar('SRC_URI', True) | 858 | src_uris = rd.getVar('SRC_URI') |
859 | if not src_uris: | 859 | if not src_uris: |
860 | ru['version'] = pv | 860 | ru['version'] = pv |
861 | ru['type'] = 'M' | 861 | ru['type'] = 'M' |
@@ -866,13 +866,13 @@ def get_recipe_upstream_version(rd): | |||
866 | src_uri = src_uris.split()[0] | 866 | src_uri = src_uris.split()[0] |
867 | uri_type, _, _, _, _, _ = decodeurl(src_uri) | 867 | uri_type, _, _, _, _, _ = decodeurl(src_uri) |
868 | 868 | ||
869 | manual_upstream_version = rd.getVar("RECIPE_UPSTREAM_VERSION", True) | 869 | manual_upstream_version = rd.getVar("RECIPE_UPSTREAM_VERSION") |
870 | if manual_upstream_version: | 870 | if manual_upstream_version: |
871 | # manual tracking of upstream version. | 871 | # manual tracking of upstream version. |
872 | ru['version'] = manual_upstream_version | 872 | ru['version'] = manual_upstream_version |
873 | ru['type'] = 'M' | 873 | ru['type'] = 'M' |
874 | 874 | ||
875 | manual_upstream_date = rd.getVar("CHECK_DATE", True) | 875 | manual_upstream_date = rd.getVar("CHECK_DATE") |
876 | if manual_upstream_date: | 876 | if manual_upstream_date: |
877 | date = datetime.strptime(manual_upstream_date, "%b %d, %Y") | 877 | date = datetime.strptime(manual_upstream_date, "%b %d, %Y") |
878 | else: | 878 | else: |
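get_recipe_upstream_version() above accepts a manually maintained CHECK_DATE in the "%b %d, %Y" form. A quick standalone check of that format string; the date value is invented.

    from datetime import datetime

    manual_upstream_date = "Aug 10, 2016"             # example CHECK_DATE value
    date = datetime.strptime(manual_upstream_date, "%b %d, %Y")
    print(date.isoformat())                           # 2016-08-10T00:00:00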
diff --git a/meta/lib/oe/rootfs.py b/meta/lib/oe/rootfs.py index 74fc3bd256..d9a473006a 100644 --- a/meta/lib/oe/rootfs.py +++ b/meta/lib/oe/rootfs.py | |||
@@ -18,8 +18,8 @@ class Rootfs(object, metaclass=ABCMeta): | |||
18 | def __init__(self, d, progress_reporter=None, logcatcher=None): | 18 | def __init__(self, d, progress_reporter=None, logcatcher=None): |
19 | self.d = d | 19 | self.d = d |
20 | self.pm = None | 20 | self.pm = None |
21 | self.image_rootfs = self.d.getVar('IMAGE_ROOTFS', True) | 21 | self.image_rootfs = self.d.getVar('IMAGE_ROOTFS') |
22 | self.deploydir = self.d.getVar('IMGDEPLOYDIR', True) | 22 | self.deploydir = self.d.getVar('IMGDEPLOYDIR') |
23 | self.progress_reporter = progress_reporter | 23 | self.progress_reporter = progress_reporter |
24 | self.logcatcher = logcatcher | 24 | self.logcatcher = logcatcher |
25 | 25 | ||
@@ -72,7 +72,7 @@ class Rootfs(object, metaclass=ABCMeta): | |||
72 | else: | 72 | else: |
73 | msg = '%d %s messages' % (len(messages), type) | 73 | msg = '%d %s messages' % (len(messages), type) |
74 | msg = '[log_check] %s: found %s in the logfile:\n%s' % \ | 74 | msg = '[log_check] %s: found %s in the logfile:\n%s' % \ |
75 | (self.d.getVar('PN', True), msg, ''.join(messages)) | 75 | (self.d.getVar('PN'), msg, ''.join(messages)) |
76 | if type == 'error': | 76 | if type == 'error': |
77 | bb.fatal(msg) | 77 | bb.fatal(msg) |
78 | else: | 78 | else: |
@@ -103,7 +103,7 @@ class Rootfs(object, metaclass=ABCMeta): | |||
103 | pass | 103 | pass |
104 | 104 | ||
105 | def _setup_dbg_rootfs(self, dirs): | 105 | def _setup_dbg_rootfs(self, dirs): |
106 | gen_debugfs = self.d.getVar('IMAGE_GEN_DEBUGFS', True) or '0' | 106 | gen_debugfs = self.d.getVar('IMAGE_GEN_DEBUGFS') or '0' |
107 | if gen_debugfs != '1': | 107 | if gen_debugfs != '1': |
108 | return | 108 | return |
109 | 109 | ||
@@ -156,7 +156,7 @@ class Rootfs(object, metaclass=ABCMeta): | |||
156 | os.rename(self.image_rootfs + '-orig', self.image_rootfs) | 156 | os.rename(self.image_rootfs + '-orig', self.image_rootfs) |
157 | 157 | ||
158 | def _exec_shell_cmd(self, cmd): | 158 | def _exec_shell_cmd(self, cmd): |
159 | fakerootcmd = self.d.getVar('FAKEROOT', True) | 159 | fakerootcmd = self.d.getVar('FAKEROOT') |
160 | if fakerootcmd is not None: | 160 | if fakerootcmd is not None: |
161 | exec_cmd = [fakerootcmd, cmd] | 161 | exec_cmd = [fakerootcmd, cmd] |
162 | else: | 162 | else: |
@@ -171,14 +171,14 @@ class Rootfs(object, metaclass=ABCMeta): | |||
171 | 171 | ||
172 | def create(self): | 172 | def create(self): |
173 | bb.note("###### Generate rootfs #######") | 173 | bb.note("###### Generate rootfs #######") |
174 | pre_process_cmds = self.d.getVar("ROOTFS_PREPROCESS_COMMAND", True) | 174 | pre_process_cmds = self.d.getVar("ROOTFS_PREPROCESS_COMMAND") |
175 | post_process_cmds = self.d.getVar("ROOTFS_POSTPROCESS_COMMAND", True) | 175 | post_process_cmds = self.d.getVar("ROOTFS_POSTPROCESS_COMMAND") |
176 | rootfs_post_install_cmds = self.d.getVar('ROOTFS_POSTINSTALL_COMMAND', True) | 176 | rootfs_post_install_cmds = self.d.getVar('ROOTFS_POSTINSTALL_COMMAND') |
177 | 177 | ||
178 | postinst_intercepts_dir = self.d.getVar("POSTINST_INTERCEPTS_DIR", True) | 178 | postinst_intercepts_dir = self.d.getVar("POSTINST_INTERCEPTS_DIR") |
179 | if not postinst_intercepts_dir: | 179 | if not postinst_intercepts_dir: |
180 | postinst_intercepts_dir = self.d.expand("${COREBASE}/scripts/postinst-intercepts") | 180 | postinst_intercepts_dir = self.d.expand("${COREBASE}/scripts/postinst-intercepts") |
181 | intercepts_dir = os.path.join(self.d.getVar('WORKDIR', True), | 181 | intercepts_dir = os.path.join(self.d.getVar('WORKDIR'), |
182 | "intercept_scripts") | 182 | "intercept_scripts") |
183 | 183 | ||
184 | bb.utils.remove(intercepts_dir, True) | 184 | bb.utils.remove(intercepts_dir, True) |
@@ -201,10 +201,10 @@ class Rootfs(object, metaclass=ABCMeta): | |||
201 | # call the package manager dependent create method | 201 | # call the package manager dependent create method |
202 | self._create() | 202 | self._create() |
203 | 203 | ||
204 | sysconfdir = self.image_rootfs + self.d.getVar('sysconfdir', True) | 204 | sysconfdir = self.image_rootfs + self.d.getVar('sysconfdir') |
205 | bb.utils.mkdirhier(sysconfdir) | 205 | bb.utils.mkdirhier(sysconfdir) |
206 | with open(sysconfdir + "/version", "w+") as ver: | 206 | with open(sysconfdir + "/version", "w+") as ver: |
207 | ver.write(self.d.getVar('BUILDNAME', True) + "\n") | 207 | ver.write(self.d.getVar('BUILDNAME') + "\n") |
208 | 208 | ||
209 | execute_pre_post_process(self.d, rootfs_post_install_cmds) | 209 | execute_pre_post_process(self.d, rootfs_post_install_cmds) |
210 | 210 | ||
@@ -223,7 +223,7 @@ class Rootfs(object, metaclass=ABCMeta): | |||
223 | "offline and rootfs is read-only: %s" % | 223 | "offline and rootfs is read-only: %s" % |
224 | delayed_postinsts) | 224 | delayed_postinsts) |
225 | 225 | ||
226 | if self.d.getVar('USE_DEVFS', True) != "1": | 226 | if self.d.getVar('USE_DEVFS') != "1": |
227 | self._create_devfs() | 227 | self._create_devfs() |
228 | 228 | ||
229 | self._uninstall_unneeded() | 229 | self._uninstall_unneeded() |
@@ -235,7 +235,7 @@ class Rootfs(object, metaclass=ABCMeta): | |||
235 | 235 | ||
236 | self._run_ldconfig() | 236 | self._run_ldconfig() |
237 | 237 | ||
238 | if self.d.getVar('USE_DEPMOD', True) != "0": | 238 | if self.d.getVar('USE_DEPMOD') != "0": |
239 | self._generate_kernel_module_deps() | 239 | self._generate_kernel_module_deps() |
240 | 240 | ||
241 | self._cleanup() | 241 | self._cleanup() |
@@ -251,16 +251,16 @@ class Rootfs(object, metaclass=ABCMeta): | |||
251 | if delayed_postinsts is None: | 251 | if delayed_postinsts is None: |
252 | if os.path.exists(self.d.expand("${IMAGE_ROOTFS}${sysconfdir}/init.d/run-postinsts")): | 252 | if os.path.exists(self.d.expand("${IMAGE_ROOTFS}${sysconfdir}/init.d/run-postinsts")): |
253 | self._exec_shell_cmd(["update-rc.d", "-f", "-r", | 253 | self._exec_shell_cmd(["update-rc.d", "-f", "-r", |
254 | self.d.getVar('IMAGE_ROOTFS', True), | 254 | self.d.getVar('IMAGE_ROOTFS'), |
255 | "run-postinsts", "remove"]) | 255 | "run-postinsts", "remove"]) |
256 | 256 | ||
257 | image_rorfs = bb.utils.contains("IMAGE_FEATURES", "read-only-rootfs", | 257 | image_rorfs = bb.utils.contains("IMAGE_FEATURES", "read-only-rootfs", |
258 | True, False, self.d) | 258 | True, False, self.d) |
259 | image_rorfs_force = self.d.getVar('FORCE_RO_REMOVE', True) | 259 | image_rorfs_force = self.d.getVar('FORCE_RO_REMOVE') |
260 | 260 | ||
261 | if image_rorfs or image_rorfs_force == "1": | 261 | if image_rorfs or image_rorfs_force == "1": |
262 | # Remove components that we don't need if it's a read-only rootfs | 262 | # Remove components that we don't need if it's a read-only rootfs |
263 | unneeded_pkgs = self.d.getVar("ROOTFS_RO_UNNEEDED", True).split() | 263 | unneeded_pkgs = self.d.getVar("ROOTFS_RO_UNNEEDED").split() |
264 | pkgs_installed = image_list_installed_packages(self.d) | 264 | pkgs_installed = image_list_installed_packages(self.d) |
265 | pkgs_to_remove = [pkg for pkg in pkgs_installed if pkg in unneeded_pkgs] | 265 | pkgs_to_remove = [pkg for pkg in pkgs_installed if pkg in unneeded_pkgs] |
266 | 266 | ||
@@ -273,7 +273,7 @@ class Rootfs(object, metaclass=ABCMeta): | |||
273 | bb.warn("There are post install scripts " | 273 | bb.warn("There are post install scripts " |
274 | "in a read-only rootfs") | 274 | "in a read-only rootfs") |
275 | 275 | ||
276 | post_uninstall_cmds = self.d.getVar("ROOTFS_POSTUNINSTALL_COMMAND", True) | 276 | post_uninstall_cmds = self.d.getVar("ROOTFS_POSTUNINSTALL_COMMAND") |
277 | execute_pre_post_process(self.d, post_uninstall_cmds) | 277 | execute_pre_post_process(self.d, post_uninstall_cmds) |
278 | 278 | ||
279 | runtime_pkgmanage = bb.utils.contains("IMAGE_FEATURES", "package-management", | 279 | runtime_pkgmanage = bb.utils.contains("IMAGE_FEATURES", "package-management", |
@@ -283,12 +283,12 @@ class Rootfs(object, metaclass=ABCMeta): | |||
283 | self.pm.remove_packaging_data() | 283 | self.pm.remove_packaging_data() |
284 | 284 | ||
285 | def _run_intercepts(self): | 285 | def _run_intercepts(self): |
286 | intercepts_dir = os.path.join(self.d.getVar('WORKDIR', True), | 286 | intercepts_dir = os.path.join(self.d.getVar('WORKDIR'), |
287 | "intercept_scripts") | 287 | "intercept_scripts") |
288 | 288 | ||
289 | bb.note("Running intercept scripts:") | 289 | bb.note("Running intercept scripts:") |
290 | os.environ['D'] = self.image_rootfs | 290 | os.environ['D'] = self.image_rootfs |
291 | os.environ['STAGING_DIR_NATIVE'] = self.d.getVar('STAGING_DIR_NATIVE', True) | 291 | os.environ['STAGING_DIR_NATIVE'] = self.d.getVar('STAGING_DIR_NATIVE') |
292 | for script in os.listdir(intercepts_dir): | 292 | for script in os.listdir(intercepts_dir): |
293 | script_full = os.path.join(intercepts_dir, script) | 293 | script_full = os.path.join(intercepts_dir, script) |
294 | 294 | ||
@@ -320,7 +320,7 @@ class Rootfs(object, metaclass=ABCMeta): | |||
320 | self._handle_intercept_failure(registered_pkgs) | 320 | self._handle_intercept_failure(registered_pkgs) |
321 | 321 | ||
322 | def _run_ldconfig(self): | 322 | def _run_ldconfig(self): |
323 | if self.d.getVar('LDCONFIGDEPEND', True): | 323 | if self.d.getVar('LDCONFIGDEPEND'): |
324 | bb.note("Executing: ldconfig -r" + self.image_rootfs + "-c new -v") | 324 | bb.note("Executing: ldconfig -r" + self.image_rootfs + "-c new -v") |
325 | self._exec_shell_cmd(['ldconfig', '-r', self.image_rootfs, '-c', | 325 | self._exec_shell_cmd(['ldconfig', '-r', self.image_rootfs, '-c', |
326 | 'new', '-v']) | 326 | 'new', '-v']) |
@@ -340,7 +340,7 @@ class Rootfs(object, metaclass=ABCMeta): | |||
340 | bb.note("No Kernel Modules found, not running depmod") | 340 | bb.note("No Kernel Modules found, not running depmod") |
341 | return | 341 | return |
342 | 342 | ||
343 | kernel_abi_ver_file = oe.path.join(self.d.getVar('PKGDATA_DIR', True), "kernel-depmod", | 343 | kernel_abi_ver_file = oe.path.join(self.d.getVar('PKGDATA_DIR'), "kernel-depmod", |
344 | 'kernel-abiversion') | 344 | 'kernel-abiversion') |
345 | if not os.path.exists(kernel_abi_ver_file): | 345 | if not os.path.exists(kernel_abi_ver_file): |
346 | bb.fatal("No kernel-abiversion file found (%s), cannot run depmod, aborting" % kernel_abi_ver_file) | 346 | bb.fatal("No kernel-abiversion file found (%s), cannot run depmod, aborting" % kernel_abi_ver_file) |
@@ -362,15 +362,15 @@ class Rootfs(object, metaclass=ABCMeta): | |||
362 | """ | 362 | """ |
363 | def _create_devfs(self): | 363 | def _create_devfs(self): |
364 | devtable_list = [] | 364 | devtable_list = [] |
365 | devtable = self.d.getVar('IMAGE_DEVICE_TABLE', True) | 365 | devtable = self.d.getVar('IMAGE_DEVICE_TABLE') |
366 | if devtable is not None: | 366 | if devtable is not None: |
367 | devtable_list.append(devtable) | 367 | devtable_list.append(devtable) |
368 | else: | 368 | else: |
369 | devtables = self.d.getVar('IMAGE_DEVICE_TABLES', True) | 369 | devtables = self.d.getVar('IMAGE_DEVICE_TABLES') |
370 | if devtables is None: | 370 | if devtables is None: |
371 | devtables = 'files/device_table-minimal.txt' | 371 | devtables = 'files/device_table-minimal.txt' |
372 | for devtable in devtables.split(): | 372 | for devtable in devtables.split(): |
373 | devtable_list.append("%s" % bb.utils.which(self.d.getVar('BBPATH', True), devtable)) | 373 | devtable_list.append("%s" % bb.utils.which(self.d.getVar('BBPATH'), devtable)) |
374 | 374 | ||
375 | for devtable in devtable_list: | 375 | for devtable in devtable_list: |
376 | self._exec_shell_cmd(["makedevs", "-r", | 376 | self._exec_shell_cmd(["makedevs", "-r", |
@@ -386,16 +386,16 @@ class RpmRootfs(Rootfs): | |||
386 | self.manifest = RpmManifest(d, manifest_dir) | 386 | self.manifest = RpmManifest(d, manifest_dir) |
387 | 387 | ||
388 | self.pm = RpmPM(d, | 388 | self.pm = RpmPM(d, |
389 | d.getVar('IMAGE_ROOTFS', True), | 389 | d.getVar('IMAGE_ROOTFS'), |
390 | self.d.getVar('TARGET_VENDOR', True) | 390 | self.d.getVar('TARGET_VENDOR') |
391 | ) | 391 | ) |
392 | 392 | ||
393 | self.inc_rpm_image_gen = self.d.getVar('INC_RPM_IMAGE_GEN', True) | 393 | self.inc_rpm_image_gen = self.d.getVar('INC_RPM_IMAGE_GEN') |
394 | if self.inc_rpm_image_gen != "1": | 394 | if self.inc_rpm_image_gen != "1": |
395 | bb.utils.remove(self.image_rootfs, True) | 395 | bb.utils.remove(self.image_rootfs, True) |
396 | else: | 396 | else: |
397 | self.pm.recovery_packaging_data() | 397 | self.pm.recovery_packaging_data() |
398 | bb.utils.remove(self.d.getVar('MULTILIB_TEMP_ROOTFS', True), True) | 398 | bb.utils.remove(self.d.getVar('MULTILIB_TEMP_ROOTFS'), True) |
399 | 399 | ||
400 | self.pm.create_configs() | 400 | self.pm.create_configs() |
401 | 401 | ||
@@ -429,8 +429,8 @@ class RpmRootfs(Rootfs): | |||
429 | 429 | ||
430 | def _create(self): | 430 | def _create(self): |
431 | pkgs_to_install = self.manifest.parse_initial_manifest() | 431 | pkgs_to_install = self.manifest.parse_initial_manifest() |
432 | rpm_pre_process_cmds = self.d.getVar('RPM_PREPROCESS_COMMANDS', True) | 432 | rpm_pre_process_cmds = self.d.getVar('RPM_PREPROCESS_COMMANDS') |
433 | rpm_post_process_cmds = self.d.getVar('RPM_POSTPROCESS_COMMANDS', True) | 433 | rpm_post_process_cmds = self.d.getVar('RPM_POSTPROCESS_COMMANDS') |
434 | 434 | ||
435 | # update PM index files | 435 | # update PM index files |
436 | self.pm.write_index() | 436 | self.pm.write_index() |
@@ -601,7 +601,7 @@ class DpkgOpkgRootfs(Rootfs): | |||
601 | pkg_list = [] | 601 | pkg_list = [] |
602 | 602 | ||
603 | pkgs = None | 603 | pkgs = None |
604 | if not self.d.getVar('PACKAGE_INSTALL', True).strip(): | 604 | if not self.d.getVar('PACKAGE_INSTALL').strip(): |
605 | bb.note("Building empty image") | 605 | bb.note("Building empty image") |
606 | else: | 606 | else: |
607 | pkgs = self._get_pkgs_postinsts(status_file) | 607 | pkgs = self._get_pkgs_postinsts(status_file) |
@@ -637,17 +637,17 @@ class DpkgRootfs(DpkgOpkgRootfs): | |||
637 | ] | 637 | ] |
638 | 638 | ||
639 | bb.utils.remove(self.image_rootfs, True) | 639 | bb.utils.remove(self.image_rootfs, True) |
640 | bb.utils.remove(self.d.getVar('MULTILIB_TEMP_ROOTFS', True), True) | 640 | bb.utils.remove(self.d.getVar('MULTILIB_TEMP_ROOTFS'), True) |
641 | self.manifest = DpkgManifest(d, manifest_dir) | 641 | self.manifest = DpkgManifest(d, manifest_dir) |
642 | self.pm = DpkgPM(d, d.getVar('IMAGE_ROOTFS', True), | 642 | self.pm = DpkgPM(d, d.getVar('IMAGE_ROOTFS'), |
643 | d.getVar('PACKAGE_ARCHS', True), | 643 | d.getVar('PACKAGE_ARCHS'), |
644 | d.getVar('DPKG_ARCH', True)) | 644 | d.getVar('DPKG_ARCH')) |
645 | 645 | ||
646 | 646 | ||
647 | def _create(self): | 647 | def _create(self): |
648 | pkgs_to_install = self.manifest.parse_initial_manifest() | 648 | pkgs_to_install = self.manifest.parse_initial_manifest() |
649 | deb_pre_process_cmds = self.d.getVar('DEB_PREPROCESS_COMMANDS', True) | 649 | deb_pre_process_cmds = self.d.getVar('DEB_PREPROCESS_COMMANDS') |
650 | deb_post_process_cmds = self.d.getVar('DEB_POSTPROCESS_COMMANDS', True) | 650 | deb_post_process_cmds = self.d.getVar('DEB_POSTPROCESS_COMMANDS') |
651 | 651 | ||
652 | alt_dir = self.d.expand("${IMAGE_ROOTFS}/var/lib/dpkg/alternatives") | 652 | alt_dir = self.d.expand("${IMAGE_ROOTFS}/var/lib/dpkg/alternatives") |
653 | bb.utils.mkdirhier(alt_dir) | 653 | bb.utils.mkdirhier(alt_dir) |
@@ -725,10 +725,10 @@ class OpkgRootfs(DpkgOpkgRootfs): | |||
725 | self.log_check_regex = '(exit 1|Collected errors)' | 725 | self.log_check_regex = '(exit 1|Collected errors)' |
726 | 726 | ||
727 | self.manifest = OpkgManifest(d, manifest_dir) | 727 | self.manifest = OpkgManifest(d, manifest_dir) |
728 | self.opkg_conf = self.d.getVar("IPKGCONF_TARGET", True) | 728 | self.opkg_conf = self.d.getVar("IPKGCONF_TARGET") |
729 | self.pkg_archs = self.d.getVar("ALL_MULTILIB_PACKAGE_ARCHS", True) | 729 | self.pkg_archs = self.d.getVar("ALL_MULTILIB_PACKAGE_ARCHS") |
730 | 730 | ||
731 | self.inc_opkg_image_gen = self.d.getVar('INC_IPK_IMAGE_GEN', True) or "" | 731 | self.inc_opkg_image_gen = self.d.getVar('INC_IPK_IMAGE_GEN') or "" |
732 | if self._remove_old_rootfs(): | 732 | if self._remove_old_rootfs(): |
733 | bb.utils.remove(self.image_rootfs, True) | 733 | bb.utils.remove(self.image_rootfs, True) |
734 | self.pm = OpkgPM(d, | 734 | self.pm = OpkgPM(d, |
@@ -742,7 +742,7 @@ class OpkgRootfs(DpkgOpkgRootfs): | |||
742 | self.pkg_archs) | 742 | self.pkg_archs) |
743 | self.pm.recover_packaging_data() | 743 | self.pm.recover_packaging_data() |
744 | 744 | ||
745 | bb.utils.remove(self.d.getVar('MULTILIB_TEMP_ROOTFS', True), True) | 745 | bb.utils.remove(self.d.getVar('MULTILIB_TEMP_ROOTFS'), True) |
746 | 746 | ||
747 | def _prelink_file(self, root_dir, filename): | 747 | def _prelink_file(self, root_dir, filename): |
748 | bb.note('prelink %s in %s' % (filename, root_dir)) | 748 | bb.note('prelink %s in %s' % (filename, root_dir)) |
@@ -797,7 +797,7 @@ class OpkgRootfs(DpkgOpkgRootfs): | |||
797 | """ | 797 | """ |
798 | def _multilib_sanity_test(self, dirs): | 798 | def _multilib_sanity_test(self, dirs): |
799 | 799 | ||
800 | allow_replace = self.d.getVar("MULTILIBRE_ALLOW_REP", True) | 800 | allow_replace = self.d.getVar("MULTILIBRE_ALLOW_REP") |
801 | if allow_replace is None: | 801 | if allow_replace is None: |
802 | allow_replace = "" | 802 | allow_replace = "" |
803 | 803 | ||
@@ -829,12 +829,12 @@ class OpkgRootfs(DpkgOpkgRootfs): | |||
829 | files[key] = item | 829 | files[key] = item |
830 | 830 | ||
831 | def _multilib_test_install(self, pkgs): | 831 | def _multilib_test_install(self, pkgs): |
832 | ml_temp = self.d.getVar("MULTILIB_TEMP_ROOTFS", True) | 832 | ml_temp = self.d.getVar("MULTILIB_TEMP_ROOTFS") |
833 | bb.utils.mkdirhier(ml_temp) | 833 | bb.utils.mkdirhier(ml_temp) |
834 | 834 | ||
835 | dirs = [self.image_rootfs] | 835 | dirs = [self.image_rootfs] |
836 | 836 | ||
837 | for variant in self.d.getVar("MULTILIB_VARIANTS", True).split(): | 837 | for variant in self.d.getVar("MULTILIB_VARIANTS").split(): |
838 | ml_target_rootfs = os.path.join(ml_temp, variant) | 838 | ml_target_rootfs = os.path.join(ml_temp, variant) |
839 | 839 | ||
840 | bb.utils.remove(ml_target_rootfs, True) | 840 | bb.utils.remove(ml_target_rootfs, True) |
@@ -894,9 +894,9 @@ class OpkgRootfs(DpkgOpkgRootfs): | |||
894 | old_vars_list = open(vars_list_file, 'r+').read() | 894 | old_vars_list = open(vars_list_file, 'r+').read() |
895 | 895 | ||
896 | new_vars_list = '%s:%s:%s\n' % \ | 896 | new_vars_list = '%s:%s:%s\n' % \ |
897 | ((self.d.getVar('BAD_RECOMMENDATIONS', True) or '').strip(), | 897 | ((self.d.getVar('BAD_RECOMMENDATIONS') or '').strip(), |
898 | (self.d.getVar('NO_RECOMMENDATIONS', True) or '').strip(), | 898 | (self.d.getVar('NO_RECOMMENDATIONS') or '').strip(), |
899 | (self.d.getVar('PACKAGE_EXCLUDE', True) or '').strip()) | 899 | (self.d.getVar('PACKAGE_EXCLUDE') or '').strip()) |
900 | open(vars_list_file, 'w+').write(new_vars_list) | 900 | open(vars_list_file, 'w+').write(new_vars_list) |
901 | 901 | ||
902 | if old_vars_list != new_vars_list: | 902 | if old_vars_list != new_vars_list: |
@@ -906,11 +906,11 @@ class OpkgRootfs(DpkgOpkgRootfs): | |||
906 | 906 | ||
907 | def _create(self): | 907 | def _create(self): |
908 | pkgs_to_install = self.manifest.parse_initial_manifest() | 908 | pkgs_to_install = self.manifest.parse_initial_manifest() |
909 | opkg_pre_process_cmds = self.d.getVar('OPKG_PREPROCESS_COMMANDS', True) | 909 | opkg_pre_process_cmds = self.d.getVar('OPKG_PREPROCESS_COMMANDS') |
910 | opkg_post_process_cmds = self.d.getVar('OPKG_POSTPROCESS_COMMANDS', True) | 910 | opkg_post_process_cmds = self.d.getVar('OPKG_POSTPROCESS_COMMANDS') |
911 | 911 | ||
912 | # update PM index files, unless users provide their own feeds | 912 | # update PM index files, unless users provide their own feeds |
913 | if (self.d.getVar('BUILD_IMAGES_FROM_FEEDS', True) or "") != "1": | 913 | if (self.d.getVar('BUILD_IMAGES_FROM_FEEDS') or "") != "1": |
914 | self.pm.write_index() | 914 | self.pm.write_index() |
915 | 915 | ||
916 | execute_pre_post_process(self.d, opkg_pre_process_cmds) | 916 | execute_pre_post_process(self.d, opkg_pre_process_cmds) |
@@ -968,7 +968,7 @@ class OpkgRootfs(DpkgOpkgRootfs): | |||
968 | 968 | ||
969 | def _get_delayed_postinsts(self): | 969 | def _get_delayed_postinsts(self): |
970 | status_file = os.path.join(self.image_rootfs, | 970 | status_file = os.path.join(self.image_rootfs, |
971 | self.d.getVar('OPKGLIBDIR', True).strip('/'), | 971 | self.d.getVar('OPKGLIBDIR').strip('/'), |
972 | "opkg", "status") | 972 | "opkg", "status") |
973 | return self._get_delayed_postinsts_common(status_file) | 973 | return self._get_delayed_postinsts_common(status_file) |
974 | 974 | ||
@@ -993,14 +993,14 @@ def get_class_for_type(imgtype): | |||
993 | "deb": DpkgRootfs}[imgtype] | 993 | "deb": DpkgRootfs}[imgtype] |
994 | 994 | ||
995 | def variable_depends(d, manifest_dir=None): | 995 | def variable_depends(d, manifest_dir=None): |
996 | img_type = d.getVar('IMAGE_PKGTYPE', True) | 996 | img_type = d.getVar('IMAGE_PKGTYPE') |
997 | cls = get_class_for_type(img_type) | 997 | cls = get_class_for_type(img_type) |
998 | return cls._depends_list() | 998 | return cls._depends_list() |
999 | 999 | ||
1000 | def create_rootfs(d, manifest_dir=None, progress_reporter=None, logcatcher=None): | 1000 | def create_rootfs(d, manifest_dir=None, progress_reporter=None, logcatcher=None): |
1001 | env_bkp = os.environ.copy() | 1001 | env_bkp = os.environ.copy() |
1002 | 1002 | ||
1003 | img_type = d.getVar('IMAGE_PKGTYPE', True) | 1003 | img_type = d.getVar('IMAGE_PKGTYPE') |
1004 | if img_type == "rpm": | 1004 | if img_type == "rpm": |
1005 | RpmRootfs(d, manifest_dir, progress_reporter, logcatcher).create() | 1005 | RpmRootfs(d, manifest_dir, progress_reporter, logcatcher).create() |
1006 | elif img_type == "ipk": | 1006 | elif img_type == "ipk": |
@@ -1014,13 +1014,13 @@ def create_rootfs(d, manifest_dir=None, progress_reporter=None, logcatcher=None) | |||
1014 | 1014 | ||
1015 | def image_list_installed_packages(d, rootfs_dir=None): | 1015 | def image_list_installed_packages(d, rootfs_dir=None): |
1016 | if not rootfs_dir: | 1016 | if not rootfs_dir: |
1017 | rootfs_dir = d.getVar('IMAGE_ROOTFS', True) | 1017 | rootfs_dir = d.getVar('IMAGE_ROOTFS') |
1018 | 1018 | ||
1019 | img_type = d.getVar('IMAGE_PKGTYPE', True) | 1019 | img_type = d.getVar('IMAGE_PKGTYPE') |
1020 | if img_type == "rpm": | 1020 | if img_type == "rpm": |
1021 | return RpmPkgsList(d, rootfs_dir).list_pkgs() | 1021 | return RpmPkgsList(d, rootfs_dir).list_pkgs() |
1022 | elif img_type == "ipk": | 1022 | elif img_type == "ipk": |
1023 | return OpkgPkgsList(d, rootfs_dir, d.getVar("IPKGCONF_TARGET", True)).list_pkgs() | 1023 | return OpkgPkgsList(d, rootfs_dir, d.getVar("IPKGCONF_TARGET")).list_pkgs() |
1024 | elif img_type == "deb": | 1024 | elif img_type == "deb": |
1025 | return DpkgPkgsList(d, rootfs_dir).list_pkgs() | 1025 | return DpkgPkgsList(d, rootfs_dir).list_pkgs() |
1026 | 1026 | ||
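create_rootfs() and image_list_installed_packages() above branch on IMAGE_PKGTYPE, and get_class_for_type() expresses the same choice as a dict lookup. A minimal standalone sketch of that dispatch idiom with placeholder classes, since the real RpmRootfs/OpkgRootfs/DpkgRootfs constructors need a full build environment.

    class FakeRpmRootfs:  pass        # placeholders for RpmRootfs / OpkgRootfs / DpkgRootfs
    class FakeOpkgRootfs: pass
    class FakeDpkgRootfs: pass

    def get_class_for_type_demo(imgtype):
        return {"rpm": FakeRpmRootfs,
                "ipk": FakeOpkgRootfs,
                "deb": FakeDpkgRootfs}[imgtype]

    img_type = "ipk"                  # stand-in for d.getVar('IMAGE_PKGTYPE')
    print(get_class_for_type_demo(img_type))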
diff --git a/meta/lib/oe/sdk.py b/meta/lib/oe/sdk.py index c74525f929..fef02d0777 100644 --- a/meta/lib/oe/sdk.py +++ b/meta/lib/oe/sdk.py | |||
@@ -11,16 +11,16 @@ import traceback | |||
11 | class Sdk(object, metaclass=ABCMeta): | 11 | class Sdk(object, metaclass=ABCMeta): |
12 | def __init__(self, d, manifest_dir): | 12 | def __init__(self, d, manifest_dir): |
13 | self.d = d | 13 | self.d = d |
14 | self.sdk_output = self.d.getVar('SDK_OUTPUT', True) | 14 | self.sdk_output = self.d.getVar('SDK_OUTPUT') |
15 | self.sdk_native_path = self.d.getVar('SDKPATHNATIVE', True).strip('/') | 15 | self.sdk_native_path = self.d.getVar('SDKPATHNATIVE').strip('/') |
16 | self.target_path = self.d.getVar('SDKTARGETSYSROOT', True).strip('/') | 16 | self.target_path = self.d.getVar('SDKTARGETSYSROOT').strip('/') |
17 | self.sysconfdir = self.d.getVar('sysconfdir', True).strip('/') | 17 | self.sysconfdir = self.d.getVar('sysconfdir').strip('/') |
18 | 18 | ||
19 | self.sdk_target_sysroot = os.path.join(self.sdk_output, self.target_path) | 19 | self.sdk_target_sysroot = os.path.join(self.sdk_output, self.target_path) |
20 | self.sdk_host_sysroot = self.sdk_output | 20 | self.sdk_host_sysroot = self.sdk_output |
21 | 21 | ||
22 | if manifest_dir is None: | 22 | if manifest_dir is None: |
23 | self.manifest_dir = self.d.getVar("SDK_DIR", True) | 23 | self.manifest_dir = self.d.getVar("SDK_DIR") |
24 | else: | 24 | else: |
25 | self.manifest_dir = manifest_dir | 25 | self.manifest_dir = manifest_dir |
26 | 26 | ||
@@ -40,12 +40,12 @@ class Sdk(object, metaclass=ABCMeta): | |||
40 | 40 | ||
41 | # Don't ship any libGL in the SDK | 41 | # Don't ship any libGL in the SDK |
42 | self.remove(os.path.join(self.sdk_output, self.sdk_native_path, | 42 | self.remove(os.path.join(self.sdk_output, self.sdk_native_path, |
43 | self.d.getVar('libdir_nativesdk', True).strip('/'), | 43 | self.d.getVar('libdir_nativesdk').strip('/'), |
44 | "libGL*")) | 44 | "libGL*")) |
45 | 45 | ||
46 | # Fix or remove broken .la files | 46 | # Fix or remove broken .la files |
47 | self.remove(os.path.join(self.sdk_output, self.sdk_native_path, | 47 | self.remove(os.path.join(self.sdk_output, self.sdk_native_path, |
48 | self.d.getVar('libdir_nativesdk', True).strip('/'), | 48 | self.d.getVar('libdir_nativesdk').strip('/'), |
49 | "*.la")) | 49 | "*.la")) |
50 | 50 | ||
51 | # Link the ld.so.cache file into the hosts filesystem | 51 | # Link the ld.so.cache file into the hosts filesystem |
@@ -54,7 +54,7 @@ class Sdk(object, metaclass=ABCMeta): | |||
54 | self.mkdirhier(os.path.dirname(link_name)) | 54 | self.mkdirhier(os.path.dirname(link_name)) |
55 | os.symlink("/etc/ld.so.cache", link_name) | 55 | os.symlink("/etc/ld.so.cache", link_name) |
56 | 56 | ||
57 | execute_pre_post_process(self.d, self.d.getVar('SDK_POSTPROCESS_COMMAND', True)) | 57 | execute_pre_post_process(self.d, self.d.getVar('SDK_POSTPROCESS_COMMAND')) |
58 | 58 | ||
59 | def movefile(self, sourcefile, destdir): | 59 | def movefile(self, sourcefile, destdir): |
60 | try: | 60 | try: |
@@ -102,7 +102,7 @@ class RpmSdk(Sdk): | |||
102 | 102 | ||
103 | self.target_pm = RpmPM(d, | 103 | self.target_pm = RpmPM(d, |
104 | self.sdk_target_sysroot, | 104 | self.sdk_target_sysroot, |
105 | self.d.getVar('TARGET_VENDOR', True), | 105 | self.d.getVar('TARGET_VENDOR'), |
106 | 'target', | 106 | 'target', |
107 | target_providename | 107 | target_providename |
108 | ) | 108 | ) |
@@ -118,7 +118,7 @@ class RpmSdk(Sdk): | |||
118 | 118 | ||
119 | self.host_pm = RpmPM(d, | 119 | self.host_pm = RpmPM(d, |
120 | self.sdk_host_sysroot, | 120 | self.sdk_host_sysroot, |
121 | self.d.getVar('SDK_VENDOR', True), | 121 | self.d.getVar('SDK_VENDOR'), |
122 | 'host', | 122 | 'host', |
123 | sdk_providename, | 123 | sdk_providename, |
124 | "SDK_PACKAGE_ARCHS", | 124 | "SDK_PACKAGE_ARCHS", |
@@ -149,9 +149,9 @@ class RpmSdk(Sdk): | |||
149 | bb.note("Installing TARGET packages") | 149 | bb.note("Installing TARGET packages") |
150 | self._populate_sysroot(self.target_pm, self.target_manifest) | 150 | self._populate_sysroot(self.target_pm, self.target_manifest) |
151 | 151 | ||
152 | self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY', True)) | 152 | self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY')) |
153 | 153 | ||
154 | execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND", True)) | 154 | execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND")) |
155 | 155 | ||
156 | if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d): | 156 | if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d): |
157 | self.target_pm.remove_packaging_data() | 157 | self.target_pm.remove_packaging_data() |
@@ -159,7 +159,7 @@ class RpmSdk(Sdk): | |||
159 | bb.note("Installing NATIVESDK packages") | 159 | bb.note("Installing NATIVESDK packages") |
160 | self._populate_sysroot(self.host_pm, self.host_manifest) | 160 | self._populate_sysroot(self.host_pm, self.host_manifest) |
161 | 161 | ||
162 | execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_HOST_COMMAND", True)) | 162 | execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_HOST_COMMAND")) |
163 | 163 | ||
164 | if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d): | 164 | if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d): |
165 | self.host_pm.remove_packaging_data() | 165 | self.host_pm.remove_packaging_data() |
@@ -167,7 +167,7 @@ class RpmSdk(Sdk): | |||
167 | # Move host RPM library data | 167 | # Move host RPM library data |
168 | native_rpm_state_dir = os.path.join(self.sdk_output, | 168 | native_rpm_state_dir = os.path.join(self.sdk_output, |
169 | self.sdk_native_path, | 169 | self.sdk_native_path, |
170 | self.d.getVar('localstatedir_nativesdk', True).strip('/'), | 170 | self.d.getVar('localstatedir_nativesdk').strip('/'), |
171 | "lib", | 171 | "lib", |
172 | "rpm" | 172 | "rpm" |
173 | ) | 173 | ) |
@@ -197,8 +197,8 @@ class OpkgSdk(Sdk): | |||
197 | def __init__(self, d, manifest_dir=None): | 197 | def __init__(self, d, manifest_dir=None): |
198 | super(OpkgSdk, self).__init__(d, manifest_dir) | 198 | super(OpkgSdk, self).__init__(d, manifest_dir) |
199 | 199 | ||
200 | self.target_conf = self.d.getVar("IPKGCONF_TARGET", True) | 200 | self.target_conf = self.d.getVar("IPKGCONF_TARGET") |
201 | self.host_conf = self.d.getVar("IPKGCONF_SDK", True) | 201 | self.host_conf = self.d.getVar("IPKGCONF_SDK") |
202 | 202 | ||
203 | self.target_manifest = OpkgManifest(d, self.manifest_dir, | 203 | self.target_manifest = OpkgManifest(d, self.manifest_dir, |
204 | Manifest.MANIFEST_TYPE_SDK_TARGET) | 204 | Manifest.MANIFEST_TYPE_SDK_TARGET) |
@@ -206,15 +206,15 @@ class OpkgSdk(Sdk): | |||
206 | Manifest.MANIFEST_TYPE_SDK_HOST) | 206 | Manifest.MANIFEST_TYPE_SDK_HOST) |
207 | 207 | ||
208 | self.target_pm = OpkgPM(d, self.sdk_target_sysroot, self.target_conf, | 208 | self.target_pm = OpkgPM(d, self.sdk_target_sysroot, self.target_conf, |
209 | self.d.getVar("ALL_MULTILIB_PACKAGE_ARCHS", True)) | 209 | self.d.getVar("ALL_MULTILIB_PACKAGE_ARCHS")) |
210 | 210 | ||
211 | self.host_pm = OpkgPM(d, self.sdk_host_sysroot, self.host_conf, | 211 | self.host_pm = OpkgPM(d, self.sdk_host_sysroot, self.host_conf, |
212 | self.d.getVar("SDK_PACKAGE_ARCHS", True)) | 212 | self.d.getVar("SDK_PACKAGE_ARCHS")) |
213 | 213 | ||
214 | def _populate_sysroot(self, pm, manifest): | 214 | def _populate_sysroot(self, pm, manifest): |
215 | pkgs_to_install = manifest.parse_initial_manifest() | 215 | pkgs_to_install = manifest.parse_initial_manifest() |
216 | 216 | ||
217 | if (self.d.getVar('BUILD_IMAGES_FROM_FEEDS', True) or "") != "1": | 217 | if (self.d.getVar('BUILD_IMAGES_FROM_FEEDS') or "") != "1": |
218 | pm.write_index() | 218 | pm.write_index() |
219 | 219 | ||
220 | pm.update() | 220 | pm.update() |
@@ -228,9 +228,9 @@ class OpkgSdk(Sdk): | |||
228 | bb.note("Installing TARGET packages") | 228 | bb.note("Installing TARGET packages") |
229 | self._populate_sysroot(self.target_pm, self.target_manifest) | 229 | self._populate_sysroot(self.target_pm, self.target_manifest) |
230 | 230 | ||
231 | self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY', True)) | 231 | self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY')) |
232 | 232 | ||
233 | execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND", True)) | 233 | execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND")) |
234 | 234 | ||
235 | if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d): | 235 | if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d): |
236 | self.target_pm.remove_packaging_data() | 236 | self.target_pm.remove_packaging_data() |
@@ -238,7 +238,7 @@ class OpkgSdk(Sdk): | |||
238 | bb.note("Installing NATIVESDK packages") | 238 | bb.note("Installing NATIVESDK packages") |
239 | self._populate_sysroot(self.host_pm, self.host_manifest) | 239 | self._populate_sysroot(self.host_pm, self.host_manifest) |
240 | 240 | ||
241 | execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_HOST_COMMAND", True)) | 241 | execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_HOST_COMMAND")) |
242 | 242 | ||
243 | if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d): | 243 | if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d): |
244 | self.host_pm.remove_packaging_data() | 244 | self.host_pm.remove_packaging_data() |
@@ -257,7 +257,7 @@ class OpkgSdk(Sdk): | |||
257 | os.path.basename(self.host_conf)), 0o644) | 257 | os.path.basename(self.host_conf)), 0o644) |
258 | 258 | ||
259 | native_opkg_state_dir = os.path.join(self.sdk_output, self.sdk_native_path, | 259 | native_opkg_state_dir = os.path.join(self.sdk_output, self.sdk_native_path, |
260 | self.d.getVar('localstatedir_nativesdk', True).strip('/'), | 260 | self.d.getVar('localstatedir_nativesdk').strip('/'), |
261 | "lib", "opkg") | 261 | "lib", "opkg") |
262 | self.mkdirhier(native_opkg_state_dir) | 262 | self.mkdirhier(native_opkg_state_dir) |
263 | for f in glob.glob(os.path.join(self.sdk_output, "var", "lib", "opkg", "*")): | 263 | for f in glob.glob(os.path.join(self.sdk_output, "var", "lib", "opkg", "*")): |
@@ -270,8 +270,8 @@ class DpkgSdk(Sdk): | |||
270 | def __init__(self, d, manifest_dir=None): | 270 | def __init__(self, d, manifest_dir=None): |
271 | super(DpkgSdk, self).__init__(d, manifest_dir) | 271 | super(DpkgSdk, self).__init__(d, manifest_dir) |
272 | 272 | ||
273 | self.target_conf_dir = os.path.join(self.d.getVar("APTCONF_TARGET", True), "apt") | 273 | self.target_conf_dir = os.path.join(self.d.getVar("APTCONF_TARGET"), "apt") |
274 | self.host_conf_dir = os.path.join(self.d.getVar("APTCONF_TARGET", True), "apt-sdk") | 274 | self.host_conf_dir = os.path.join(self.d.getVar("APTCONF_TARGET"), "apt-sdk") |
275 | 275 | ||
276 | self.target_manifest = DpkgManifest(d, self.manifest_dir, | 276 | self.target_manifest = DpkgManifest(d, self.manifest_dir, |
277 | Manifest.MANIFEST_TYPE_SDK_TARGET) | 277 | Manifest.MANIFEST_TYPE_SDK_TARGET) |
@@ -279,17 +279,17 @@ class DpkgSdk(Sdk): | |||
279 | Manifest.MANIFEST_TYPE_SDK_HOST) | 279 | Manifest.MANIFEST_TYPE_SDK_HOST) |
280 | 280 | ||
281 | self.target_pm = DpkgPM(d, self.sdk_target_sysroot, | 281 | self.target_pm = DpkgPM(d, self.sdk_target_sysroot, |
282 | self.d.getVar("PACKAGE_ARCHS", True), | 282 | self.d.getVar("PACKAGE_ARCHS"), |
283 | self.d.getVar("DPKG_ARCH", True), | 283 | self.d.getVar("DPKG_ARCH"), |
284 | self.target_conf_dir) | 284 | self.target_conf_dir) |
285 | 285 | ||
286 | self.host_pm = DpkgPM(d, self.sdk_host_sysroot, | 286 | self.host_pm = DpkgPM(d, self.sdk_host_sysroot, |
287 | self.d.getVar("SDK_PACKAGE_ARCHS", True), | 287 | self.d.getVar("SDK_PACKAGE_ARCHS"), |
288 | self.d.getVar("DEB_SDK_ARCH", True), | 288 | self.d.getVar("DEB_SDK_ARCH"), |
289 | self.host_conf_dir) | 289 | self.host_conf_dir) |
290 | 290 | ||
291 | def _copy_apt_dir_to(self, dst_dir): | 291 | def _copy_apt_dir_to(self, dst_dir): |
292 | staging_etcdir_native = self.d.getVar("STAGING_ETCDIR_NATIVE", True) | 292 | staging_etcdir_native = self.d.getVar("STAGING_ETCDIR_NATIVE") |
293 | 293 | ||
294 | self.remove(dst_dir, True) | 294 | self.remove(dst_dir, True) |
295 | 295 | ||
@@ -310,9 +310,9 @@ class DpkgSdk(Sdk): | |||
310 | bb.note("Installing TARGET packages") | 310 | bb.note("Installing TARGET packages") |
311 | self._populate_sysroot(self.target_pm, self.target_manifest) | 311 | self._populate_sysroot(self.target_pm, self.target_manifest) |
312 | 312 | ||
313 | self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY', True)) | 313 | self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY')) |
314 | 314 | ||
315 | execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND", True)) | 315 | execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND")) |
316 | 316 | ||
317 | self._copy_apt_dir_to(os.path.join(self.sdk_target_sysroot, "etc", "apt")) | 317 | self._copy_apt_dir_to(os.path.join(self.sdk_target_sysroot, "etc", "apt")) |
318 | 318 | ||
@@ -322,7 +322,7 @@ class DpkgSdk(Sdk): | |||
322 | bb.note("Installing NATIVESDK packages") | 322 | bb.note("Installing NATIVESDK packages") |
323 | self._populate_sysroot(self.host_pm, self.host_manifest) | 323 | self._populate_sysroot(self.host_pm, self.host_manifest) |
324 | 324 | ||
325 | execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_HOST_COMMAND", True)) | 325 | execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_HOST_COMMAND")) |
326 | 326 | ||
327 | self._copy_apt_dir_to(os.path.join(self.sdk_output, self.sdk_native_path, | 327 | self._copy_apt_dir_to(os.path.join(self.sdk_output, self.sdk_native_path, |
328 | "etc", "apt")) | 328 | "etc", "apt")) |
@@ -341,26 +341,26 @@ class DpkgSdk(Sdk): | |||
341 | 341 | ||
342 | def sdk_list_installed_packages(d, target, rootfs_dir=None): | 342 | def sdk_list_installed_packages(d, target, rootfs_dir=None): |
343 | if rootfs_dir is None: | 343 | if rootfs_dir is None: |
344 | sdk_output = d.getVar('SDK_OUTPUT', True) | 344 | sdk_output = d.getVar('SDK_OUTPUT') |
345 | target_path = d.getVar('SDKTARGETSYSROOT', True).strip('/') | 345 | target_path = d.getVar('SDKTARGETSYSROOT').strip('/') |
346 | 346 | ||
347 | rootfs_dir = [sdk_output, os.path.join(sdk_output, target_path)][target is True] | 347 | rootfs_dir = [sdk_output, os.path.join(sdk_output, target_path)][target is True] |
348 | 348 | ||
349 | img_type = d.getVar('IMAGE_PKGTYPE', True) | 349 | img_type = d.getVar('IMAGE_PKGTYPE') |
350 | if img_type == "rpm": | 350 | if img_type == "rpm": |
351 | arch_var = ["SDK_PACKAGE_ARCHS", None][target is True] | 351 | arch_var = ["SDK_PACKAGE_ARCHS", None][target is True] |
352 | os_var = ["SDK_OS", None][target is True] | 352 | os_var = ["SDK_OS", None][target is True] |
353 | return RpmPkgsList(d, rootfs_dir, arch_var, os_var).list_pkgs() | 353 | return RpmPkgsList(d, rootfs_dir, arch_var, os_var).list_pkgs() |
354 | elif img_type == "ipk": | 354 | elif img_type == "ipk": |
355 | conf_file_var = ["IPKGCONF_SDK", "IPKGCONF_TARGET"][target is True] | 355 | conf_file_var = ["IPKGCONF_SDK", "IPKGCONF_TARGET"][target is True] |
356 | return OpkgPkgsList(d, rootfs_dir, d.getVar(conf_file_var, True)).list_pkgs() | 356 | return OpkgPkgsList(d, rootfs_dir, d.getVar(conf_file_var)).list_pkgs() |
357 | elif img_type == "deb": | 357 | elif img_type == "deb": |
358 | return DpkgPkgsList(d, rootfs_dir).list_pkgs() | 358 | return DpkgPkgsList(d, rootfs_dir).list_pkgs() |
359 | 359 | ||
360 | def populate_sdk(d, manifest_dir=None): | 360 | def populate_sdk(d, manifest_dir=None): |
361 | env_bkp = os.environ.copy() | 361 | env_bkp = os.environ.copy() |
362 | 362 | ||
363 | img_type = d.getVar('IMAGE_PKGTYPE', True) | 363 | img_type = d.getVar('IMAGE_PKGTYPE') |
364 | if img_type == "rpm": | 364 | if img_type == "rpm": |
365 | RpmSdk(d, manifest_dir).populate() | 365 | RpmSdk(d, manifest_dir).populate() |
366 | elif img_type == "ipk": | 366 | elif img_type == "ipk": |
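The oe/sdk.py hunks apply the same flag removal; they also rely on the usual guard for unset variables, which is unchanged by the new expansion default. A hedged sketch of that guard, reusing the BUILD_IMAGES_FROM_FEEDS check from the hunk above (build_from_feeds is an illustrative name, not an oe.sdk function):

def build_from_feeds(d):
    # getVar() returns None for an unset variable, so fall back to "" before
    # comparing; anything other than "1" means the package indexes get built.
    return (d.getVar('BUILD_IMAGES_FROM_FEEDS') or "") == "1"
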
diff --git a/meta/lib/oe/sstatesig.py b/meta/lib/oe/sstatesig.py
index 8224e3a12e..e053c37e96 100644
--- a/meta/lib/oe/sstatesig.py
+++ b/meta/lib/oe/sstatesig.py
@@ -63,10 +63,10 @@ def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCache): | |||
63 | 63 | ||
64 | def sstate_lockedsigs(d): | 64 | def sstate_lockedsigs(d): |
65 | sigs = {} | 65 | sigs = {} |
66 | types = (d.getVar("SIGGEN_LOCKEDSIGS_TYPES", True) or "").split() | 66 | types = (d.getVar("SIGGEN_LOCKEDSIGS_TYPES") or "").split() |
67 | for t in types: | 67 | for t in types: |
68 | siggen_lockedsigs_var = "SIGGEN_LOCKEDSIGS_%s" % t | 68 | siggen_lockedsigs_var = "SIGGEN_LOCKEDSIGS_%s" % t |
69 | lockedsigs = (d.getVar(siggen_lockedsigs_var, True) or "").split() | 69 | lockedsigs = (d.getVar(siggen_lockedsigs_var) or "").split() |
70 | for ls in lockedsigs: | 70 | for ls in lockedsigs: |
71 | pn, task, h = ls.split(":", 2) | 71 | pn, task, h = ls.split(":", 2) |
72 | if pn not in sigs: | 72 | if pn not in sigs: |
@@ -77,8 +77,8 @@ def sstate_lockedsigs(d): | |||
77 | class SignatureGeneratorOEBasic(bb.siggen.SignatureGeneratorBasic): | 77 | class SignatureGeneratorOEBasic(bb.siggen.SignatureGeneratorBasic): |
78 | name = "OEBasic" | 78 | name = "OEBasic" |
79 | def init_rundepcheck(self, data): | 79 | def init_rundepcheck(self, data): |
80 | self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE", True) or "").split() | 80 | self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split() |
81 | self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS", True) or "").split() | 81 | self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split() |
82 | pass | 82 | pass |
83 | def rundep_check(self, fn, recipename, task, dep, depname, dataCache = None): | 83 | def rundep_check(self, fn, recipename, task, dep, depname, dataCache = None): |
84 | return sstate_rundepfilter(self, fn, recipename, task, dep, depname, dataCache) | 84 | return sstate_rundepfilter(self, fn, recipename, task, dep, depname, dataCache) |
@@ -86,15 +86,15 @@ class SignatureGeneratorOEBasic(bb.siggen.SignatureGeneratorBasic): | |||
86 | class SignatureGeneratorOEBasicHash(bb.siggen.SignatureGeneratorBasicHash): | 86 | class SignatureGeneratorOEBasicHash(bb.siggen.SignatureGeneratorBasicHash): |
87 | name = "OEBasicHash" | 87 | name = "OEBasicHash" |
88 | def init_rundepcheck(self, data): | 88 | def init_rundepcheck(self, data): |
89 | self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE", True) or "").split() | 89 | self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split() |
90 | self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS", True) or "").split() | 90 | self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split() |
91 | self.lockedsigs = sstate_lockedsigs(data) | 91 | self.lockedsigs = sstate_lockedsigs(data) |
92 | self.lockedhashes = {} | 92 | self.lockedhashes = {} |
93 | self.lockedpnmap = {} | 93 | self.lockedpnmap = {} |
94 | self.lockedhashfn = {} | 94 | self.lockedhashfn = {} |
95 | self.machine = data.getVar("MACHINE", True) | 95 | self.machine = data.getVar("MACHINE") |
96 | self.mismatch_msgs = [] | 96 | self.mismatch_msgs = [] |
97 | self.unlockedrecipes = (data.getVar("SIGGEN_UNLOCKED_RECIPES", True) or | 97 | self.unlockedrecipes = (data.getVar("SIGGEN_UNLOCKED_RECIPES") or |
98 | "").split() | 98 | "").split() |
99 | self.unlockedrecipes = { k: "" for k in self.unlockedrecipes } | 99 | self.unlockedrecipes = { k: "" for k in self.unlockedrecipes } |
100 | pass | 100 | pass |
@@ -224,13 +224,13 @@ class SignatureGeneratorOEBasicHash(bb.siggen.SignatureGeneratorBasicHash): | |||
224 | sstate_missing_msgs.append("Locked sig is set for %s:%s (%s) yet not in sstate cache?" | 224 | sstate_missing_msgs.append("Locked sig is set for %s:%s (%s) yet not in sstate cache?" |
225 | % (pn, sq_task[task], sq_hash[task])) | 225 | % (pn, sq_task[task], sq_hash[task])) |
226 | 226 | ||
227 | checklevel = d.getVar("SIGGEN_LOCKEDSIGS_TASKSIG_CHECK", True) | 227 | checklevel = d.getVar("SIGGEN_LOCKEDSIGS_TASKSIG_CHECK") |
228 | if checklevel == 'warn': | 228 | if checklevel == 'warn': |
229 | warn_msgs += self.mismatch_msgs | 229 | warn_msgs += self.mismatch_msgs |
230 | elif checklevel == 'error': | 230 | elif checklevel == 'error': |
231 | error_msgs += self.mismatch_msgs | 231 | error_msgs += self.mismatch_msgs |
232 | 232 | ||
233 | checklevel = d.getVar("SIGGEN_LOCKEDSIGS_SSTATE_EXISTS_CHECK", True) | 233 | checklevel = d.getVar("SIGGEN_LOCKEDSIGS_SSTATE_EXISTS_CHECK") |
234 | if checklevel == 'warn': | 234 | if checklevel == 'warn': |
235 | warn_msgs += sstate_missing_msgs | 235 | warn_msgs += sstate_missing_msgs |
236 | elif checklevel == 'error': | 236 | elif checklevel == 'error': |
@@ -274,7 +274,7 @@ def find_siginfo(pn, taskname, taskhashlist, d): | |||
274 | localdata.setVar('PV', '*') | 274 | localdata.setVar('PV', '*') |
275 | localdata.setVar('PR', '*') | 275 | localdata.setVar('PR', '*') |
276 | localdata.setVar('EXTENDPE', '') | 276 | localdata.setVar('EXTENDPE', '') |
277 | stamp = localdata.getVar('STAMP', True) | 277 | stamp = localdata.getVar('STAMP') |
278 | if pn.startswith("gcc-source"): | 278 | if pn.startswith("gcc-source"): |
279 | # gcc-source shared workdir is a special case :( | 279 | # gcc-source shared workdir is a special case :( |
280 | stamp = localdata.expand("${STAMPS_DIR}/work-shared/gcc-${PV}-${PR}") | 280 | stamp = localdata.expand("${STAMPS_DIR}/work-shared/gcc-${PV}-${PR}") |
@@ -309,18 +309,18 @@ def find_siginfo(pn, taskname, taskhashlist, d): | |||
309 | localdata.setVar('PV', '*') | 309 | localdata.setVar('PV', '*') |
310 | localdata.setVar('PR', '*') | 310 | localdata.setVar('PR', '*') |
311 | localdata.setVar('BB_TASKHASH', hashval) | 311 | localdata.setVar('BB_TASKHASH', hashval) |
312 | swspec = localdata.getVar('SSTATE_SWSPEC', True) | 312 | swspec = localdata.getVar('SSTATE_SWSPEC') |
313 | if taskname in ['do_fetch', 'do_unpack', 'do_patch', 'do_populate_lic', 'do_preconfigure'] and swspec: | 313 | if taskname in ['do_fetch', 'do_unpack', 'do_patch', 'do_populate_lic', 'do_preconfigure'] and swspec: |
314 | localdata.setVar('SSTATE_PKGSPEC', '${SSTATE_SWSPEC}') | 314 | localdata.setVar('SSTATE_PKGSPEC', '${SSTATE_SWSPEC}') |
315 | elif pn.endswith('-native') or "-cross-" in pn or "-crosssdk-" in pn: | 315 | elif pn.endswith('-native') or "-cross-" in pn or "-crosssdk-" in pn: |
316 | localdata.setVar('SSTATE_EXTRAPATH', "${NATIVELSBSTRING}/") | 316 | localdata.setVar('SSTATE_EXTRAPATH', "${NATIVELSBSTRING}/") |
317 | sstatename = taskname[3:] | 317 | sstatename = taskname[3:] |
318 | filespec = '%s_%s.*.siginfo' % (localdata.getVar('SSTATE_PKG', True), sstatename) | 318 | filespec = '%s_%s.*.siginfo' % (localdata.getVar('SSTATE_PKG'), sstatename) |
319 | 319 | ||
320 | if hashval != '*': | 320 | if hashval != '*': |
321 | sstatedir = "%s/%s" % (d.getVar('SSTATE_DIR', True), hashval[:2]) | 321 | sstatedir = "%s/%s" % (d.getVar('SSTATE_DIR'), hashval[:2]) |
322 | else: | 322 | else: |
323 | sstatedir = d.getVar('SSTATE_DIR', True) | 323 | sstatedir = d.getVar('SSTATE_DIR') |
324 | 324 | ||
325 | for root, dirs, files in os.walk(sstatedir): | 325 | for root, dirs, files in os.walk(sstatedir): |
326 | for fn in files: | 326 | for fn in files: |
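oe/sstatesig.py shows the list-valued variant of the same lookup: the expanded value is whitespace-split, with an empty list when the variable is unset. A minimal sketch of the idiom used by sstate_lockedsigs() above (getvar_split is a hypothetical helper, not part of the module):

def getvar_split(d, varname):
    # Split a whitespace-separated BitBake variable, treating unset as empty.
    return (d.getVar(varname) or "").split()

# Example: for each t in getvar_split(d, 'SIGGEN_LOCKEDSIGS_TYPES'), the code
# above then reads the per-type variable 'SIGGEN_LOCKEDSIGS_%s' % t.
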
diff --git a/meta/lib/oe/terminal.py b/meta/lib/oe/terminal.py
index a89fa45691..0426e15834 100644
--- a/meta/lib/oe/terminal.py
+++ b/meta/lib/oe/terminal.py
@@ -196,7 +196,7 @@ class Custom(Terminal): | |||
196 | priority = 3 | 196 | priority = 3 |
197 | 197 | ||
198 | def __init__(self, sh_cmd, title=None, env=None, d=None): | 198 | def __init__(self, sh_cmd, title=None, env=None, d=None): |
199 | self.command = d and d.getVar('OE_TERMINAL_CUSTOMCMD', True) | 199 | self.command = d and d.getVar('OE_TERMINAL_CUSTOMCMD') |
200 | if self.command: | 200 | if self.command: |
201 | if not '{command}' in self.command: | 201 | if not '{command}' in self.command: |
202 | self.command += ' {command}' | 202 | self.command += ' {command}' |
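The single oe/terminal.py change sits inside a guard that also tolerates a missing datastore: d and d.getVar(...) short-circuits to None when no datastore was passed. A small sketch of that behaviour (custom_command is illustrative only):

def custom_command(d=None):
    # None if d is None or OE_TERMINAL_CUSTOMCMD is unset; otherwise the
    # expanded command string, matching the guard in Custom.__init__ above.
    return d and d.getVar('OE_TERMINAL_CUSTOMCMD')
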
diff --git a/meta/lib/oe/utils.py b/meta/lib/oe/utils.py
index 2b095f1f0a..bb3f0e5d75 100644
--- a/meta/lib/oe/utils.py
+++ b/meta/lib/oe/utils.py
@@ -23,13 +23,13 @@ def ifelse(condition, iftrue = True, iffalse = False): | |||
23 | return iffalse | 23 | return iffalse |
24 | 24 | ||
25 | def conditional(variable, checkvalue, truevalue, falsevalue, d): | 25 | def conditional(variable, checkvalue, truevalue, falsevalue, d): |
26 | if d.getVar(variable, True) == checkvalue: | 26 | if d.getVar(variable) == checkvalue: |
27 | return truevalue | 27 | return truevalue |
28 | else: | 28 | else: |
29 | return falsevalue | 29 | return falsevalue |
30 | 30 | ||
31 | def less_or_equal(variable, checkvalue, truevalue, falsevalue, d): | 31 | def less_or_equal(variable, checkvalue, truevalue, falsevalue, d): |
32 | if float(d.getVar(variable, True)) <= float(checkvalue): | 32 | if float(d.getVar(variable)) <= float(checkvalue): |
33 | return truevalue | 33 | return truevalue |
34 | else: | 34 | else: |
35 | return falsevalue | 35 | return falsevalue |
@@ -42,8 +42,8 @@ def version_less_or_equal(variable, checkvalue, truevalue, falsevalue, d): | |||
42 | return falsevalue | 42 | return falsevalue |
43 | 43 | ||
44 | def both_contain(variable1, variable2, checkvalue, d): | 44 | def both_contain(variable1, variable2, checkvalue, d): |
45 | val1 = d.getVar(variable1, True) | 45 | val1 = d.getVar(variable1) |
46 | val2 = d.getVar(variable2, True) | 46 | val2 = d.getVar(variable2) |
47 | val1 = set(val1.split()) | 47 | val1 = set(val1.split()) |
48 | val2 = set(val2.split()) | 48 | val2 = set(val2.split()) |
49 | if isinstance(checkvalue, str): | 49 | if isinstance(checkvalue, str): |
@@ -66,8 +66,8 @@ def set_intersect(variable1, variable2, d): | |||
66 | s3 = set_intersect(s1, s2) | 66 | s3 = set_intersect(s1, s2) |
67 | => s3 = "b c" | 67 | => s3 = "b c" |
68 | """ | 68 | """ |
69 | val1 = set(d.getVar(variable1, True).split()) | 69 | val1 = set(d.getVar(variable1).split()) |
70 | val2 = set(d.getVar(variable2, True).split()) | 70 | val2 = set(d.getVar(variable2).split()) |
71 | return " ".join(val1 & val2) | 71 | return " ".join(val1 & val2) |
72 | 72 | ||
73 | def prune_suffix(var, suffixes, d): | 73 | def prune_suffix(var, suffixes, d): |
@@ -77,7 +77,7 @@ def prune_suffix(var, suffixes, d): | |||
77 | if var.endswith(suffix): | 77 | if var.endswith(suffix): |
78 | var = var.replace(suffix, "") | 78 | var = var.replace(suffix, "") |
79 | 79 | ||
80 | prefix = d.getVar("MLPREFIX", True) | 80 | prefix = d.getVar("MLPREFIX") |
81 | if prefix and var.startswith(prefix): | 81 | if prefix and var.startswith(prefix): |
82 | var = var.replace(prefix, "") | 82 | var = var.replace(prefix, "") |
83 | 83 | ||
@@ -115,9 +115,9 @@ def features_backfill(var,d): | |||
115 | # disturbing distributions that have already set DISTRO_FEATURES. | 115 | # disturbing distributions that have already set DISTRO_FEATURES. |
116 | # Distributions wanting to elide a value in DISTRO_FEATURES_BACKFILL should | 116 | # Distributions wanting to elide a value in DISTRO_FEATURES_BACKFILL should |
117 | # add the feature to DISTRO_FEATURES_BACKFILL_CONSIDERED | 117 | # add the feature to DISTRO_FEATURES_BACKFILL_CONSIDERED |
118 | features = (d.getVar(var, True) or "").split() | 118 | features = (d.getVar(var) or "").split() |
119 | backfill = (d.getVar(var+"_BACKFILL", True) or "").split() | 119 | backfill = (d.getVar(var+"_BACKFILL") or "").split() |
120 | considered = (d.getVar(var+"_BACKFILL_CONSIDERED", True) or "").split() | 120 | considered = (d.getVar(var+"_BACKFILL_CONSIDERED") or "").split() |
121 | 121 | ||
122 | addfeatures = [] | 122 | addfeatures = [] |
123 | for feature in backfill: | 123 | for feature in backfill: |
@@ -133,12 +133,12 @@ def packages_filter_out_system(d): | |||
133 | Return a list of packages from PACKAGES with the "system" packages such as | 133 | Return a list of packages from PACKAGES with the "system" packages such as |
134 | PN-dbg PN-doc PN-locale-eb-gb removed. | 134 | PN-dbg PN-doc PN-locale-eb-gb removed. |
135 | """ | 135 | """ |
136 | pn = d.getVar('PN', True) | 136 | pn = d.getVar('PN') |
137 | blacklist = [pn + suffix for suffix in ('', '-dbg', '-dev', '-doc', '-locale', '-staticdev')] | 137 | blacklist = [pn + suffix for suffix in ('', '-dbg', '-dev', '-doc', '-locale', '-staticdev')] |
138 | localepkg = pn + "-locale-" | 138 | localepkg = pn + "-locale-" |
139 | pkgs = [] | 139 | pkgs = [] |
140 | 140 | ||
141 | for pkg in d.getVar('PACKAGES', True).split(): | 141 | for pkg in d.getVar('PACKAGES').split(): |
142 | if pkg not in blacklist and localepkg not in pkg: | 142 | if pkg not in blacklist and localepkg not in pkg: |
143 | pkgs.append(pkg) | 143 | pkgs.append(pkg) |
144 | return pkgs | 144 | return pkgs |
@@ -231,7 +231,7 @@ def format_pkg_list(pkg_dict, ret_format=None): | |||
231 | return '\n'.join(output) | 231 | return '\n'.join(output) |
232 | 232 | ||
233 | def host_gcc_version(d): | 233 | def host_gcc_version(d): |
234 | compiler = d.getVar("BUILD_CC", True) | 234 | compiler = d.getVar("BUILD_CC") |
235 | retval, output = getstatusoutput("%s --version" % compiler) | 235 | retval, output = getstatusoutput("%s --version" % compiler) |
236 | if retval: | 236 | if retval: |
237 | bb.fatal("Error running %s --version: %s" % (compiler, output)) | 237 | bb.fatal("Error running %s --version: %s" % (compiler, output)) |
@@ -316,8 +316,8 @@ def write_ld_so_conf(d): | |||
316 | bb.utils.remove(ldsoconf) | 316 | bb.utils.remove(ldsoconf) |
317 | bb.utils.mkdirhier(os.path.dirname(ldsoconf)) | 317 | bb.utils.mkdirhier(os.path.dirname(ldsoconf)) |
318 | with open(ldsoconf, "w") as f: | 318 | with open(ldsoconf, "w") as f: |
319 | f.write(d.getVar("base_libdir", True) + '\n') | 319 | f.write(d.getVar("base_libdir") + '\n') |
320 | f.write(d.getVar("libdir", True) + '\n') | 320 | f.write(d.getVar("libdir") + '\n') |
321 | 321 | ||
322 | class ImageQAFailed(bb.build.FuncFailed): | 322 | class ImageQAFailed(bb.build.FuncFailed): |
323 | def __init__(self, description, name=None, logfile=None): | 323 | def __init__(self, description, name=None, logfile=None): |
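The helpers touched in oe/utils.py keep their public signatures; only the internal lookups change, so existing callers are unaffected. A hedged usage sketch for conditional(), assuming a BitBake environment where the oe module is importable; SITEINFO_ENDIANNESS is set to "le" or "be" by siteinfo.bbclass, and the flag strings below are made up for illustration:

import oe.utils

def endian_flag(d):
    # Compares the expanded value of SITEINFO_ENDIANNESS against 'le' and
    # returns the matching (made-up) configure flag.
    return oe.utils.conditional('SITEINFO_ENDIANNESS', 'le',
                                '--with-endian=little', '--with-endian=big', d)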