Diffstat (limited to 'meta/classes/base.bbclass')
-rw-r--r-- | meta/classes/base.bbclass | 654
1 file changed, 654 insertions, 0 deletions
diff --git a/meta/classes/base.bbclass b/meta/classes/base.bbclass
new file mode 100644
index 0000000000..dfa580c583
--- /dev/null
+++ b/meta/classes/base.bbclass
@@ -0,0 +1,654 @@
BB_DEFAULT_TASK ?= "build"
CLASSOVERRIDE ?= "class-target"

inherit patch
inherit staging

inherit mirrors
inherit utils
inherit utility-tasks
inherit metadata_scm
inherit logging

OE_IMPORTS += "os sys time oe.path oe.utils oe.data oe.package oe.packagegroup oe.sstatesig oe.lsb oe.cachedpath"
OE_IMPORTS[type] = "list"

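# Prepend each layer's lib/ directory to sys.path and import the modules
# listed in OE_IMPORTS so they are available to metadata Python code.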
def oe_import(d):
    import sys

    bbpath = d.getVar("BBPATH", True).split(":")
    sys.path[0:0] = [os.path.join(dir, "lib") for dir in bbpath]

    def inject(name, value):
        """Make a python object accessible from the metadata"""
        if hasattr(bb.utils, "_context"):
            bb.utils._context[name] = value
        else:
            __builtins__[name] = value

    import oe.data
    for toimport in oe.data.typed_value("OE_IMPORTS", d):
        imported = __import__(toimport)
        inject(toimport.split(".", 1)[0], imported)

python oe_import_eh () {
    oe_import(e.data)
    e.data.setVar("NATIVELSBSTRING", lsb_distro_identifier(e.data))
}

addhandler oe_import_eh
oe_import_eh[eventmask] = "bb.event.ConfigParsed"

def lsb_distro_identifier(d):
    adjust = d.getVar('LSB_DISTRO_ADJUST', True)
    adjust_func = None
    if adjust:
        try:
            adjust_func = globals()[adjust]
        except KeyError:
            pass
    return oe.lsb.distro_identifier(adjust_func)

die() {
    bbfatal "$*"
}

oe_runmake() {
    bbnote ${MAKE} ${EXTRA_OEMAKE} "$@"
    ${MAKE} ${EXTRA_OEMAKE} "$@" || die "oe_runmake failed"
}

def base_dep_prepend(d):
    #
    # Ideally this would check a flag so we would operate properly in
    # the case where host == build == target; for now we don't handle
    # that case.
    #

    deps = ""
    # INHIBIT_DEFAULT_DEPS doesn't apply to the patch command. Whether or not
    # we need that built is the responsibility of the patch function / class, not
    # the application.
    if not d.getVar('INHIBIT_DEFAULT_DEPS'):
        if (d.getVar('HOST_SYS', True) != d.getVar('BUILD_SYS', True)):
            deps += " virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc "
    return deps

BASEDEPENDS = "${@base_dep_prepend(d)}"

DEPENDS_prepend="${BASEDEPENDS} "

FILESPATH = "${@base_set_filespath(["${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files"], d)}"
# THISDIR only works properly with immediate expansion as it has to run
# in the context of the location it's used in (:=)
THISDIR = "${@os.path.dirname(d.getVar('FILE', True))}"

def extra_path_elements(d):
    path = ""
    elements = (d.getVar('EXTRANATIVEPATH', True) or "").split()
    for e in elements:
        path = path + "${STAGING_BINDIR_NATIVE}/" + e + ":"
    return path

PATH_prepend = "${@extra_path_elements(d)}"

addtask fetch
do_fetch[dirs] = "${DL_DIR}"
do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}"
python base_do_fetch() {

    src_uri = (d.getVar('SRC_URI', True) or "").split()
    if len(src_uri) == 0:
        return

    localdata = bb.data.createCopy(d)
    bb.data.update_data(localdata)

    try:
        fetcher = bb.fetch2.Fetch(src_uri, localdata)
        fetcher.download()
    except bb.fetch2.BBFetchException as e:
        raise bb.build.FuncFailed(e)
}

addtask unpack after do_fetch
do_unpack[dirs] = "${WORKDIR}"
do_unpack[cleandirs] = "${S}/patches"
python base_do_unpack() {
    src_uri = (d.getVar('SRC_URI', True) or "").split()
    if len(src_uri) == 0:
        return

    localdata = bb.data.createCopy(d)
    bb.data.update_data(localdata)

    rootdir = localdata.getVar('WORKDIR', True)

    try:
        fetcher = bb.fetch2.Fetch(src_uri, localdata)
        fetcher.unpack(rootdir)
    except bb.fetch2.BBFetchException as e:
        raise bb.build.FuncFailed(e)
}

def pkgarch_mapping(d):
    # Compatibility mappings of TUNE_PKGARCH (opt in)
    if d.getVar("PKGARCHCOMPAT_ARMV7A", True):
        if d.getVar("TUNE_PKGARCH", True) == "armv7a-vfp-neon":
            d.setVar("TUNE_PKGARCH", "armv7a")

def preferred_ml_updates(d):
    # If any PREFERRED_PROVIDER or PREFERRED_VERSION variables are set,
    # we need to mirror them for the multilib case.
    multilibs = d.getVar('MULTILIBS', True) or ""
    if not multilibs:
        return

    prefixes = []
    for ext in multilibs.split():
        eext = ext.split(':')
        if len(eext) > 1 and eext[0] == 'multilib':
            prefixes.append(eext[1])

    versions = []
    providers = []
    for v in d.keys():
        if v.startswith("PREFERRED_VERSION_"):
            versions.append(v)
        if v.startswith("PREFERRED_PROVIDER_"):
            providers.append(v)

    for v in versions:
        val = d.getVar(v, False)
        pkg = v.replace("PREFERRED_VERSION_", "")
        if pkg.endswith(("-native", "-crosssdk")) or pkg.startswith(("nativesdk-", "virtual/nativesdk-")):
            continue
        if 'cross-canadian' in pkg:
            for p in prefixes:
                localdata = bb.data.createCopy(d)
                override = ":virtclass-multilib-" + p
                localdata.setVar("OVERRIDES", localdata.getVar("OVERRIDES", False) + override)
                bb.data.update_data(localdata)
                newname = localdata.expand(v)
                if newname != v:
                    newval = localdata.expand(val)
                    d.setVar(newname, newval)
            # Avoid future variable key expansion
            vexp = d.expand(v)
            if v != vexp and d.getVar(v, False):
                d.renameVar(v, vexp)
            continue
        for p in prefixes:
            newname = "PREFERRED_VERSION_" + p + "-" + pkg
            if not d.getVar(newname, False):
                d.setVar(newname, val)

    for prov in providers:
        val = d.getVar(prov, False)
        pkg = prov.replace("PREFERRED_PROVIDER_", "")
        if pkg.endswith(("-native", "-crosssdk")) or pkg.startswith(("nativesdk-", "virtual/nativesdk-")):
            continue
        if 'cross-canadian' in pkg:
            for p in prefixes:
                localdata = bb.data.createCopy(d)
                override = ":virtclass-multilib-" + p
                localdata.setVar("OVERRIDES", localdata.getVar("OVERRIDES", False) + override)
                bb.data.update_data(localdata)
                newname = localdata.expand(prov)
                if newname != prov:
                    newval = localdata.expand(val)
                    d.setVar(newname, newval)
            # Avoid future variable key expansion
            provexp = d.expand(prov)
            if prov != provexp and d.getVar(prov, False):
                d.renameVar(prov, provexp)
            continue
        virt = ""
        if pkg.startswith("virtual/"):
            pkg = pkg.replace("virtual/", "")
            virt = "virtual/"
        for p in prefixes:
            if pkg != "kernel":
                val = p + "-" + val

            # implement variable keys
            localdata = bb.data.createCopy(d)
            override = ":virtclass-multilib-" + p
            localdata.setVar("OVERRIDES", localdata.getVar("OVERRIDES", False) + override)
            bb.data.update_data(localdata)
            newname = localdata.expand(prov)
            if newname != prov and not d.getVar(newname, False):
                d.setVar(newname, localdata.expand(val))

            # implement alternative multilib name
            newname = localdata.expand("PREFERRED_PROVIDER_" + virt + p + "-" + pkg)
            if not d.getVar(newname, False):
                d.setVar(newname, val)
        # Avoid future variable key expansion
        provexp = d.expand(prov)
        if prov != provexp and d.getVar(prov, False):
            d.renameVar(prov, provexp)


    mp = (d.getVar("MULTI_PROVIDER_WHITELIST", True) or "").split()
    extramp = []
    for p in mp:
        if p.endswith(("-native", "-crosssdk")) or p.startswith(("nativesdk-", "virtual/nativesdk-")) or 'cross-canadian' in p:
            continue
        virt = ""
        if p.startswith("virtual/"):
            p = p.replace("virtual/", "")
            virt = "virtual/"
        for pref in prefixes:
            extramp.append(virt + pref + "-" + p)
    d.setVar("MULTI_PROVIDER_WHITELIST", " ".join(mp + extramp))

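# Build "layername = branch:revision" lines for each layer in BBLAYERS for the
# build configuration banner; where consecutive layers share the same
# branch:revision, only the last entry in the run keeps the value.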
def get_layers_branch_rev(d):
    layers = (d.getVar("BBLAYERS", True) or "").split()
    layers_branch_rev = ["%-17s = \"%s:%s\"" % (os.path.basename(i), \
        base_get_metadata_git_branch(i, None).strip(), \
        base_get_metadata_git_revision(i, None)) \
            for i in layers]
    i = len(layers_branch_rev)-1
    p1 = layers_branch_rev[i].find("=")
    s1 = layers_branch_rev[i][p1:]
    while i > 0:
        p2 = layers_branch_rev[i-1].find("=")
        s2 = layers_branch_rev[i-1][p2:]
        if s1 == s2:
            layers_branch_rev[i-1] = layers_branch_rev[i-1][0:p2]
            i -= 1
        else:
            i -= 1
            p1 = layers_branch_rev[i].find("=")
            s1 = layers_branch_rev[i][p1:]
    return layers_branch_rev


BUILDCFG_FUNCS ??= "buildcfg_vars get_layers_branch_rev buildcfg_neededvars"
BUILDCFG_FUNCS[type] = "list"

def buildcfg_vars(d):
    statusvars = oe.data.typed_value('BUILDCFG_VARS', d)
    for var in statusvars:
        value = d.getVar(var, True)
        if value is not None:
            yield '%-17s = "%s"' % (var, value)

def buildcfg_neededvars(d):
    needed_vars = oe.data.typed_value("BUILDCFG_NEEDEDVARS", d)
    pesteruser = []
    for v in needed_vars:
        val = d.getVar(v, True)
        if not val or val == 'INVALID':
            pesteruser.append(v)

    if pesteruser:
        bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))

addhandler base_eventhandler
base_eventhandler[eventmask] = "bb.event.ConfigParsed bb.event.BuildStarted"
python base_eventhandler() {
    if isinstance(e, bb.event.ConfigParsed):
        e.data.setVar('BB_VERSION', bb.__version__)
        pkgarch_mapping(e.data)
        preferred_ml_updates(e.data)
        oe.utils.features_backfill("DISTRO_FEATURES", e.data)
        oe.utils.features_backfill("MACHINE_FEATURES", e.data)

    if isinstance(e, bb.event.BuildStarted):
        localdata = bb.data.createCopy(e.data)
        bb.data.update_data(localdata)
        statuslines = []
        for func in oe.data.typed_value('BUILDCFG_FUNCS', localdata):
            g = globals()
            if func not in g:
                bb.warn("Build configuration function '%s' does not exist" % func)
            else:
                flines = g[func](localdata)
                if flines:
                    statuslines.extend(flines)

        statusheader = e.data.getVar('BUILDCFG_HEADER', True)
        bb.plain('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))
}

addtask configure after do_patch
do_configure[dirs] = "${S} ${B}"
do_configure[deptask] = "do_populate_sysroot"
base_do_configure() {
    :
}

addtask compile after do_configure
do_compile[dirs] = "${S} ${B}"
base_do_compile() {
    if [ -e Makefile -o -e makefile -o -e GNUmakefile ]; then
        oe_runmake || die "make failed"
    else
        bbnote "nothing to compile"
    fi
}

addtask install after do_compile
do_install[dirs] = "${D} ${S} ${B}"
# Remove and re-create ${D} so that it is guaranteed to be empty
do_install[cleandirs] = "${D}"

base_do_install() {
    :
}

base_do_package() {
    :
}

addtask build after do_populate_sysroot
do_build = ""
do_build[func] = "1"
do_build[noexec] = "1"
do_build[recrdeptask] += "do_deploy"
do_build () {
    :
}

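# Populate PKGTRIPLETS and PKGMLTRIPLETS with the arch-vendor-os triplets for
# the base configuration and each MULTILIB_VARIANTS override.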
def set_packagetriplet(d):
    archs = []
    tos = []
    tvs = []

    archs.append(d.getVar("PACKAGE_ARCHS", True).split())
    tos.append(d.getVar("TARGET_OS", True))
    tvs.append(d.getVar("TARGET_VENDOR", True))

    def settriplet(d, varname, archs, tos, tvs):
        triplets = []
        for i in range(len(archs)):
            for arch in archs[i]:
                triplets.append(arch + tvs[i] + "-" + tos[i])
        triplets.reverse()
        d.setVar(varname, " ".join(triplets))

    settriplet(d, "PKGTRIPLETS", archs, tos, tvs)

    variants = d.getVar("MULTILIB_VARIANTS", True) or ""
    for item in variants.split():
        localdata = bb.data.createCopy(d)
        overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
        localdata.setVar("OVERRIDES", overrides)
        bb.data.update_data(localdata)

        archs.append(localdata.getVar("PACKAGE_ARCHS", True).split())
        tos.append(localdata.getVar("TARGET_OS", True))
        tvs.append(localdata.getVar("TARGET_VENDOR", True))

    settriplet(d, "PKGMLTRIPLETS", archs, tos, tvs)

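# Anonymous function run at parse time: expands PACKAGECONFIG options into
# DEPENDS/RDEPENDS/EXTRA_OECONF (or EXTRA_OECMAKE), bumps PR by PRINC, enforces
# LICENSE, COMPATIBLE_HOST/COMPATIBLE_MACHINE and INCOMPATIBLE_LICENSE checks,
# sets up fakeroot for target packaging tasks, adds fetcher tool dependencies
# based on SRC_URI, and applies machine-specific PACKAGE_ARCH handling.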
python () {
    import string, re

    # Handle PACKAGECONFIG
    #
    # These take the form:
    #
    # PACKAGECONFIG ??= "<default options>"
    # PACKAGECONFIG[foo] = "--enable-foo,--disable-foo,foo_depends,foo_runtime_depends"
    pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {}
    if pkgconfigflags:
        pkgconfig = (d.getVar('PACKAGECONFIG', True) or "").split()
        pn = d.getVar("PN", True)
        mlprefix = d.getVar("MLPREFIX", True)

        def expandFilter(appends, extension, prefix):
            appends = bb.utils.explode_deps(d.expand(" ".join(appends)))
            newappends = []
            for a in appends:
                if a.endswith("-native") or a.endswith("-cross"):
                    newappends.append(a)
                elif a.startswith("virtual/"):
                    subs = a.split("/", 1)[1]
                    newappends.append("virtual/" + prefix + subs + extension)
                else:
                    if a.startswith(prefix):
                        newappends.append(a + extension)
                    else:
                        newappends.append(prefix + a + extension)
            return newappends

        def appendVar(varname, appends):
            if not appends:
                return
            if varname.find("DEPENDS") != -1:
                if pn.startswith("nativesdk-"):
                    appends = expandFilter(appends, "", "nativesdk-")
                if pn.endswith("-native"):
                    appends = expandFilter(appends, "-native", "")
                if mlprefix:
                    appends = expandFilter(appends, "", mlprefix)
            varname = d.expand(varname)
            d.appendVar(varname, " " + " ".join(appends))

        extradeps = []
        extrardeps = []
        extraconf = []
        for flag, flagval in pkgconfigflags.items():
            if flag == "defaultval":
                continue
            items = flagval.split(",")
            num = len(items)
            if num > 4:
                bb.error("Only enable,disable,depend,rdepend can be specified!")

            if flag in pkgconfig:
                if num >= 3 and items[2]:
                    extradeps.append(items[2])
                if num >= 4 and items[3]:
                    extrardeps.append(items[3])
                if num >= 1 and items[0]:
                    extraconf.append(items[0])
            elif num >= 2 and items[1]:
                extraconf.append(items[1])
        appendVar('DEPENDS', extradeps)
        appendVar('RDEPENDS_${PN}', extrardeps)
        if bb.data.inherits_class('cmake', d):
            appendVar('EXTRA_OECMAKE', extraconf)
        else:
            appendVar('EXTRA_OECONF', extraconf)

    # If PRINC is set, try and increase the PR value by the amount specified
    princ = d.getVar('PRINC', True)
    if princ and princ != "0":
        pr = d.getVar('PR', True)
        pr_prefix = re.search("\D+",pr)
        prval = re.search("\d+",pr)
        if pr_prefix is None or prval is None:
            bb.error("Unable to analyse format of PR variable: %s" % pr)
        nval = int(prval.group(0)) + int(princ)
        pr = pr_prefix.group(0) + str(nval) + pr[prval.end():]
        d.setVar('PR', pr)

    pn = d.getVar('PN', True)
    license = d.getVar('LICENSE', True)
    if license == "INVALID":
        bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn)

    if bb.data.inherits_class('license', d):
        unmatched_license_flag = check_license_flags(d)
        if unmatched_license_flag:
            bb.debug(1, "Skipping %s because it has a restricted license not"
                 " whitelisted in LICENSE_FLAGS_WHITELIST" % pn)
            raise bb.parse.SkipPackage("because it has a restricted license not"
                 " whitelisted in LICENSE_FLAGS_WHITELIST")

    # If we're building a target package we need to use fakeroot (pseudo)
    # in order to capture permissions, owners, groups and special files
    if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('cross', d):
        d.setVarFlag('do_configure', 'umask', 022)
        d.setVarFlag('do_compile', 'umask', 022)
        d.appendVarFlag('do_install', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_install', 'fakeroot', 1)
        d.setVarFlag('do_install', 'umask', 022)
        d.appendVarFlag('do_package', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_package', 'fakeroot', 1)
        d.setVarFlag('do_package', 'umask', 022)
        d.setVarFlag('do_package_setscene', 'fakeroot', 1)
        d.appendVarFlag('do_package_setscene', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_devshell', 'fakeroot', 1)
        d.appendVarFlag('do_devshell', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
    source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', 0)
    if not source_mirror_fetch:
        need_host = d.getVar('COMPATIBLE_HOST', True)
        if need_host:
            import re
            this_host = d.getVar('HOST_SYS', True)
            if not re.match(need_host, this_host):
                raise bb.parse.SkipPackage("incompatible with host %s (not in COMPATIBLE_HOST)" % this_host)

        need_machine = d.getVar('COMPATIBLE_MACHINE', True)
        if need_machine:
            import re
            compat_machines = (d.getVar('MACHINEOVERRIDES', True) or "").split(":")
            for m in compat_machines:
                if re.match(need_machine, m):
                    break
            else:
                raise bb.parse.SkipPackage("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % d.getVar('MACHINE', True))


        bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE', True) or "").split()

        check_license = False if pn.startswith("nativesdk-") else True
        for t in ["-native", "-cross", "-cross-initial", "-cross-intermediate",
              "-crosssdk-intermediate", "-crosssdk", "-crosssdk-initial",
              "-cross-canadian-" + d.getVar('TRANSLATED_TARGET_ARCH', True)]:
            if pn.endswith(t):
                check_license = False

        if check_license and bad_licenses:
            whitelist = []
            for lic in bad_licenses:
                for w in ["HOSTTOOLS_WHITELIST_", "LGPLv2_WHITELIST_", "WHITELIST_"]:
                    whitelist.extend((d.getVar(w + lic, True) or "").split())
                spdx_license = return_spdx(d, lic)
                if spdx_license:
                    whitelist.extend((d.getVar('HOSTTOOLS_WHITELIST_%s' % spdx_license, True) or "").split())
            if not pn in whitelist:
                recipe_license = d.getVar('LICENSE', True)
                pkgs = d.getVar('PACKAGES', True).split()
                skipped_pkgs = []
                unskipped_pkgs = []
                for pkg in pkgs:
                    if incompatible_license(d, bad_licenses, pkg):
                        skipped_pkgs.append(pkg)
                    else:
                        unskipped_pkgs.append(pkg)
                all_skipped = skipped_pkgs and not unskipped_pkgs
                if unskipped_pkgs:
                    for pkg in skipped_pkgs:
                        bb.debug(1, "SKIPPING the package " + pkg + " at do_rootfs because it's " + recipe_license)
                        d.setVar('LICENSE_EXCLUSION-' + pkg, 1)
                    for pkg in unskipped_pkgs:
                        bb.debug(1, "INCLUDING the package " + pkg)
                elif all_skipped or incompatible_license(d, bad_licenses):
                    bb.debug(1, "SKIPPING recipe %s because it's %s" % (pn, recipe_license))
                    raise bb.parse.SkipPackage("incompatible with license %s" % recipe_license)

    srcuri = d.getVar('SRC_URI', True)
    # Svn packages should DEPEND on subversion-native
    if "svn://" in srcuri:
        d.appendVarFlag('do_fetch', 'depends', ' subversion-native:do_populate_sysroot')

    # Git packages should DEPEND on git-native
    if "git://" in srcuri:
        d.appendVarFlag('do_fetch', 'depends', ' git-native:do_populate_sysroot')

    # Mercurial packages should DEPEND on mercurial-native
    elif "hg://" in srcuri:
        d.appendVarFlag('do_fetch', 'depends', ' mercurial-native:do_populate_sysroot')

    # OSC packages should DEPEND on osc-native
    elif "osc://" in srcuri:
        d.appendVarFlag('do_fetch', 'depends', ' osc-native:do_populate_sysroot')

    # *.xz should depend on xz-native for unpacking
    # Not endswith because of "*.patch.xz;patch=1". Need bb.fetch.decodeurl in future
    if '.xz' in srcuri:
        d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

    # unzip-native should already be staged before unpacking ZIP recipes
    if ".zip" in srcuri:
        d.appendVarFlag('do_unpack', 'depends', ' unzip-native:do_populate_sysroot')

    set_packagetriplet(d)

    # 'multimachine' handling
    mach_arch = d.getVar('MACHINE_ARCH', True)
    pkg_arch = d.getVar('PACKAGE_ARCH', True)

    if (pkg_arch == mach_arch):
        # Already machine specific - nothing further to do
        return

    #
    # We always try to scan SRC_URI for urls with machine overrides
    # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
    #
    override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH', True)
    if override != '0':
        paths = []
        fpaths = (d.getVar('FILESPATH', True) or '').split(':')
        machine = d.getVar('MACHINE', True)
        for p in fpaths:
            if os.path.basename(p) == machine and os.path.isdir(p):
                paths.append(p)

        if len(paths) != 0:
            for s in srcuri.split():
                if not s.startswith("file://"):
                    continue
                fetcher = bb.fetch2.Fetch([s], d)
                local = fetcher.localpath(s)
                for mp in paths:
                    if local.startswith(mp):
                        #bb.note("overriding PACKAGE_ARCH from %s to %s for %s" % (pkg_arch, mach_arch, pn))
                        d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
                        return

    packages = d.getVar('PACKAGES', True).split()
    for pkg in packages:
        pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg, True)

        # We could look for != PACKAGE_ARCH here but how to choose
        # if multiple differences are present?
        # Look through PACKAGE_ARCHS for the priority order?
        if pkgarch and pkgarch == mach_arch:
            d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
            bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." % d.getVar("PN", True))
}

addtask cleansstate after do_clean
python do_cleansstate() {
    sstate_clean_cachefiles(d)
}

addtask cleanall after do_cleansstate
python do_cleanall() {
    src_uri = (d.getVar('SRC_URI', True) or "").split()
    if len(src_uri) == 0:
        return

    localdata = bb.data.createCopy(d)
    bb.data.update_data(localdata)

    try:
        fetcher = bb.fetch2.Fetch(src_uri, localdata)
        fetcher.clean()
    except bb.fetch2.BBFetchException as e:
        raise bb.build.FuncFailed(e)
}
do_cleanall[nostamp] = "1"


EXPORT_FUNCTIONS do_fetch do_unpack do_configure do_compile do_install do_package