Diffstat (limited to 'meta/classes/base.bbclass')
-rw-r--r-- meta/classes/base.bbclass | 735
1 file changed, 0 insertions(+), 735 deletions(-)
diff --git a/meta/classes/base.bbclass b/meta/classes/base.bbclass
deleted file mode 100644
index b4160402f0..0000000000
--- a/meta/classes/base.bbclass
+++ /dev/null
@@ -1,735 +0,0 @@
BB_DEFAULT_TASK ?= "build"
CLASSOVERRIDE ?= "class-target"

inherit patch
inherit staging

inherit mirrors
inherit utils
inherit utility-tasks
inherit metadata_scm
inherit logging

OE_EXTRA_IMPORTS ?= ""

OE_IMPORTS += "os sys time oe.path oe.utils oe.types oe.package oe.packagegroup oe.sstatesig oe.lsb oe.cachedpath oe.license ${OE_EXTRA_IMPORTS}"
OE_IMPORTS[type] = "list"

PACKAGECONFIG_CONFARGS ??= ""

def oe_import(d):
    import sys

    bbpath = d.getVar("BBPATH").split(":")
    sys.path[0:0] = [os.path.join(dir, "lib") for dir in bbpath]

    def inject(name, value):
        """Make a python object accessible from the metadata"""
        if hasattr(bb.utils, "_context"):
            bb.utils._context[name] = value
        else:
            __builtins__[name] = value

    import oe.data
    for toimport in oe.data.typed_value("OE_IMPORTS", d):
        try:
            imported = __import__(toimport)
            inject(toimport.split(".", 1)[0], imported)
        except AttributeError as e:
            bb.error("Error importing OE modules: %s" % str(e))
    return ""

# We need the oe module namespace early (before INHERITs get added)
OE_IMPORTED := "${@oe_import(d)}"
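# The ":=" (immediate expansion) assignment forces oe_import() to run right
# here at parse time, so the oe.* Python modules above are available to any
# class inherited later.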

def lsb_distro_identifier(d):
    adjust = d.getVar('LSB_DISTRO_ADJUST')
    adjust_func = None
    if adjust:
        try:
            adjust_func = globals()[adjust]
        except KeyError:
            pass
    return oe.lsb.distro_identifier(adjust_func)
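# LSB_DISTRO_ADJUST may name a function, defined elsewhere in the metadata,
# that post-processes the detected distro identifier; unknown names are
# silently ignored above.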

die() {
	bbfatal_log "$*"
}

oe_runmake_call() {
	bbnote ${MAKE} ${EXTRA_OEMAKE} "$@"
	${MAKE} ${EXTRA_OEMAKE} "$@"
}

oe_runmake() {
	oe_runmake_call "$@" || die "oe_runmake failed"
}
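# Illustrative use from a recipe task (hypothetical arguments):
#   oe_runmake -C ${S} DESTDIR=${D} install
# All arguments are passed straight through to ${MAKE}; any failure aborts
# the task via die/bbfatal_log above.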


def base_dep_prepend(d):
    if d.getVar('INHIBIT_DEFAULT_DEPS', False):
        return ""
    return "${BASE_DEFAULT_DEPS}"

BASE_DEFAULT_DEPS = "virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc"

BASEDEPENDS = ""
BASEDEPENDS_class-target = "${@base_dep_prepend(d)}"
BASEDEPENDS_class-nativesdk = "${@base_dep_prepend(d)}"

DEPENDS_prepend="${BASEDEPENDS} "
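# Recipes that must not pull in the default toolchain/libc dependencies
# (for example ones that only install data or scripts) can opt out with:
#   INHIBIT_DEFAULT_DEPS = "1"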

FILESPATH = "${@base_set_filespath(["${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files"], d)}"
# THISDIR only works properly with immediate expansion as it has to run
# in the context of the location it's used in (:=)
THISDIR = "${@os.path.dirname(d.getVar('FILE'))}"

def extra_path_elements(d):
    path = ""
    elements = (d.getVar('EXTRANATIVEPATH') or "").split()
    for e in elements:
        path = path + "${STAGING_BINDIR_NATIVE}/" + e + ":"
    return path

PATH_prepend = "${@extra_path_elements(d)}"
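# Illustration (hypothetical recipe value): EXTRANATIVEPATH = "chrpath-native"
# prepends "${STAGING_BINDIR_NATIVE}/chrpath-native:" to PATH for that recipe.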

def get_lic_checksum_file_list(d):
    filelist = []
    lic_files = d.getVar("LIC_FILES_CHKSUM") or ''
    tmpdir = d.getVar("TMPDIR")
    s = d.getVar("S")
    b = d.getVar("B")
    workdir = d.getVar("WORKDIR")

    urls = lic_files.split()
    for url in urls:
        # We only care about items that are absolute paths since
        # any others should be covered by SRC_URI.
        try:
            (method, host, path, user, pswd, parm) = bb.fetch.decodeurl(url)
            if method != "file" or not path:
                raise bb.fetch.MalformedUrl(url)

            if path[0] == '/':
                if path.startswith((tmpdir, s, b, workdir)):
                    continue
                filelist.append(path + ":" + str(os.path.exists(path)))
        except bb.fetch.MalformedUrl:
            bb.fatal(d.getVar('PN') + ": LIC_FILES_CHKSUM contains an invalid URL: " + url)
    return " ".join(filelist)

def setup_hosttools_dir(dest, toolsvar, d, fatal=True):
    tools = d.getVar(toolsvar).split()
    origbbenv = d.getVar("BB_ORIGENV", False)
    path = origbbenv.getVar("PATH")
    bb.utils.mkdirhier(dest)
    notfound = []
    for tool in tools:
        desttool = os.path.join(dest, tool)
        if not os.path.exists(desttool):
            # clean up dead symlink
            if os.path.islink(desttool):
                os.unlink(desttool)
            srctool = bb.utils.which(path, tool, executable=True)
            # gcc/g++ may link to ccache on some hosts, e.g.
            # /usr/local/bin/ccache/gcc -> /usr/bin/ccache; which(gcc) would
            # then return /usr/local/bin/ccache/gcc, but what we need is
            # /usr/bin/gcc. Detect that case and fix it here.
138 if "ccache" in srctool:
139 srctool = bb.utils.which(path, tool, executable=True, direction=1)
140 if srctool:
141 os.symlink(srctool, desttool)
142 else:
143 notfound.append(tool)
144
145 if notfound and fatal:
146 bb.fatal("The following required tools (as specified by HOSTTOOLS) appear to be unavailable in PATH, please install them in order to proceed:\n %s" % " ".join(notfound))
147
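# A minimal sketch of the result, assuming HOSTTOOLS contains "gcc" and the
# host compiler lives in /usr/bin:
#   ${HOSTTOOLS_DIR}/gcc -> /usr/bin/gcc
# layer.conf points PATH at ${HOSTTOOLS_DIR}, so builds only see the
# whitelisted host tools.
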
addtask fetch
do_fetch[dirs] = "${DL_DIR}"
do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}"
do_fetch[file-checksums] += " ${@get_lic_checksum_file_list(d)}"
do_fetch[vardeps] += "SRCREV"
python base_do_fetch() {

    src_uri = (d.getVar('SRC_URI') or "").split()
    if len(src_uri) == 0:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.download()
    except bb.fetch2.BBFetchException as e:
        bb.fatal(str(e))
}

addtask unpack after do_fetch
do_unpack[dirs] = "${WORKDIR}"

do_unpack[cleandirs] = "${@d.getVar('S') if os.path.normpath(d.getVar('S')) != os.path.normpath(d.getVar('WORKDIR')) else os.path.join('${S}', 'patches')}"
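# i.e. wipe ${S} before unpacking, unless S == WORKDIR, in which case only
# ${S}/patches is cleaned so the rest of WORKDIR survives the unpack.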

python base_do_unpack() {
    src_uri = (d.getVar('SRC_URI') or "").split()
    if len(src_uri) == 0:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.unpack(d.getVar('WORKDIR'))
    except bb.fetch2.BBFetchException as e:
        bb.fatal(str(e))
}

def get_layers_branch_rev(d):
    layers = (d.getVar("BBLAYERS") or "").split()
    layers_branch_rev = ["%-20s = \"%s:%s\"" % (os.path.basename(i), \
        base_get_metadata_git_branch(i, None).strip(), \
        base_get_metadata_git_revision(i, None)) \
            for i in layers]
    i = len(layers_branch_rev)-1
    p1 = layers_branch_rev[i].find("=")
    s1 = layers_branch_rev[i][p1:]
    while i > 0:
        p2 = layers_branch_rev[i-1].find("=")
        s2 = layers_branch_rev[i-1][p2:]
        if s1 == s2:
            layers_branch_rev[i-1] = layers_branch_rev[i-1][0:p2]
            i -= 1
        else:
            i -= 1
            p1 = layers_branch_rev[i].find("=")
            s1 = layers_branch_rev[i][p1:]
    return layers_branch_rev
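# Consecutive layers sharing the same "branch:rev" suffix are collapsed so the
# build banner prints the value only once per run, e.g. (illustrative output):
#   meta
#   meta-poky            = "master:0123abc..."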


BUILDCFG_FUNCS ??= "buildcfg_vars get_layers_branch_rev buildcfg_neededvars"
BUILDCFG_FUNCS[type] = "list"

def buildcfg_vars(d):
    statusvars = oe.data.typed_value('BUILDCFG_VARS', d)
    for var in statusvars:
        value = d.getVar(var)
        if value is not None:
            yield '%-20s = "%s"' % (var, value)

def buildcfg_neededvars(d):
    needed_vars = oe.data.typed_value("BUILDCFG_NEEDEDVARS", d)
    pesteruser = []
    for v in needed_vars:
        val = d.getVar(v)
        if not val or val == 'INVALID':
            pesteruser.append(v)

    if pesteruser:
        bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))

addhandler base_eventhandler
base_eventhandler[eventmask] = "bb.event.ConfigParsed bb.event.MultiConfigParsed bb.event.BuildStarted bb.event.RecipePreFinalise bb.event.RecipeParsed"
python base_eventhandler() {
    import bb.runqueue

    if isinstance(e, bb.event.ConfigParsed):
        if not d.getVar("NATIVELSBSTRING", False):
            d.setVar("NATIVELSBSTRING", lsb_distro_identifier(d))
        d.setVar("ORIGNATIVELSBSTRING", d.getVar("NATIVELSBSTRING", False))
        d.setVar('BB_VERSION', bb.__version__)

    # There might be no bb.event.ConfigParsed event if bitbake server is
    # running, so check bb.event.BuildStarted too to make sure ${HOSTTOOLS_DIR}
    # exists.
    if isinstance(e, bb.event.ConfigParsed) or \
            (isinstance(e, bb.event.BuildStarted) and not os.path.exists(d.getVar('HOSTTOOLS_DIR'))):
        # Works with the line in layer.conf which changes PATH to point here
        setup_hosttools_dir(d.getVar('HOSTTOOLS_DIR'), 'HOSTTOOLS', d)
        setup_hosttools_dir(d.getVar('HOSTTOOLS_DIR'), 'HOSTTOOLS_NONFATAL', d, fatal=False)

    if isinstance(e, bb.event.MultiConfigParsed):
        # We need to expand SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS in each of the multiconfig data stores'
        # own contexts so the variables get expanded correctly for that arch, then inject the result
        # back into the main data store.
        deps = []
        for config in e.mcdata:
            deps.append(e.mcdata[config].getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS"))
        deps = " ".join(deps)
        e.mcdata[''].setVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS", deps)

    if isinstance(e, bb.event.BuildStarted):
        localdata = bb.data.createCopy(d)
        statuslines = []
        for func in oe.data.typed_value('BUILDCFG_FUNCS', localdata):
            g = globals()
            if func not in g:
                bb.warn("Build configuration function '%s' does not exist" % func)
            else:
                flines = g[func](localdata)
                if flines:
                    statuslines.extend(flines)

        statusheader = d.getVar('BUILDCFG_HEADER')
        if statusheader:
            bb.plain('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))

    # This code is to silence warnings where the SDK variables overwrite the
    # target ones and we'd see duplicate key names overwriting each other
    # for various PREFERRED_PROVIDERS
    if isinstance(e, bb.event.RecipePreFinalise):
        if d.getVar("TARGET_PREFIX") == d.getVar("SDK_PREFIX"):
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}binutils")
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc")
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}g++")
            d.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}compilerlibs")

    if isinstance(e, bb.event.RecipeParsed):
        #
        # If we have multiple providers of virtual/X and a PREFERRED_PROVIDER_virtual/X is set
        # skip parsing for all the other providers which will mean they get uninstalled from the
        # sysroot since they're now "unreachable". This makes switching virtual/kernel work in
        # particular.
        #
        pn = d.getVar('PN')
        source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
        if not source_mirror_fetch:
            provs = (d.getVar("PROVIDES") or "").split()
            multiwhitelist = (d.getVar("MULTI_PROVIDER_WHITELIST") or "").split()
            for p in provs:
                if p.startswith("virtual/") and p not in multiwhitelist:
                    profprov = d.getVar("PREFERRED_PROVIDER_" + p)
                    if profprov and pn != profprov:
                        raise bb.parse.SkipRecipe("PREFERRED_PROVIDER_%s set to %s, not %s" % (p, profprov, pn))
}

CONFIGURESTAMPFILE = "${WORKDIR}/configure.sstate"
CLEANBROKEN = "0"

addtask configure after do_patch
do_configure[dirs] = "${B}"
base_do_configure() {
	if [ -n "${CONFIGURESTAMPFILE}" -a -e "${CONFIGURESTAMPFILE}" ]; then
		if [ "`cat ${CONFIGURESTAMPFILE}`" != "${BB_TASKHASH}" ]; then
			cd ${B}
			if [ "${CLEANBROKEN}" != "1" -a \( -e Makefile -o -e makefile -o -e GNUmakefile \) ]; then
				oe_runmake clean
			fi
			# -ignore_readdir_race does not work correctly with -delete;
			# use xargs to avoid spurious build failures
			find ${B} -ignore_readdir_race -name \*.la -type f -print0 | xargs -0 rm -f
		fi
	fi
	if [ -n "${CONFIGURESTAMPFILE}" ]; then
		mkdir -p `dirname ${CONFIGURESTAMPFILE}`
		echo ${BB_TASKHASH} > ${CONFIGURESTAMPFILE}
	fi
}
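# The stamp file records BB_TASKHASH: when the configure task's hash changes,
# the old build tree is cleaned (oe_runmake clean plus removal of stale .la
# files) before reconfiguring. Setting CLEANBROKEN = "1" skips the
# "make clean" step for packages whose clean target is broken.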

addtask compile after do_configure
do_compile[dirs] = "${B}"
base_do_compile() {
	if [ -e Makefile -o -e makefile -o -e GNUmakefile ]; then
		oe_runmake || die "make failed"
	else
		bbnote "nothing to compile"
	fi
}

addtask install after do_compile
do_install[dirs] = "${B}"
# Remove and re-create ${D} so that it is guaranteed to be empty
do_install[cleandirs] = "${D}"

base_do_install() {
	:
}

base_do_package() {
	:
}

addtask build after do_populate_sysroot
do_build[noexec] = "1"
do_build[recrdeptask] += "do_deploy"
do_build () {
	:
}

def set_packagetriplet(d):
    archs = []
    tos = []
    tvs = []

    archs.append(d.getVar("PACKAGE_ARCHS").split())
    tos.append(d.getVar("TARGET_OS"))
    tvs.append(d.getVar("TARGET_VENDOR"))

    def settriplet(d, varname, archs, tos, tvs):
        triplets = []
        for i in range(len(archs)):
            for arch in archs[i]:
                triplets.append(arch + tvs[i] + "-" + tos[i])
        triplets.reverse()
        d.setVar(varname, " ".join(triplets))

    settriplet(d, "PKGTRIPLETS", archs, tos, tvs)

    variants = d.getVar("MULTILIB_VARIANTS") or ""
    for item in variants.split():
        localdata = bb.data.createCopy(d)
        overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
        localdata.setVar("OVERRIDES", overrides)

        archs.append(localdata.getVar("PACKAGE_ARCHS").split())
        tos.append(localdata.getVar("TARGET_OS"))
        tvs.append(localdata.getVar("TARGET_VENDOR"))

    settriplet(d, "PKGMLTRIPLETS", archs, tos, tvs)

python () {
    import string, re

    # Handle backfilling
    oe.utils.features_backfill("DISTRO_FEATURES", d)
    oe.utils.features_backfill("MACHINE_FEATURES", d)

    if d.getVar("S")[-1] == '/':
        bb.warn("Recipe %s sets S variable with trailing slash '%s', remove it" % (d.getVar("PN"), d.getVar("S")))
    if d.getVar("B")[-1] == '/':
        bb.warn("Recipe %s sets B variable with trailing slash '%s', remove it" % (d.getVar("PN"), d.getVar("B")))

    if os.path.normpath(d.getVar("WORKDIR")) != os.path.normpath(d.getVar("S")):
        d.appendVar("PSEUDO_IGNORE_PATHS", ",${S}")
    if os.path.normpath(d.getVar("WORKDIR")) != os.path.normpath(d.getVar("B")):
        d.appendVar("PSEUDO_IGNORE_PATHS", ",${B}")

    # Handle PACKAGECONFIG
    #
    # These take the form:
    #
    # PACKAGECONFIG ??= "<default options>"
    # PACKAGECONFIG[foo] = "--enable-foo,--disable-foo,foo_depends,foo_runtime_depends,foo_runtime_recommends,foo_conflict_packageconfig"
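    #
    # A concrete (illustrative) example: a recipe exposing an optional "gtk"
    # feature might set
    #   PACKAGECONFIG ??= ""
    #   PACKAGECONFIG[gtk] = "--enable-gtk,--disable-gtk,gtk+3"
    # Trailing fields may be left empty; only the comma positions matter.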
    pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {}
    if pkgconfigflags:
        pkgconfig = (d.getVar('PACKAGECONFIG') or "").split()
        pn = d.getVar("PN")

        mlprefix = d.getVar("MLPREFIX")

        def expandFilter(appends, extension, prefix):
            appends = bb.utils.explode_deps(d.expand(" ".join(appends)))
            newappends = []
            for a in appends:
                if a.endswith("-native") or ("-cross-" in a):
                    newappends.append(a)
                elif a.startswith("virtual/"):
                    subs = a.split("/", 1)[1]
                    if subs.startswith(prefix):
                        newappends.append(a + extension)
                    else:
                        newappends.append("virtual/" + prefix + subs + extension)
                else:
                    if a.startswith(prefix):
                        newappends.append(a + extension)
                    else:
                        newappends.append(prefix + a + extension)
            return newappends

        def appendVar(varname, appends):
            if not appends:
                return
            if varname.find("DEPENDS") != -1:
                if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d):
                    appends = expandFilter(appends, "", "nativesdk-")
                elif bb.data.inherits_class('native', d):
                    appends = expandFilter(appends, "-native", "")
                elif mlprefix:
                    appends = expandFilter(appends, "", mlprefix)
            varname = d.expand(varname)
            d.appendVar(varname, " " + " ".join(appends))

        extradeps = []
        extrardeps = []
        extrarrecs = []
        extraconf = []
        for flag, flagval in sorted(pkgconfigflags.items()):
            items = flagval.split(",")
            num = len(items)
            if num > 6:
                bb.error("%s: PACKAGECONFIG[%s] Only enable,disable,depend,rdepend,rrecommend,conflict_packageconfig can be specified!"
                    % (d.getVar('PN'), flag))

            if flag in pkgconfig:
                if num >= 3 and items[2]:
                    extradeps.append(items[2])
                if num >= 4 and items[3]:
                    extrardeps.append(items[3])
                if num >= 5 and items[4]:
                    extrarrecs.append(items[4])
                if num >= 1 and items[0]:
                    extraconf.append(items[0])
            elif num >= 2 and items[1]:
                extraconf.append(items[1])

            if num >= 6 and items[5]:
                conflicts = set(items[5].split())
                invalid = conflicts.difference(set(pkgconfigflags.keys()))
                if invalid:
                    bb.error("%s: PACKAGECONFIG[%s] Invalid conflict package config%s '%s' specified."
                        % (d.getVar('PN'), flag, 's' if len(invalid) > 1 else '', ' '.join(invalid)))

                if flag in pkgconfig:
                    intersec = conflicts.intersection(set(pkgconfig))
                    if intersec:
                        bb.fatal("%s: PACKAGECONFIG[%s] Conflict package config%s '%s' set in PACKAGECONFIG."
                            % (d.getVar('PN'), flag, 's' if len(intersec) > 1 else '', ' '.join(intersec)))

        appendVar('DEPENDS', extradeps)
        appendVar('RDEPENDS_${PN}', extrardeps)
        appendVar('RRECOMMENDS_${PN}', extrarrecs)
        appendVar('PACKAGECONFIG_CONFARGS', extraconf)
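        # Net effect with the illustrative "gtk" flag above: if "gtk" is in
        # PACKAGECONFIG, DEPENDS gains "gtk+3" and PACKAGECONFIG_CONFARGS gains
        # "--enable-gtk"; otherwise only "--disable-gtk" is added.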

    pn = d.getVar('PN')
    license = d.getVar('LICENSE')
    if license == "INVALID" and pn != "defaultpkgname":
        bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn)

    if bb.data.inherits_class('license', d):
        check_license_format(d)
        unmatched_license_flags = check_license_flags(d)
        if unmatched_license_flags:
            if len(unmatched_license_flags) == 1:
                message = "because it has a restricted license '{0}', which is not whitelisted in LICENSE_FLAGS_WHITELIST".format(unmatched_license_flags[0])
            else:
                message = "because it has restricted licenses {0}, which are not whitelisted in LICENSE_FLAGS_WHITELIST".format(
                    ", ".join("'{0}'".format(f) for f in unmatched_license_flags))
            bb.debug(1, "Skipping %s %s" % (pn, message))
            raise bb.parse.SkipRecipe(message)

    # If we're building a target package we need to use fakeroot (pseudo)
    # in order to capture permissions, owners, groups and special files
    if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('cross', d):
        d.appendVarFlag('do_prepare_recipe_sysroot', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.appendVarFlag('do_install', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_install', 'fakeroot', '1')
        d.appendVarFlag('do_package', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_package', 'fakeroot', '1')
        d.setVarFlag('do_package_setscene', 'fakeroot', '1')
        d.appendVarFlag('do_package_setscene', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
        d.setVarFlag('do_devshell', 'fakeroot', '1')
        d.appendVarFlag('do_devshell', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')

    need_machine = d.getVar('COMPATIBLE_MACHINE')
    if need_machine and not d.getVar('PARSE_ALL_RECIPES', False):
        import re
        compat_machines = (d.getVar('MACHINEOVERRIDES') or "").split(":")
        for m in compat_machines:
            if re.match(need_machine, m):
                break
        else:
            raise bb.parse.SkipRecipe("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % d.getVar('MACHINE'))
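    # COMPATIBLE_MACHINE is a regular expression matched against each entry in
    # MACHINEOVERRIDES, e.g. (illustrative):
    #   COMPATIBLE_MACHINE = "(qemux86|qemux86-64)"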

    source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False) or d.getVar('PARSE_ALL_RECIPES', False)
    if not source_mirror_fetch:
        need_host = d.getVar('COMPATIBLE_HOST')
        if need_host:
            import re
            this_host = d.getVar('HOST_SYS')
            if not re.match(need_host, this_host):
                raise bb.parse.SkipRecipe("incompatible with host %s (not in COMPATIBLE_HOST)" % this_host)

        bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split()

        check_license = False if pn.startswith("nativesdk-") else True
        for t in ["-native", "-cross-${TARGET_ARCH}", "-cross-initial-${TARGET_ARCH}",
              "-crosssdk-${SDK_SYS}", "-crosssdk-initial-${SDK_SYS}",
              "-cross-canadian-${TRANSLATED_TARGET_ARCH}"]:
            if pn.endswith(d.expand(t)):
                check_license = False
        if pn.startswith("gcc-source-"):
            check_license = False

        if check_license and bad_licenses:
            bad_licenses = expand_wildcard_licenses(d, bad_licenses)

            whitelist = []
            for lic in bad_licenses:
                spdx_license = return_spdx(d, lic)
                whitelist.extend((d.getVar("WHITELIST_" + lic) or "").split())
                if spdx_license:
                    whitelist.extend((d.getVar("WHITELIST_" + spdx_license) or "").split())

            if pn in whitelist:
                '''
                We need to track what we are whitelisting and why. If pn is
                incompatible we need to be able to note that the image that
                is created may in fact contain incompatible licenses despite
                INCOMPATIBLE_LICENSE being set.
                '''
                bb.note("Including %s as buildable despite it having an incompatible license because it has been whitelisted" % pn)
            else:
                pkgs = d.getVar('PACKAGES').split()
                skipped_pkgs = {}
                unskipped_pkgs = []
                for pkg in pkgs:
                    incompatible_lic = incompatible_license(d, bad_licenses, pkg)
                    if incompatible_lic:
                        skipped_pkgs[pkg] = incompatible_lic
                    else:
                        unskipped_pkgs.append(pkg)
                if unskipped_pkgs:
                    for pkg in skipped_pkgs:
                        bb.debug(1, "Skipping the package %s at do_rootfs because of incompatible license(s): %s" % (pkg, ' '.join(skipped_pkgs[pkg])))
                        d.setVar('LICENSE_EXCLUSION-' + pkg, ' '.join(skipped_pkgs[pkg]))
                    for pkg in unskipped_pkgs:
                        bb.debug(1, "Including the package %s" % pkg)
                else:
                    incompatible_lic = incompatible_license(d, bad_licenses)
                    for pkg in skipped_pkgs:
                        incompatible_lic += skipped_pkgs[pkg]
                    incompatible_lic = sorted(list(set(incompatible_lic)))

                    if incompatible_lic:
                        bb.debug(1, "Skipping recipe %s because of incompatible license(s): %s" % (pn, ' '.join(incompatible_lic)))
                        raise bb.parse.SkipRecipe("it has incompatible license(s): %s" % ' '.join(incompatible_lic))

    needsrcrev = False
    srcuri = d.getVar('SRC_URI')
    for uri_string in srcuri.split():
        uri = bb.fetch.URI(uri_string)

        # HTTP/FTP use the wget fetcher
        if uri.scheme in ("http", "https", "ftp"):
            d.appendVarFlag('do_fetch', 'depends', ' wget-native:do_populate_sysroot')

        # Svn packages should DEPEND on subversion-native
        if uri.scheme == "svn":
            needsrcrev = True
            d.appendVarFlag('do_fetch', 'depends', ' subversion-native:do_populate_sysroot')

        # Git packages should DEPEND on git-native
        elif uri.scheme in ("git", "gitsm"):
            needsrcrev = True
            d.appendVarFlag('do_fetch', 'depends', ' git-native:do_populate_sysroot')

        # Mercurial packages should DEPEND on mercurial-native
        elif uri.scheme == "hg":
            needsrcrev = True
            d.appendVar("EXTRANATIVEPATH", ' python3-native ')
            d.appendVarFlag('do_fetch', 'depends', ' mercurial-native:do_populate_sysroot')

        # Perforce packages support SRCREV = "${AUTOREV}"
        elif uri.scheme == "p4":
            needsrcrev = True

        # OSC packages should DEPEND on osc-native
        elif uri.scheme == "osc":
            d.appendVarFlag('do_fetch', 'depends', ' osc-native:do_populate_sysroot')

        elif uri.scheme == "npm":
            d.appendVarFlag('do_fetch', 'depends', ' nodejs-native:do_populate_sysroot')

        # *.lz4 should DEPEND on lz4-native for unpacking
        if uri.path.endswith('.lz4'):
            d.appendVarFlag('do_unpack', 'depends', ' lz4-native:do_populate_sysroot')

        # *.lz should DEPEND on lzip-native for unpacking
        elif uri.path.endswith('.lz'):
            d.appendVarFlag('do_unpack', 'depends', ' lzip-native:do_populate_sysroot')

        # *.xz should DEPEND on xz-native for unpacking
        elif uri.path.endswith('.xz') or uri.path.endswith('.txz'):
            d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

        # *.zip and *.jar should DEPEND on unzip-native for unpacking
        elif uri.path.endswith('.zip') or uri.path.endswith('.jar'):
            d.appendVarFlag('do_unpack', 'depends', ' unzip-native:do_populate_sysroot')

        # Some rpm files may be compressed internally using xz (for example, rpms from Fedora)
        elif uri.path.endswith('.rpm'):
            d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

        # *.deb should DEPEND on xz-native for unpacking
        elif uri.path.endswith('.deb'):
            d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')

    if needsrcrev:
        d.setVar("SRCPV", "${@bb.fetch2.get_srcrev(d)}")

    # Gather all named SRCREVs to add to the sstate hash calculation
    # This anonymous python snippet is called multiple times so we
    # need to be careful to not double up the appends here and cause
    # the base hash to mismatch the task hash
    for uri in srcuri.split():
        parm = bb.fetch.decodeurl(uri)[5]
        uri_names = parm.get("name", "").split(",")
        for uri_name in filter(None, uri_names):
            srcrev_name = "SRCREV_{}".format(uri_name)
            if srcrev_name not in (d.getVarFlag("do_fetch", "vardeps") or "").split():
                d.appendVarFlag("do_fetch", "vardeps", " {}".format(srcrev_name))

    set_packagetriplet(d)

    # 'multimachine' handling
    mach_arch = d.getVar('MACHINE_ARCH')
    pkg_arch = d.getVar('PACKAGE_ARCH')

    if (pkg_arch == mach_arch):
        # Already machine specific - nothing further to do
        return

    #
    # We always try to scan SRC_URI for URLs with machine overrides
    # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
    #
    override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH')
    if override != '0':
        paths = []
        fpaths = (d.getVar('FILESPATH') or '').split(':')
        machine = d.getVar('MACHINE')
        for p in fpaths:
            if os.path.basename(p) == machine and os.path.isdir(p):
                paths.append(p)

        if len(paths) != 0:
            for s in srcuri.split():
                if not s.startswith("file://"):
                    continue
                fetcher = bb.fetch2.Fetch([s], d)
                local = fetcher.localpath(s)
                for mp in paths:
                    if local.startswith(mp):
                        #bb.note("overriding PACKAGE_ARCH from %s to %s for %s" % (pkg_arch, mach_arch, pn))
                        d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
                        return

    packages = d.getVar('PACKAGES').split()
    for pkg in packages:
        pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg)

        # We could look for != PACKAGE_ARCH here but how to choose
        # if multiple differences are present?
        # Look through PACKAGE_ARCHS for the priority order?
        if pkgarch and pkgarch == mach_arch:
            d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
            bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." % d.getVar("PN"))
}

addtask cleansstate after do_clean
python do_cleansstate() {
    sstate_clean_cachefiles(d)
}
addtask cleanall after do_cleansstate
do_cleansstate[nostamp] = "1"

python do_cleanall() {
    src_uri = (d.getVar('SRC_URI') or "").split()
    if len(src_uri) == 0:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        fetcher.clean()
    except bb.fetch2.BBFetchException as e:
        bb.fatal(str(e))
}
do_cleanall[nostamp] = "1"


EXPORT_FUNCTIONS do_fetch do_unpack do_configure do_compile do_install do_package