summaryrefslogtreecommitdiffstats
path: root/meta/classes/package.bbclass
diff options
context:
space:
mode:
Diffstat (limited to 'meta/classes/package.bbclass')
-rw-r--r--meta/classes/package.bbclass2060
1 files changed, 2060 insertions, 0 deletions
diff --git a/meta/classes/package.bbclass b/meta/classes/package.bbclass
new file mode 100644
index 0000000000..b81f4f9281
--- /dev/null
+++ b/meta/classes/package.bbclass
@@ -0,0 +1,2060 @@
1#
2# Packaging process
3#
4# Executive summary: This class iterates over the functions listed in PACKAGEFUNCS
5# Taking D and splitting it up into the packages listed in PACKAGES, placing the
6# resulting output in PKGDEST.
7#
8# There are the following default steps but PACKAGEFUNCS can be extended:
9#
10# a) package_get_auto_pr - get PRAUTO from remote PR service
11#
12# b) perform_packagecopy - Copy D into PKGD
13#
14# c) package_do_split_locales - Split out the locale files, updates FILES and PACKAGES
15#
16# d) split_and_strip_files - split the files into runtime and debug and strip them.
17# Debug files include debug info split, and associated sources that end up in -dbg packages
18#
19# e) fixup_perms - Fix up permissions in the package before we split it.
20#
21# f) populate_packages - Split the files in PKGD into separate packages in PKGDEST/<pkgname>
22# Also triggers the binary stripping code to put files in -dbg packages.
23#
24# g) package_do_filedeps - Collect perfile run-time dependency metadata
25# The data is stored in FILER{PROVIDES,DEPENDS}_file_pkg variables with
26# a list of affected files in FILER{PROVIDES,DEPENDS}FLIST_pkg
27#
28# h) package_do_shlibs - Look at the shared libraries generated and automatically add any
29# dependencies found. Also stores the package name so anyone else using this library
30# knows which package to depend on.
31#
32# i) package_do_pkgconfig - Keep track of which packages need and provide which .pc files
33#
34# j) read_shlibdeps - Reads the stored shlibs information into the metadata
35#
36# k) package_depchains - Adds automatic dependencies to -dbg and -dev packages
37#
38# l) emit_pkgdata - saves the packaging data into PKGDATA_DIR for use in later
39# packaging steps
40
inherit packagedata
inherit prserv
inherit chrpath

# Need the package_qa_handle_error() in insane.bbclass
inherit insane

# PKGD holds the single working copy of ${D}; PKGDEST holds the
# per-package split trees produced by populate_packages.
PKGD = "${WORKDIR}/package"
PKGDEST = "${WORKDIR}/packages-split"

LOCALE_SECTION ?= ''

ALL_MULTILIB_PACKAGE_ARCHS = "${@all_multilib_tune_values(d, 'PACKAGE_ARCHS')}"

# rpm is used for the per-file dependency identification
PACKAGE_DEPENDS += "rpm-native"
57
def legitimize_package_name(s):
    """
    Make sure package names are legitimate strings.

    Expands glibc-locale style <Uxxxx> codepoint escapes to the real
    characters and lowercases/substitutes characters that are not valid
    in package names.
    """
    import re

    def fixutf(m):
        cp = m.group(1)
        if cp:
            # Build a literal "\uXXXX" escape and decode it to the real
            # character. Going via latin-1 keeps this valid on Python 3,
            # where str has no .decode() and a bare '\u%s' literal is a
            # SyntaxError (truncated \uXXXX escape).
            return ('\\u%s' % cp).encode('latin-1').decode('unicode_escape')

    # Handle unicode codepoints encoded as <U0123>, as in glibc locale files.
    s = re.sub('<U([0-9A-Fa-f]{1,4})>', fixutf, s)

    # Remaining package name validity fixes
    return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-')
74
def do_split_packages(d, root, file_regex, output_pattern, description, postinst=None, recursive=False, hook=None, extra_depends=None, aux_files_pattern=None, postrm=None, allow_dirs=False, prepend=False, match_path=False, aux_files_pattern_verbatim=None, allow_links=False, summary=None):
    """
    Used in .bb files to split up dynamically generated subpackages of a
    given package, usually plugins or modules.

    Arguments:
    root           -- the path in which to search
    file_regex     -- regular expression to match searched files. Use
                      parentheses () to mark the part of this expression
                      that should be used to derive the module name (to be
                      substituted where %s is used in other function
                      arguments as noted below)
    output_pattern -- pattern to use for the package names. Must include %s.
    description    -- description to set for each package. Must include %s.
    postinst       -- postinstall script to use for all packages (as a
                      string)
    recursive      -- True to perform a recursive search - default False
    hook           -- a hook function to be called for every match. The
                      function will be called with the following arguments
                      (in the order listed):
                        f: full path to the file/directory match
                        pkg: the package name
                        file_regex: as above
                        output_pattern: as above
                        modulename: the module name derived using file_regex
    extra_depends  -- extra runtime dependencies (RDEPENDS) to be set for
                      all packages. The default value of None causes a
                      dependency on the main package (${PN}) - if you do
                      not want this, pass '' for this parameter.
    aux_files_pattern -- extra item(s) to be added to FILES for each
                      package. Can be a single string item or a list of
                      strings for multiple items.  Must include %s.
    postrm         -- postrm script to use for all packages (as a string)
    allow_dirs     -- True allow directories to be matched - default False
    prepend        -- if True, prepend created packages to PACKAGES instead
                      of the default False which appends them
    match_path     -- match file_regex on the whole relative path to the
                      root rather than just the file name
    aux_files_pattern_verbatim -- extra item(s) to be added to FILES for
                      each package, using the actual derived module name
                      rather than converting it to something legal for a
                      package name. Can be a single string item or a list
                      of strings for multiple items.  Must include %s.
    allow_links    -- True to allow symlinks to be matched - default False
    summary        -- Summary to set for each package. Must include %s;
                      defaults to description if not set.

    Returns the list of packages generated by this invocation (including
    matches for packages that already existed in PACKAGES).
    """
    # Hoisted out of the per-object loop below: importing once is enough.
    import re, stat

    dvar = d.getVar('PKGD', True)

    # If the root directory doesn't exist, don't error out later but silently do
    # no splitting.
    if not os.path.exists(dvar + root):
        return []

    # In a multilib build, make sure generated package names and any extra
    # dependencies carry the multilib prefix.
    ml = d.getVar("MLPREFIX", True)
    if ml:
        if not output_pattern.startswith(ml):
            output_pattern = ml + output_pattern

        newdeps = []
        for dep in (extra_depends or "").split():
            if dep.startswith(ml):
                newdeps.append(dep)
            else:
                newdeps.append(ml + dep)
        if newdeps:
            extra_depends = " ".join(newdeps)


    packages = d.getVar('PACKAGES', True).split()
    split_packages = []

    if postinst:
        postinst = '#!/bin/sh\n' + postinst + '\n'
    if postrm:
        postrm = '#!/bin/sh\n' + postrm + '\n'
    if not recursive:
        objs = os.listdir(dvar + root)
    else:
        objs = []
        for walkroot, dirs, files in os.walk(dvar + root):
            for file in files:
                relpath = os.path.join(walkroot, file).replace(dvar + root + '/', '', 1)
                if relpath:
                    objs.append(relpath)

    if extra_depends is None:
        extra_depends = d.getVar("PN", True)

    if not summary:
        summary = description

    for o in sorted(objs):
        if match_path:
            m = re.match(file_regex, o)
        else:
            m = re.match(file_regex, os.path.basename(o))

        if not m:
            continue
        f = os.path.join(dvar + root, o)
        mode = os.lstat(f).st_mode
        if not (stat.S_ISREG(mode) or (allow_links and stat.S_ISLNK(mode)) or (allow_dirs and stat.S_ISDIR(mode))):
            continue
        on = legitimize_package_name(m.group(1))
        pkg = output_pattern % on
        split_packages.append(pkg)
        if pkg not in packages:
            if prepend:
                packages = [pkg] + packages
            else:
                packages.append(pkg)
        oldfiles = d.getVar('FILES_' + pkg, True)
        newfile = os.path.join(root, o)
        # These names will be passed through glob() so if the filename actually
        # contains * or ? (rare, but possible) we need to handle that specially
        newfile = newfile.replace('*', '[*]')
        newfile = newfile.replace('?', '[?]')
        if not oldfiles:
            the_files = [newfile]
            if aux_files_pattern:
                if isinstance(aux_files_pattern, list):
                    for fp in aux_files_pattern:
                        the_files.append(fp % on)
                else:
                    the_files.append(aux_files_pattern % on)
            if aux_files_pattern_verbatim:
                if isinstance(aux_files_pattern_verbatim, list):
                    for fp in aux_files_pattern_verbatim:
                        the_files.append(fp % m.group(1))
                else:
                    the_files.append(aux_files_pattern_verbatim % m.group(1))
            d.setVar('FILES_' + pkg, " ".join(the_files))
        else:
            d.setVar('FILES_' + pkg, oldfiles + " " + newfile)
        if extra_depends != '':
            d.appendVar('RDEPENDS_' + pkg, ' ' + extra_depends)
        if not d.getVar('DESCRIPTION_' + pkg, True):
            d.setVar('DESCRIPTION_' + pkg, description % on)
        if not d.getVar('SUMMARY_' + pkg, True):
            d.setVar('SUMMARY_' + pkg, summary % on)
        if postinst:
            d.setVar('pkg_postinst_' + pkg, postinst)
        if postrm:
            d.setVar('pkg_postrm_' + pkg, postrm)
        if callable(hook):
            hook(f, pkg, file_regex, output_pattern, m.group(1))

    d.setVar('PACKAGES', ' '.join(packages))
    return split_packages
228
# file(1) is used by the ELF identification in split_and_strip_files
PACKAGE_DEPENDS += "file-native"
python () {
    # Only recipes that actually produce packages need do_package to wait
    # for their packaging dependencies.
    if d.getVar('PACKAGES', True) != '':
        helpers = (d.getVar('PACKAGE_DEPENDS', True) or "").split()
        extradeps = "".join([" %s:do_populate_sysroot" % helper for helper in helpers])
        d.appendVarFlag('do_package', 'depends', extradeps)

        # shlibs requires any DEPENDS to have already packaged for the *.list files
        d.appendVarFlag('do_package', 'deptask', " do_packagedata")
}
241
def splitdebuginfo(file, debugfile, debugsrcdir, sourcefile, d):
    # Function to split a single file into two components, one is the stripped
    # target system binary, the other contains any debugging information. The
    # two files are linked to reference each other.
    #
    # sourcefile is also generated containing a list of debugsources

    import stat

    dvar = d.getVar('PKGD', True)
    objcopy = d.getVar("OBJCOPY", True)
    debugedit = d.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit")
    workdir = d.getVar("WORKDIR", True)
    workparentdir = d.getVar("DEBUGSRC_OVERRIDE_PATH", True) or os.path.dirname(os.path.dirname(workdir))

    # We ignore kernel modules, we don't generate debug info files.
    if file.find("/lib/modules/") != -1 and file.endswith(".ko"):
        return 1

    # Ensure the file is readable and writable while we split it; the
    # original mode is restored afterwards.
    # Fixed: the readability test previously lacked the "not", so the
    # condition was true for virtually every file and the mode was always
    # rewritten even when no change was needed.
    newmode = None
    if not os.access(file, os.W_OK) or not os.access(file, os.R_OK):
        origmode = os.stat(file)[stat.ST_MODE]
        newmode = origmode | stat.S_IWRITE | stat.S_IREAD
        os.chmod(file, newmode)

    # We need to extract the debug src information here...
    if debugsrcdir:
        cmd = "'%s' -b '%s' -d '%s' -i -l '%s' '%s'" % (debugedit, workparentdir, debugsrcdir, sourcefile, file)
        (retval, output) = oe.utils.getstatusoutput(cmd)
        if retval:
            bb.fatal("debugedit failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))

    bb.utils.mkdirhier(os.path.dirname(debugfile))

    # Extract the debugging sections into the separate debug file...
    cmd = "'%s' --only-keep-debug '%s' '%s'" % (objcopy, file, debugfile)
    (retval, output) = oe.utils.getstatusoutput(cmd)
    if retval:
        bb.fatal("objcopy failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))

    # Set the debuglink to have the view of the file path on the target
    cmd = "'%s' --add-gnu-debuglink='%s' '%s'" % (objcopy, debugfile, file)
    (retval, output) = oe.utils.getstatusoutput(cmd)
    if retval:
        bb.fatal("objcopy failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))

    if newmode:
        os.chmod(file, origmode)

    return 0
291
def copydebugsources(debugsrcdir, d):
    # The debug src information written out to sourcefile is further processed
    # and copied to the destination here.

    import stat

    sourcefile = d.expand("${WORKDIR}/debugsources.list")
    if debugsrcdir and os.path.isfile(sourcefile):
        dvar = d.getVar('PKGD', True)
        strip = d.getVar("STRIP", True)
        objcopy = d.getVar("OBJCOPY", True)
        debugedit = d.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit")
        workdir = d.getVar("WORKDIR", True)
        workparentdir = os.path.dirname(os.path.dirname(workdir))
        workbasedir = os.path.basename(os.path.dirname(workdir)) + "/" + os.path.basename(workdir)

        # Create the debugsrc directory tree inside PKGD, remembering which
        # components we had to create so empty ones can be removed at the end.
        nosuchdir = []
        basepath = dvar
        for p in debugsrcdir.split("/"):
            basepath = basepath + "/" + p
            if not cpath.exists(basepath):
                nosuchdir.append(basepath)
            bb.utils.mkdirhier(basepath)
            cpath.updatecache(basepath)

        # Pipeline: unique NUL-separated source list, drop compiler-internal
        # entries, keep only this package's files, then cpio-copy them in.
        processdebugsrc = "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '(<internal>|<built-in>)$' | "
        # We need to ignore files that are not actually ours
        # we do this by only paying attention to items from this package
        processdebugsrc += "fgrep -zw '%s' | "
        processdebugsrc += "(cd '%s' ; cpio -pd0mlL --no-preserve-owner '%s%s' 2>/dev/null)"

        cmd = processdebugsrc % (sourcefile, workbasedir, workparentdir, dvar, debugsrcdir)
        (retval, output) = oe.utils.getstatusoutput(cmd)
        # Can "fail" if internal headers/transient sources are attempted
        #if retval:
        #    bb.fatal("debug source copy failed with exit code %s (cmd was %s)" % (retval, cmd))

        # cpio seems to have a bug with -lL together and symbolic links are just copied, not dereferenced.
        # Work around this by manually finding and copying any symbolic links that made it through.
        cmd = "find %s%s -type l -print0 -delete | sed s#%s%s/##g | (cd '%s' ; cpio -pd0mL --no-preserve-owner '%s%s' 2>/dev/null)" % (dvar, debugsrcdir, dvar, debugsrcdir, workparentdir, dvar, debugsrcdir)
        (retval, output) = oe.utils.getstatusoutput(cmd)
        if retval:
            bb.fatal("debugsrc symlink fixup failed with exit code %s (cmd was %s)" % (retval, cmd))

        # The copy by cpio may have resulted in some empty directories! Remove these
        cmd = "find %s%s -empty -type d -delete" % (dvar, debugsrcdir)
        (retval, output) = oe.utils.getstatusoutput(cmd)
        if retval:
            bb.fatal("empty directory removal failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))

        # Also remove debugsrcdir if its empty
        for p in nosuchdir[::-1]:
            if os.path.exists(p) and not os.listdir(p):
                os.rmdir(p)
346
347#
348# Package data handling routines
349#
350
def get_package_mapping (pkg, basepkg, d):
    """
    Return the final (possibly renamed) name for pkg, looked up via its
    PKG_<pkg> entry in the pkgdata store; falls back to pkg itself.
    """
    import oe.packagedata

    subdata = oe.packagedata.read_subpkgdata(pkg, d)
    key = "PKG_%s" % pkg

    if key not in subdata:
        return pkg
    # Have to avoid undoing the write_extra_pkgs(global_variants...)
    if bb.data.inherits_class('allarch', d) and subdata[key] == basepkg:
        return pkg
    return subdata[key]
364
def get_package_additional_metadata (pkg_type, d):
    """
    Return extra package metadata as a newline-joined string of fields.

    Checks the package-type-specific variable
    (PACKAGE_ADD_METADATA_<PKGTYPE>) first, then the generic
    PACKAGE_ADD_METADATA; returns None when neither is set.
    """
    base_key = "PACKAGE_ADD_METADATA"
    candidates = ("%s_%s" % (base_key, pkg_type.upper()), base_key)
    for key in candidates:
        if d.getVar(key) is None:
            continue
        # Mark the variable as a list so oe.data.typed_value() splits it on
        # the configured separator (newline unless already set).
        d.setVarFlag(key, "type", "list")
        if d.getVarFlag(key, "separator") is None:
            d.setVarFlag(key, "separator", "\\n")
        fields = [field.strip() for field in oe.data.typed_value(key, d)]
        return "\n".join(fields).strip()
375
def runtime_mapping_rename (varname, pkg, d):
    """
    Rewrite each dependency in the given variable through
    get_package_mapping() so renamed packages are referenced by their
    final names.
    """
    # Packagegroup dependency lists are left untouched.
    if bb.data.inherits_class('packagegroup', d):
        return

    deps = bb.utils.explode_dep_versions2(d.getVar(varname, True) or "")
    remapped = {}
    for depend, constraints in deps.items():
        remapped[get_package_mapping(depend, pkg, d)] = constraints

    d.setVar(varname, bb.utils.join_deps(remapped, commasep=False))
391
392#
393# Package functions suitable for inclusion in PACKAGEFUNCS
394#
395
python package_get_auto_pr() {
    # Obtain PRAUTO: either from a lockdown-exported file, or by querying
    # the PR service; also resolves any AUTOINC placeholder in PKGV.
    import oe.prservice
    import re

    # Support per recipe PRSERV_HOST
    pn = d.getVar('PN', True)
    host = d.getVar("PRSERV_HOST_" + pn, True)
    if not (host is None):
        d.setVar("PRSERV_HOST", host)

    pkgv = d.getVar("PKGV", True)

    # PR Server not active, handle AUTOINC
    if not d.getVar('PRSERV_HOST', True):
        if 'AUTOINC' in pkgv:
            d.setVar("PKGV", pkgv.replace("AUTOINC", "0"))
        return

    auto_pr = None
    pv = d.getVar("PV", True)
    version = d.getVar("PRAUTOINX", True)
    pkgarch = d.getVar("PACKAGE_ARCH", True)
    checksum = d.getVar("BB_TASKHASH", True)

    # Lockdown mode: PR values come from a previously exported file rather
    # than a live PR service connection.
    if d.getVar('PRSERV_LOCKDOWN', True):
        auto_pr = d.getVar('PRAUTO_' + version + '_' + pkgarch, True) or d.getVar('PRAUTO_' + version, True) or None
        if auto_pr is None:
            bb.fatal("Can NOT get PRAUTO from lockdown exported file")
        d.setVar('PRAUTO',str(auto_pr))
        return

    try:
        conn = d.getVar("__PRSERV_CONN", True)
        if conn is None:
            conn = oe.prservice.prserv_make_conn(d)
        if conn is not None:
            if "AUTOINC" in pkgv:
                # Ask the PR service for the AUTOINC counter for this srcrev
                srcpv = bb.fetch2.get_srcrev(d)
                base_ver = "AUTOINC-%s" % version[:version.find(srcpv)]
                value = conn.getPR(base_ver, pkgarch, srcpv)
                d.setVar("PKGV", pkgv.replace("AUTOINC", str(value)))

            auto_pr = conn.getPR(version, pkgarch, checksum)
    except Exception as e:
        bb.fatal("Can NOT get PRAUTO, exception %s" % str(e))
    if auto_pr is None:
        bb.fatal("Can NOT get PRAUTO from remote PR service")
    d.setVar('PRAUTO',str(auto_pr))
}
445
# Base name used when constructing the split -locale-* package names below
LOCALEBASEPN ??= "${PN}"
python package_do_split_locales() {
    # Split ${datadir}/locale/<l> into per-locale packages named
    # ${LOCALEBASEPN}-locale-<l>, setting FILES/SUMMARY/etc for each.
    if (d.getVar('PACKAGE_NO_LOCALE', True) == '1'):
        bb.debug(1, "package requested not splitting locales")
        return

    packages = (d.getVar('PACKAGES', True) or "").split()

    datadir = d.getVar('datadir', True)
    if not datadir:
        bb.note("datadir not defined")
        return

    dvar = d.getVar('PKGD', True)
    pn = d.getVar('LOCALEBASEPN', True)

    # The catch-all locale package is superseded by the per-locale ones
    if pn + '-locale' in packages:
        packages.remove(pn + '-locale')

    localedir = os.path.join(dvar + datadir, 'locale')

    if not cpath.isdir(localedir):
        bb.debug(1, "No locale files in this package")
        return

    locales = os.listdir(localedir)

    summary = d.getVar('SUMMARY', True) or pn
    description = d.getVar('DESCRIPTION', True) or ""
    locale_section = d.getVar('LOCALE_SECTION', True)
    mlprefix = d.getVar('MLPREFIX', True) or ""
    for l in sorted(locales):
        ln = legitimize_package_name(l)
        pkg = pn + '-locale-' + ln
        packages.append(pkg)
        d.setVar('FILES_' + pkg, os.path.join(datadir, 'locale', l))
        d.setVar('RRECOMMENDS_' + pkg, '%svirtual-locale-%s' % (mlprefix, ln))
        d.setVar('RPROVIDES_' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln))
        d.setVar('SUMMARY_' + pkg, '%s - %s translations' % (summary, l))
        d.setVar('DESCRIPTION_' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l))
        if locale_section:
            d.setVar('SECTION_' + pkg, locale_section)

    d.setVar('PACKAGES', ' '.join(packages))

    # Disabled by RP 18/06/07
    # Wildcards aren't supported in debian
    # They break with ipkg since glibc-locale* will mean that
    # glibc-localedata-translit* won't install as a dependency
    # for some other package which breaks meta-toolchain
    # Probably breaks since virtual-locale- isn't provided anywhere
    #rdep = (d.getVar('RDEPENDS_%s' % pn, True) or "").split()
    #rdep.append('%s-locale*' % pn)
    #d.setVar('RDEPENDS_%s' % pn, ' '.join(rdep))
}
502
python perform_packagecopy () {
    # Copy the installed image ${D} into ${PKGD}, the working tree that
    # all later packaging steps operate on.
    dest = d.getVar('D', True)
    dvar = d.getVar('PKGD', True)

    # Start by package population by taking a copy of the installed
    # files to operate on
    # Preserve sparse files and hard links
    cmd = 'tar -cf - -C %s -p . | tar -xf - -C %s' % (dest, dvar)
    (retval, output) = oe.utils.getstatusoutput(cmd)
    if retval:
        bb.fatal("file copy failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))

    # replace RPATHs for the nativesdk binaries, to make them relocatable
    if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d):
        rpath_replace (dvar, d)
}
# PKGD is removed and recreated on every run of this task
perform_packagecopy[cleandirs] = "${PKGD}"
perform_packagecopy[dirs] = "${PKGD}"
521
522# We generate a master list of directories to process, we start by
523# seeding this list with reasonable defaults, then load from
524# the fs-perms.txt files
python fixup_perms () {
    # Apply the fs-perms.txt permission/ownership/link policy to the PKGD
    # tree before it is split into packages.
    import pwd, grp

    # init using a string with the same format as a line as documented in
    # the fs-perms.txt file
    # <path> <mode> <uid> <gid> <walk> <fmode> <fuid> <fgid>
    # <path> link <link target>
    #
    # __str__ can be used to print out an entry in the input format
    #
    # if fs_perms_entry.path is None:
    #    an error occured
    # if fs_perms_entry.link, you can retrieve:
    #    fs_perms_entry.path = path
    #    fs_perms_entry.link = target of link
    # if not fs_perms_entry.link, you can retrieve:
    #    fs_perms_entry.path = path
    #    fs_perms_entry.mode = expected dir mode or None
    #    fs_perms_entry.uid = expected uid or -1
    #    fs_perms_entry.gid = expected gid or -1
    #    fs_perms_entry.walk = 'true' or something else
    #    fs_perms_entry.fmode = expected file mode or None
    #    fs_perms_entry.fuid = expected file uid or -1
    #    fs_perms_entry_fgid = expected file gid or -1
    class fs_perms_entry():
        def __init__(self, line):
            lsplit = line.split()
            if len(lsplit) == 3 and lsplit[1].lower() == "link":
                self._setlink(lsplit[0], lsplit[2])
            elif len(lsplit) == 8:
                self._setdir(lsplit[0], lsplit[1], lsplit[2], lsplit[3], lsplit[4], lsplit[5], lsplit[6], lsplit[7])
            else:
                msg = "Fixup Perms: invalid config line %s" % line
                package_qa_handle_error("perm-config", msg, d)
                self.path = None
                self.link = None

        def _setdir(self, path, mode, uid, gid, walk, fmode, fuid, fgid):
            self.path = os.path.normpath(path)
            self.link = None
            self.mode = self._procmode(mode)
            self.uid  = self._procuid(uid)
            self.gid  = self._procgid(gid)
            self.walk = walk.lower()
            self.fmode = self._procmode(fmode)
            self.fuid = self._procuid(fuid)
            self.fgid = self._procgid(fgid)

        def _setlink(self, path, link):
            self.path = os.path.normpath(path)
            self.link = link

        def _procmode(self, mode):
            # "-" (or empty) means "leave the mode alone"
            if not mode or (mode and mode == "-"):
                return None
            else:
                return int(mode,8)

        # Note uid/gid -1 has special significance in os.lchown
        def _procuid(self, uid):
            if uid is None or uid == "-":
                return -1
            elif uid.isdigit():
                return int(uid)
            else:
                return pwd.getpwnam(uid).pw_uid

        def _procgid(self, gid):
            if gid is None or gid == "-":
                return -1
            elif gid.isdigit():
                return int(gid)
            else:
                return grp.getgrnam(gid).gr_gid

        # Use for debugging the entries
        def __str__(self):
            if self.link:
                return "%s link %s" % (self.path, self.link)
            else:
                mode = "-"
                if self.mode:
                    mode = "0%o" % self.mode
                fmode = "-"
                if self.fmode:
                    fmode = "0%o" % self.fmode
                uid = self._mapugid(self.uid)
                gid = self._mapugid(self.gid)
                fuid = self._mapugid(self.fuid)
                fgid = self._mapugid(self.fgid)
                return "%s %s %s %s %s %s %s %s" % (self.path, mode, uid, gid, self.walk, fmode, fuid, fgid)

        def _mapugid(self, id):
            if id is None or id == -1:
                return "-"
            else:
                return "%d" % id

    # Fix the permission, owner and group of path
    def fix_perms(path, mode, uid, gid, dir):
        if mode and not os.path.islink(path):
            #bb.note("Fixup Perms: chmod 0%o %s" % (mode, dir))
            os.chmod(path, mode)
        # -1 is a special value that means don't change the uid/gid
        # if they are BOTH -1, don't bother to lchown
        if not (uid == -1 and gid == -1):
            #bb.note("Fixup Perms: lchown %d:%d %s" % (uid, gid, dir))
            os.lchown(path, uid, gid)

    # Return a list of configuration files based on either the default
    # files/fs-perms.txt or the contents of FILESYSTEM_PERMS_TABLES
    # paths are resolved via BBPATH
    def get_fs_perms_list(d):
        str = ""
        bbpath = d.getVar('BBPATH', True)
        fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES', True)
        if not fs_perms_tables:
            fs_perms_tables = 'files/fs-perms.txt'
        for conf_file in fs_perms_tables.split():
            str += " %s" % bb.utils.which(bbpath, conf_file)
        return str



    dvar = d.getVar('PKGD', True)

    fs_perms_table = {}

    # By default all of the standard directories specified in
    # bitbake.conf will get 0755 root:root.
    target_path_vars = [    'base_prefix',
                'prefix',
                'exec_prefix',
                'base_bindir',
                'base_sbindir',
                'base_libdir',
                'datadir',
                'sysconfdir',
                'servicedir',
                'sharedstatedir',
                'localstatedir',
                'infodir',
                'mandir',
                'docdir',
                'bindir',
                'sbindir',
                'libexecdir',
                'libdir',
                'includedir',
                'oldincludedir' ]

    for path in target_path_vars:
        dir = d.getVar(path, True) or ""
        if dir == "":
            continue
        fs_perms_table[dir] = fs_perms_entry(bb.data.expand("%s 0755 root root false - - -" % (dir), d))

    # Now we actually load from the configuration files
    for conf in get_fs_perms_list(d).split():
        if os.path.exists(conf):
            f = open(conf)
            for line in f:
                if line.startswith('#'):
                    continue
                lsplit = line.split()
                if len(lsplit) == 0:
                    continue
                if len(lsplit) != 8 and not (len(lsplit) == 3 and lsplit[1].lower() == "link"):
                    msg = "Fixup perms: %s invalid line: %s" % (conf, line)
                    package_qa_handle_error("perm-line", msg, d)
                    continue
                entry = fs_perms_entry(d.expand(line))
                if entry and entry.path:
                    fs_perms_table[entry.path] = entry
            f.close()

    # Debug -- list out in-memory table
    #for dir in fs_perms_table:
    #    bb.note("Fixup Perms: %s: %s" % (dir, str(fs_perms_table[dir])))

    # We process links first, so we can go back and fixup directory ownership
    # for any newly created directories
    for dir in fs_perms_table:
        if not fs_perms_table[dir].link:
            continue

        origin = dvar + dir
        if not (cpath.exists(origin) and cpath.isdir(origin) and not cpath.islink(origin)):
            continue

        link = fs_perms_table[dir].link
        if link[0] == "/":
            target = dvar + link
            ptarget = link
        else:
            target = os.path.join(os.path.dirname(origin), link)
            ptarget = os.path.join(os.path.dirname(dir), link)
        if os.path.exists(target):
            msg = "Fixup Perms: Unable to correct directory link, target already exists: %s -> %s" % (dir, ptarget)
            package_qa_handle_error("perm-link", msg, d)
            continue

        # Create path to move directory to, move it, and then setup the symlink
        bb.utils.mkdirhier(os.path.dirname(target))
        #bb.note("Fixup Perms: Rename %s -> %s" % (dir, ptarget))
        os.rename(origin, target)
        #bb.note("Fixup Perms: Link %s -> %s" % (dir, link))
        os.symlink(link, origin)

    # Second pass: apply mode/ownership entries (recursively when walk is set)
    for dir in fs_perms_table:
        if fs_perms_table[dir].link:
            continue

        origin = dvar + dir
        if not (cpath.exists(origin) and cpath.isdir(origin)):
            continue

        fix_perms(origin, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)

        if fs_perms_table[dir].walk == 'true':
            for root, dirs, files in os.walk(origin):
                for dr in dirs:
                    each_dir = os.path.join(root, dr)
                    fix_perms(each_dir, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)
                for f in files:
                    each_file = os.path.join(root, f)
                    fix_perms(each_file, fs_perms_table[dir].fmode, fs_perms_table[dir].fuid, fs_perms_table[dir].fgid, dir)
}
753
754python split_and_strip_files () {
755 import stat, errno
756
757 dvar = d.getVar('PKGD', True)
758 pn = d.getVar('PN', True)
759
760 # We default to '.debug' style
761 if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', True) == 'debug-file-directory':
762 # Single debug-file-directory style debug info
763 debugappend = ".debug"
764 debugdir = ""
765 debuglibdir = "/usr/lib/debug"
766 debugsrcdir = "/usr/src/debug"
767 elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', True) == 'debug-without-src':
768 # Original OE-core, a.k.a. ".debug", style debug info, but without sources in /usr/src/debug
769 debugappend = ""
770 debugdir = "/.debug"
771 debuglibdir = ""
772 debugsrcdir = ""
773 else:
774 # Original OE-core, a.k.a. ".debug", style debug info
775 debugappend = ""
776 debugdir = "/.debug"
777 debuglibdir = ""
778 debugsrcdir = "/usr/src/debug"
779
780 sourcefile = d.expand("${WORKDIR}/debugsources.list")
781 bb.utils.remove(sourcefile)
782
783 os.chdir(dvar)
784
785 # Return type (bits):
786 # 0 - not elf
787 # 1 - ELF
788 # 2 - stripped
789 # 4 - executable
790 # 8 - shared library
791 # 16 - kernel module
792 def isELF(path):
793 type = 0
794 ret, result = oe.utils.getstatusoutput("file \"%s\"" % path.replace("\"", "\\\""))
795
796 if ret:
797 msg = "split_and_strip_files: 'file %s' failed" % path
798 package_qa_handle_error("split-strip", msg, d)
799 return type
800
801 # Not stripped
802 if "ELF" in result:
803 type |= 1
804 if "not stripped" not in result:
805 type |= 2
806 if "executable" in result:
807 type |= 4
808 if "shared" in result:
809 type |= 8
810 return type
811
812
813 #
814 # First lets figure out all of the files we may have to process ... do this only once!
815 #
816 elffiles = {}
817 symlinks = {}
818 kernmods = []
819 inodes = {}
820 libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir", True))
821 baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir", True))
822 if (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1'):
823 for root, dirs, files in cpath.walk(dvar):
824 for f in files:
825 file = os.path.join(root, f)
826 if file.endswith(".ko") and file.find("/lib/modules/") != -1:
827 kernmods.append(file)
828 continue
829
830 # Skip debug files
831 if debugappend and file.endswith(debugappend):
832 continue
833 if debugdir and debugdir in os.path.dirname(file[len(dvar):]):
834 continue
835
836 try:
837 ltarget = cpath.realpath(file, dvar, False)
838 s = cpath.lstat(ltarget)
839 except OSError as e:
840 (err, strerror) = e.args
841 if err != errno.ENOENT:
842 raise
843 # Skip broken symlinks
844 continue
845 if not s:
846 continue
847 # Check its an excutable
848 if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) or (s[stat.ST_MODE] & stat.S_IXOTH) \
849 or ((file.startswith(libdir) or file.startswith(baselibdir)) and ".so" in f):
850 # If it's a symlink, and points to an ELF file, we capture the readlink target
851 if cpath.islink(file):
852 target = os.readlink(file)
853 if isELF(ltarget):
854 #bb.note("Sym: %s (%d)" % (ltarget, isELF(ltarget)))
855 symlinks[file] = target
856 continue
857
858 # It's a file (or hardlink), not a link
859 # ...but is it ELF, and is it already stripped?
860 elf_file = isELF(file)
861 if elf_file & 1:
862 if elf_file & 2:
863 if 'already-stripped' in (d.getVar('INSANE_SKIP_' + pn, True) or "").split():
864 bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn))
865 else:
866 msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn)
867 package_qa_handle_error("already-stripped", msg, d)
868 continue
869
870 # At this point we have an unstripped elf file. We need to:
871 # a) Make sure any file we strip is not hardlinked to anything else outside this tree
872 # b) Only strip any hardlinked file once (no races)
873 # c) Track any hardlinks between files so that we can reconstruct matching debug file hardlinks
874
875 # Use a reference of device ID and inode number to indentify files
876 file_reference = "%d_%d" % (s.st_dev, s.st_ino)
877 if file_reference in inodes:
878 os.unlink(file)
879 os.link(inodes[file_reference][0], file)
880 inodes[file_reference].append(file)
881 else:
882 inodes[file_reference] = [file]
883 # break hardlink
884 bb.utils.copyfile(file, file)
885 elffiles[file] = elf_file
886 # Modified the file so clear the cache
887 cpath.updatecache(file)
888
889 #
890 # First lets process debug splitting
891 #
892 if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', True) != '1'):
893 for file in elffiles:
894 src = file[len(dvar):]
895 dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
896 fpath = dvar + dest
897
898 # Split the file...
899 bb.utils.mkdirhier(os.path.dirname(fpath))
900 #bb.note("Split %s -> %s" % (file, fpath))
901 # Only store off the hard link reference if we successfully split!
902 splitdebuginfo(file, fpath, debugsrcdir, sourcefile, d)
903
904 # Hardlink our debug symbols to the other hardlink copies
905 for ref in inodes:
906 if len(inodes[ref]) == 1:
907 continue
908 for file in inodes[ref][1:]:
909 src = file[len(dvar):]
910 dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
911 fpath = dvar + dest
912 target = inodes[ref][0][len(dvar):]
913 ftarget = dvar + debuglibdir + os.path.dirname(target) + debugdir + "/" + os.path.basename(target) + debugappend
914 bb.utils.mkdirhier(os.path.dirname(fpath))
915 #bb.note("Link %s -> %s" % (fpath, ftarget))
916 os.link(ftarget, fpath)
917
918 # Create symlinks for all cases we were able to split symbols
919 for file in symlinks:
920 src = file[len(dvar):]
921 dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
922 fpath = dvar + dest
923 # Skip it if the target doesn't exist
924 try:
925 s = os.stat(fpath)
926 except OSError as e:
927 (err, strerror) = e.args
928 if err != errno.ENOENT:
929 raise
930 continue
931
932 ltarget = symlinks[file]
933 lpath = os.path.dirname(ltarget)
934 lbase = os.path.basename(ltarget)
935 ftarget = ""
936 if lpath and lpath != ".":
937 ftarget += lpath + debugdir + "/"
938 ftarget += lbase + debugappend
939 if lpath.startswith(".."):
940 ftarget = os.path.join("..", ftarget)
941 bb.utils.mkdirhier(os.path.dirname(fpath))
942 #bb.note("Symlink %s -> %s" % (fpath, ftarget))
943 os.symlink(ftarget, fpath)
944
945 # Process the debugsrcdir if requested...
946 # This copies and places the referenced sources for later debugging...
947 copydebugsources(debugsrcdir, d)
948 #
949 # End of debug splitting
950 #
951
952 #
953 # Now lets go back over things and strip them
954 #
955 if (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1'):
956 strip = d.getVar("STRIP", True)
957 sfiles = []
958 for file in elffiles:
959 elf_file = int(elffiles[file])
960 #bb.note("Strip %s" % file)
961 sfiles.append((file, elf_file, strip))
962 for f in kernmods:
963 sfiles.append((f, 16, strip))
964
965 oe.utils.multiprocess_exec(sfiles, oe.package.runstrip)
966
967 #
968 # End of strip
969 #
970}
971
python populate_packages () {
    # Split the files installed in PKGD into per-package staging trees
    # under PKGDEST/<pkg>, driven by each package's FILES_<pkg> globs.
    # Also sanity-checks PACKAGES (duplicates, license exclusion) and
    # reports files that were installed but not shipped by any package.
    import glob, re

    workdir = d.getVar('WORKDIR', True)
    outdir = d.getVar('DEPLOY_DIR', True)
    dvar = d.getVar('PKGD', True)
    packages = d.getVar('PACKAGES', True)
    pn = d.getVar('PN', True)

    bb.utils.mkdirhier(outdir)
    os.chdir(dvar)

    # Sanity check PACKAGES for duplicates and for LICENSE_EXCLUSION
    # Sanity should be moved to sanity.bbclass once we have the infrastructure
    package_list = []

    for pkg in packages.split():
        if d.getVar('LICENSE_EXCLUSION-' + pkg, True):
            msg = "%s has an incompatible license. Excluding from packaging." % pkg
            package_qa_handle_error("incompatible-license", msg, d)
        if pkg in package_list:
            msg = "%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg
            package_qa_handle_error("packages-list", msg, d)
        else:
            package_list.append(pkg)
    d.setVar('PACKAGES', ' '.join(package_list))
    pkgdest = d.getVar('PKGDEST', True)

    # Relative paths already copied into a package tree ('./...' form)
    seen = []

    # os.mkdir masks the permissions with umask so we have to unset it first
    oldumask = os.umask(0)

    def mkdir(src, dest, p):
        # Recreate directory 'p' under dest, preserving the mode and
        # ownership it has under src.
        src = os.path.join(src, p)
        dest = os.path.join(dest, p)
        fstat = cpath.stat(src)
        os.mkdir(dest, fstat.st_mode)
        os.chown(dest, fstat.st_uid, fstat.st_gid)
        if p not in seen:
            seen.append(p)
        cpath.updatecache(dest)

    def mkdir_recurse(src, dest, paths):
        # Create every missing intermediate directory of 'paths' under dest.
        if cpath.exists(dest + '/' + paths):
            return
        while paths.startswith("./"):
            paths = paths[2:]
        p = "."
        for c in paths.split("/"):
            p = os.path.join(p, c)
            if not cpath.exists(os.path.join(dest, p)):
                mkdir(src, dest, p)

    for pkg in package_list:
        root = os.path.join(pkgdest, pkg)
        bb.utils.mkdirhier(root)

        filesvar = d.getVar('FILES_%s' % pkg, True) or ""
        if "//" in filesvar:
            msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg
            package_qa_handle_error("files-invalid", msg, d)
            # Fix: str.replace() returns a new string; the original code
            # discarded the result, so the promised repair never happened.
            filesvar = filesvar.replace("//", "/")

        origfiles = filesvar.split()
        files = []
        for file in origfiles:
            # Normalise every entry to the './path' form used by 'seen'
            if os.path.isabs(file):
                file = '.' + file
            if not file.startswith("./"):
                file = './' + file
            globbed = glob.glob(file)
            if globbed:
                if [file] != globbed:
                    files += globbed
                    continue
            files.append(file)

        for file in files:
            if not cpath.islink(file):
                if cpath.isdir(file):
                    # Directories are walked by appending their children
                    # to the list we are iterating over.
                    newfiles = [os.path.join(file, x) for x in os.listdir(file)]
                    if newfiles:
                        files += newfiles
                        continue
            if (not cpath.islink(file)) and (not cpath.exists(file)):
                continue
            if file in seen:
                continue
            seen.append(file)

            if d.getVar('LICENSE_EXCLUSION-' + pkg, True):
                continue

            if cpath.isdir(file) and not cpath.islink(file):
                mkdir_recurse(dvar, root, file)
                continue

            mkdir_recurse(dvar, root, os.path.dirname(file))
            fpath = os.path.join(root, file)
            if not cpath.islink(file):
                # Regular file: hardlink into the package tree and copy
                # permissions/ownership from the source.
                os.link(file, fpath)
                fstat = cpath.stat(file)
                os.chmod(fpath, fstat.st_mode)
                os.chown(fpath, fstat.st_uid, fstat.st_gid)
                continue
            # Symlink: copy it as-is
            ret = bb.utils.copyfile(file, fpath)
            if ret is False or ret == 0:
                raise bb.build.FuncFailed("File population failed")

    os.umask(oldumask)
    os.chdir(workdir)

    # Anything left under PKGD that no package claimed is "unshipped"
    unshipped = []
    for root, dirs, files in cpath.walk(dvar):
        dir = root[len(dvar):]
        if not dir:
            dir = os.sep
        for f in (files + dirs):
            path = os.path.join(dir, f)
            if ('.' + path) not in seen:
                unshipped.append(path)

    if unshipped != []:
        msg = pn + ": Files/directories were installed but not shipped"
        if "installed-vs-shipped" in (d.getVar('INSANE_SKIP_' + pn, True) or "").split():
            bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn)
        else:
            for f in unshipped:
                msg = msg + "\n  " + f
            package_qa_handle_error("installed-vs-shipped", msg, d)
}
# Ensure ${D} exists and is the cwd when populate_packages starts
populate_packages[dirs] = "${D}"
1105
python package_fixsymlinks () {
    # Find symlinks whose target is not shipped in the same package.
    # If another package ships the target, add an RDEPENDS on it so the
    # link cannot dangle at install time; otherwise just warn.
    import errno
    pkgdest = d.getVar('PKGDEST', True)
    # Consistency fix: every other getVar in this file passes the expand
    # flag; this one was missing it.
    packages = d.getVar("PACKAGES", True).split()

    dangling_links = {}
    pkg_files = {}
    for pkg in packages:
        dangling_links[pkg] = []
        pkg_files[pkg] = []
        inst_root = os.path.join(pkgdest, pkg)
        for path in pkgfiles[pkg]:
            rpath = path[len(inst_root):]
            pkg_files[pkg].append(rpath)
            rtarget = cpath.realpath(path, inst_root, True, assume_dir = True)
            if not cpath.lexists(rtarget):
                # Link resolves to something this package did not install
                dangling_links[pkg].append(os.path.normpath(rtarget[len(inst_root):]))

    newrdepends = {}
    for pkg in dangling_links:
        for l in dangling_links[pkg]:
            found = False
            bb.debug(1, "%s contains dangling link %s" % (pkg, l))
            for p in packages:
                if l in pkg_files[p]:
                    found = True
                    bb.debug(1, "target found in %s" % p)
                    if p == pkg:
                        # Target is in this very package; nothing to add
                        break
                    if pkg not in newrdepends:
                        newrdepends[pkg] = []
                    newrdepends[pkg].append(p)
                    break
            if not found:
                bb.note("%s contains dangling symlink to %s" % (pkg, l))

    # Merge the discovered dependencies into each package's RDEPENDS
    for pkg in newrdepends:
        rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg, True) or "")
        for p in newrdepends[pkg]:
            if p not in rdepends:
                rdepends[p] = []
        d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False))
}
1149
1150
python package_package_name_hook() {
    """
    Default no-op package renaming hook.

    Classes may override package_name_hook (exported via
    EXPORT_FUNCTIONS) to rewrite package names by setting PKG;
    debian.bbclass provides a real implementation.
    """
    pass
}
1158
# Allow inheriting classes (e.g. debian.bbclass) to override the hook
EXPORT_FUNCTIONS package_name_hook


# Staging area for the pkgdata files written by emit_pkgdata
PKGDESTWORK = "${WORKDIR}/pkgdata"
1163
python emit_pkgdata() {
    # Emit the pkgdata files consumed by other recipes and tools:
    #   ${PKGDESTWORK}/${PN}              - "PACKAGES: ..." list for this recipe
    #   ${PKGDESTWORK}/runtime/<pkg>      - per-package variables (PKGV, RDEPENDS,
    #                                       FILES_INFO, PKGSIZE, scripts, ...)
    #   ${PKGDESTWORK}/runtime-reverse/   - symlink from the final (renamed) name
    #   ${PKGDESTWORK}/runtime-rprovides/ - symlinks for RPROVIDES lookup
    from glob import glob
    import json

    def write_if_exists(f, pkg, var):
        # Write "VAR_pkg: value" if the package-specific variable is set,
        # otherwise fall back to the plain VAR.  Returns the value written
        # (or None), so callers can reuse it (see rprov below).
        def encode(str):
            # Escape embedded newlines etc. so each entry stays one line
            import codecs
            c = codecs.getencoder("string_escape")
            return c(str)[0]

        val = d.getVar('%s_%s' % (var, pkg), True)
        if val:
            f.write('%s_%s: %s\n' % (var, pkg, encode(val)))
            return val
        val = d.getVar('%s' % (var), True)
        if val:
            f.write('%s: %s\n' % (var, encode(val)))
        return val

    def write_extra_pkgs(variants, pn, packages, pkgdatadir):
        # For multilib/kernel variants, record the variant-prefixed
        # package list under a variant-prefixed recipe name.
        for variant in variants:
            with open("%s/%s-%s" % (pkgdatadir, variant, pn), 'w') as fd:
                fd.write("PACKAGES: %s\n" % ' '.join(
                    map(lambda pkg: '%s-%s' % (variant, pkg), packages.split())))

    def write_extra_runtime_pkgs(variants, packages, pkgdatadir):
        # Minimal runtime entries mapping variant-prefixed names back to
        # the base package name.
        for variant in variants:
            for pkg in packages.split():
                ml_pkg = "%s-%s" % (variant, pkg)
                subdata_file = "%s/runtime/%s" % (pkgdatadir, ml_pkg)
                with open(subdata_file, 'w') as fd:
                    fd.write("PKG_%s: %s" % (ml_pkg, pkg))

    packages = d.getVar('PACKAGES', True)
    pkgdest = d.getVar('PKGDEST', True)
    pkgdatadir = d.getVar('PKGDESTWORK', True)

    # Take shared lock since we're only reading, not writing
    lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"), True)

    data_file = pkgdatadir + d.expand("/${PN}" )
    f = open(data_file, 'w')
    f.write("PACKAGES: %s\n" % packages)
    f.close()

    pn = d.getVar('PN', True)
    global_variants = (d.getVar('MULTILIB_GLOBAL_VARIANTS', True) or "").split()
    variants = (d.getVar('MULTILIB_VARIANTS', True) or "").split()

    if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
        write_extra_pkgs(variants, pn, packages, pkgdatadir)

    if (bb.data.inherits_class('allarch', d) and not bb.data.inherits_class('packagegroup', d)):
        write_extra_pkgs(global_variants, pn, packages, pkgdatadir)

    # NOTE(review): workdir is never used in the rest of this function.
    workdir = d.getVar('WORKDIR', True)

    for pkg in packages.split():
        # PKG_<pkg> is the final (possibly renamed) package name
        pkgval = d.getVar('PKG_%s' % pkg, True)
        if pkgval is None:
            pkgval = pkg
            d.setVar('PKG_%s' % pkg, pkg)

        pkgdestpkg = os.path.join(pkgdest, pkg)
        files = {}
        total_size = 0
        # NOTE: 'f' here shadows the (already closed) file handle above.
        for f in pkgfiles[pkg]:
            relpth = os.path.relpath(f, pkgdestpkg)
            fstat = os.lstat(f)
            total_size += fstat.st_size
            files[os.sep + relpth] = fstat.st_size
        d.setVar('FILES_INFO', json.dumps(files))

        subdata_file = pkgdatadir + "/runtime/%s" % pkg
        sf = open(subdata_file, 'w')
        write_if_exists(sf, pkg, 'PN')
        write_if_exists(sf, pkg, 'PE')
        write_if_exists(sf, pkg, 'PV')
        write_if_exists(sf, pkg, 'PR')
        write_if_exists(sf, pkg, 'PKGE')
        write_if_exists(sf, pkg, 'PKGV')
        write_if_exists(sf, pkg, 'PKGR')
        write_if_exists(sf, pkg, 'LICENSE')
        write_if_exists(sf, pkg, 'DESCRIPTION')
        write_if_exists(sf, pkg, 'SUMMARY')
        write_if_exists(sf, pkg, 'RDEPENDS')
        rprov = write_if_exists(sf, pkg, 'RPROVIDES')
        write_if_exists(sf, pkg, 'RRECOMMENDS')
        write_if_exists(sf, pkg, 'RSUGGESTS')
        write_if_exists(sf, pkg, 'RREPLACES')
        write_if_exists(sf, pkg, 'RCONFLICTS')
        write_if_exists(sf, pkg, 'SECTION')
        write_if_exists(sf, pkg, 'PKG')
        write_if_exists(sf, pkg, 'ALLOW_EMPTY')
        write_if_exists(sf, pkg, 'FILES')
        write_if_exists(sf, pkg, 'pkg_postinst')
        write_if_exists(sf, pkg, 'pkg_postrm')
        write_if_exists(sf, pkg, 'pkg_preinst')
        write_if_exists(sf, pkg, 'pkg_prerm')
        write_if_exists(sf, pkg, 'FILERPROVIDESFLIST')
        write_if_exists(sf, pkg, 'FILES_INFO')
        for dfile in (d.getVar('FILERPROVIDESFLIST_' + pkg, True) or "").split():
            write_if_exists(sf, pkg, 'FILERPROVIDES_' + dfile)

        write_if_exists(sf, pkg, 'FILERDEPENDSFLIST')
        for dfile in (d.getVar('FILERDEPENDSFLIST_' + pkg, True) or "").split():
            write_if_exists(sf, pkg, 'FILERDEPENDS_' + dfile)

        sf.write('%s_%s: %d\n' % ('PKGSIZE', pkg, total_size))
        sf.close()

        # Symlinks needed for rprovides lookup
        if rprov:
            for p in rprov.strip().split():
                subdata_sym = pkgdatadir + "/runtime-rprovides/%s/%s" % (p, pkg)
                bb.utils.mkdirhier(os.path.dirname(subdata_sym))
                oe.path.symlink("../../runtime/%s" % pkg, subdata_sym, True)

        allow_empty = d.getVar('ALLOW_EMPTY_%s' % pkg, True)
        if not allow_empty:
            allow_empty = d.getVar('ALLOW_EMPTY', True)
        root = "%s/%s" % (pkgdest, pkg)
        os.chdir(root)
        g = glob('*')
        # Only packages with content (or explicit ALLOW_EMPTY) get marked
        # as packaged.
        if g or allow_empty == "1":
            # Symlinks needed for reverse lookups (from the final package name)
            subdata_sym = pkgdatadir + "/runtime-reverse/%s" % pkgval
            oe.path.symlink("../runtime/%s" % pkg, subdata_sym, True)

            packagedfile = pkgdatadir + '/runtime/%s.packaged' % pkg
            open(packagedfile, 'w').close()

    if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
        write_extra_runtime_pkgs(variants, packages, pkgdatadir)

    if bb.data.inherits_class('allarch', d) and not bb.data.inherits_class('packagegroup', d):
        write_extra_runtime_pkgs(global_variants, packages, pkgdatadir)

    bb.utils.unlockfile(lf)
}
# Pre-create the pkgdata subdirectories emit_pkgdata writes into
emit_pkgdata[dirs] = "${PKGDESTWORK}/runtime ${PKGDESTWORK}/runtime-reverse ${PKGDESTWORK}/runtime-rprovides"
1305
ldconfig_postinst_fragment() {
# Refresh the dynamic linker cache, but only when running on the target
# itself ($D is set during offline/image-time installs, where running the
# host's ldconfig would be wrong).
if [ -z "$D" ] && [ -x /sbin/ldconfig ]; then
	/sbin/ldconfig
fi
}
1311
# Command line for the rpmdeps helper used by package_do_filedeps to
# extract per-file provides/requires information.
RPMDEPS = "${STAGING_LIBDIR_NATIVE}/rpm/bin/rpmdeps-oecore --macros ${STAGING_LIBDIR_NATIVE}/rpm/macros --define '_rpmfc_magic_path ${STAGING_DIR_NATIVE}${datadir_native}/misc/magic.mgc' --rpmpopt ${STAGING_LIBDIR_NATIVE}/rpm/rpmpopt"

# Collect perfile run-time dependency metadata
# Output:
#  FILERPROVIDESFLIST_pkg - list of all files w/ deps
#  FILERPROVIDES_filepath_pkg - per file dep
#
#  FILERDEPENDSFLIST_pkg - list of all files w/ deps
#  FILERDEPENDS_filepath_pkg - per file dep
1321
python package_do_filedeps() {
    # Run the rpmdeps helper over every package's files (in parallel,
    # batched) and store the per-file provides/requires results in the
    # FILERPROVIDES*/FILERDEPENDS* variables.
    if d.getVar('SKIP_FILEDEPS', True) == '1':
        return

    pkgdest = d.getVar('PKGDEST', True)
    packages = d.getVar('PACKAGES', True)
    rpmdeps = d.getVar('RPMDEPS', True)

    def chunks(seq, size):
        # Slice 'seq' into consecutive batches of at most 'size' entries
        return [seq[pos:pos + size] for pos in range(0, len(seq), size)]

    pkglist = []
    for pkg in packages.split():
        if d.getVar('SKIP_FILEDEPS_' + pkg, True) == '1':
            continue
        # Debug/doc/locale/kernel-module packages carry no useful file deps
        if pkg.endswith(('-dbg', '-doc')) or pkg.startswith('kernel-module-'):
            continue
        if ('-locale-' in pkg or '-localedata-' in pkg
                or '-gconv-' in pkg or '-charmap-' in pkg):
            continue
        for batch in chunks(pkgfiles[pkg], 100):
            pkglist.append((pkg, batch, rpmdeps, pkgdest))

    processed = oe.utils.multiprocess_exec(pkglist, oe.package.filedeprunner)

    provides_files = {}
    requires_files = {}

    for (pkg, provides, requires) in processed:
        provides_files.setdefault(pkg, [])
        requires_files.setdefault(pkg, [])

        for fn in provides:
            provides_files[pkg].append(fn)
            d.setVar("FILERPROVIDES_" + fn + "_" + pkg, " ".join(provides[fn]))

        for fn in requires:
            requires_files[pkg].append(fn)
            d.setVar("FILERDEPENDS_" + fn + "_" + pkg, " ".join(requires[fn]))

    # Record which files carried dependencies, per package
    for pkg in requires_files:
        d.setVar("FILERDEPENDSFLIST_" + pkg, " ".join(requires_files[pkg]))
    for pkg in provides_files:
        d.setVar("FILERPROVIDESFLIST_" + pkg, " ".join(provides_files[pkg]))
}
1370
# Global (read-only) shlib provider registry and this recipe's work area
SHLIBSDIRS = "${PKGDATA_DIR}/${MLPREFIX}shlibs2"
SHLIBSWORKDIR = "${PKGDESTWORK}/${MLPREFIX}shlibs2"
1373
python package_do_shlibs() {
    # Record the shared-library sonames each package provides (written to
    # SHLIBSWORKDIR/<pkg>.list) and resolve the NEEDED entries of its
    # binaries against the global provider registry, writing the resulting
    # package dependencies to PKGDEST/<pkg>.shlibdeps for read_shlibdeps.
    import re, pipes
    import subprocess as sub

    exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', 0)
    if exclude_shlibs:
        bb.note("not generating shlibs")
        return

    lib_re = re.compile("^.*\.so")
    libdir_re = re.compile(".*/%s$" % d.getVar('baselib', True))

    packages = d.getVar('PACKAGES', True)
    targetos = d.getVar('TARGET_OS', True)

    # NOTE(review): workdir is never used below.
    workdir = d.getVar('WORKDIR', True)

    ver = d.getVar('PKGV', True)
    if not ver:
        msg = "PKGV not defined"
        package_qa_handle_error("pkgv-undefined", msg, d)
        return

    pkgdest = d.getVar('PKGDEST', True)

    shlibs_dirs = d.getVar('SHLIBSDIRS', True).split()
    shlibswork_dir = d.getVar('SHLIBSWORKDIR', True)

    # Take shared lock since we're only reading, not writing
    lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"))

    def read_shlib_providers():
        # Populate shlib_provider from other recipes' <pkg>.list files:
        # soname -> { libdir -> (providing package, version) }
        list_re = re.compile('^(.*)\.list$')
        # Go from least to most specific since the last one found wins
        for dir in reversed(shlibs_dirs):
            bb.debug(2, "Reading shlib providers in %s" % (dir))
            if not os.path.exists(dir):
                continue
            for file in os.listdir(dir):
                m = list_re.match(file)
                if m:
                    dep_pkg = m.group(1)
                    fd = open(os.path.join(dir, file))
                    lines = fd.readlines()
                    fd.close()
                    for l in lines:
                        # Each line is "soname:libdir:version"
                        s = l.strip().split(":")
                        if s[0] not in shlib_provider:
                            shlib_provider[s[0]] = {}
                        shlib_provider[s[0]][s[1]] = (dep_pkg, s[2])

    def linux_so(file, needed, sonames, renames, pkgver):
        # Parse objdump -p output for one ELF file: collect NEEDED deps
        # (with the file's RPATH) and its SONAME.  Returns whether the
        # package will need an ldconfig call in its postinst.
        needs_ldconfig = False
        ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
        cmd = d.getVar('OBJDUMP', True) + " -p " + pipes.quote(file) + " 2>/dev/null"
        fd = os.popen(cmd)
        lines = fd.readlines()
        fd.close()
        rpath = []
        for l in lines:
            m = re.match("\s+RPATH\s+([^\s]*)", l)
            if m:
                rpaths = m.group(1).replace("$ORIGIN", ldir).split(":")
                rpath = map(os.path.normpath, rpaths)
        for l in lines:
            m = re.match("\s+NEEDED\s+([^\s]*)", l)
            if m:
                dep = m.group(1)
                if dep not in needed[pkg]:
                    needed[pkg].append((dep, file, rpath))
            m = re.match("\s+SONAME\s+([^\s]*)", l)
            if m:
                this_soname = m.group(1)
                prov = (this_soname, ldir, pkgver)
                if not prov in sonames:
                    # if library is private (only used by package) then do not build shlib for it
                    if not private_libs or this_soname not in private_libs:
                        sonames.append(prov)
                if libdir_re.match(os.path.dirname(file)):
                    needs_ldconfig = True
                if snap_symlinks and (os.path.basename(file) != this_soname):
                    renames.append((file, os.path.join(os.path.dirname(file), this_soname)))
        return needs_ldconfig

    def darwin_so(file, needed, sonames, renames, pkgver):
        # Mach-O equivalent of linux_so, using otool.
        if not os.path.exists(file):
            return
        ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')

        def get_combinations(base):
            #
            # Given a base library name, find all combinations of this split by "." and "-"
            #
            combos = []
            options = base.split(".")
            for i in range(1, len(options) + 1):
                combos.append(".".join(options[0:i]))
            options = base.split("-")
            for i in range(1, len(options) + 1):
                combos.append("-".join(options[0:i]))
            return combos

        if (file.endswith('.dylib') or file.endswith('.so')) and not pkg.endswith('-dev') and not pkg.endswith('-dbg'):
            # Drop suffix
            name = os.path.basename(file).rsplit(".",1)[0]
            # Find all combinations
            combos = get_combinations(name)
            for combo in combos:
                if not combo in sonames:
                    prov = (combo, ldir, pkgver)
                    sonames.append(prov)
        if file.endswith('.dylib') or file.endswith('.so'):
            rpath = []
            p = sub.Popen([d.expand("${HOST_PREFIX}otool"), '-l', file],stdout=sub.PIPE,stderr=sub.PIPE)
            # NOTE(review): communicate() returns (stdout, stderr); the first
            # element is bound to 'err' here, so the parsing below actually
            # consumes otool's stdout -- works, but the names are swapped.
            err, out = p.communicate()
            # If returned successfully, process the output for results
            if p.returncode == 0:
                for l in err.split("\n"):
                    l = l.strip()
                    if l.startswith('path '):
                        rpath.append(l.split()[1])

            p = sub.Popen([d.expand("${HOST_PREFIX}otool"), '-L', file],stdout=sub.PIPE,stderr=sub.PIPE)
            err, out = p.communicate()
            # If returned successfully, process the output for results
            if p.returncode == 0:
                for l in err.split("\n"):
                    l = l.strip()
                    if not l or l.endswith(":"):
                        continue
                    if "is not an object file" in l:
                        continue
                    name = os.path.basename(l.split()[0]).rsplit(".", 1)[0]
                    if name and name not in needed[pkg]:
                        needed[pkg].append((name, file, []))

    if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS', True) == "1":
        snap_symlinks = True
    else:
        snap_symlinks = False

    if (d.getVar('USE_LDCONFIG', True) or "1") == "1":
        use_ldconfig = True
    else:
        use_ldconfig = False

    needed = {}
    shlib_provider = {}
    read_shlib_providers()

    # Pass 1: record what each package provides (and needs)
    for pkg in packages.split():
        private_libs = d.getVar('PRIVATE_LIBS_' + pkg, True) or d.getVar('PRIVATE_LIBS', True) or ""
        private_libs = private_libs.split()
        needs_ldconfig = False
        bb.debug(2, "calculating shlib provides for %s" % pkg)

        pkgver = d.getVar('PKGV_' + pkg, True)
        if not pkgver:
            pkgver = d.getVar('PV_' + pkg, True)
        if not pkgver:
            pkgver = ver

        needed[pkg] = []
        sonames = list()
        renames = list()
        for file in pkgfiles[pkg]:
            soname = None
            if cpath.islink(file):
                continue
            if targetos == "darwin" or targetos == "darwin8":
                darwin_so(file, needed, sonames, renames, pkgver)
            elif os.access(file, os.X_OK) or lib_re.match(file):
                ldconfig = linux_so(file, needed, sonames, renames, pkgver)
                needs_ldconfig = needs_ldconfig or ldconfig
        for (old, new) in renames:
            bb.note("Renaming %s to %s" % (old, new))
            os.rename(old, new)
            pkgfiles[pkg].remove(old)

        shlibs_file = os.path.join(shlibswork_dir, pkg + ".list")
        if len(sonames):
            fd = open(shlibs_file, 'w')
            for s in sonames:
                if s[0] in shlib_provider and s[1] in shlib_provider[s[0]]:
                    (old_pkg, old_pkgver) = shlib_provider[s[0]][s[1]]
                    if old_pkg != pkg:
                        bb.warn('%s-%s was registered as shlib provider for %s, changing it to %s-%s because it was built later' % (old_pkg, old_pkgver, s[0], pkg, pkgver))
                bb.debug(1, 'registering %s-%s as shlib provider for %s' % (pkg, pkgver, s[0]))
                fd.write(s[0] + ':' + s[1] + ':' + s[2] + '\n')
                if s[0] not in shlib_provider:
                    shlib_provider[s[0]] = {}
                shlib_provider[s[0]][s[1]] = (pkg, pkgver)
            fd.close()
        if needs_ldconfig and use_ldconfig:
            # Append an ldconfig invocation to this package's postinst
            bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
            postinst = d.getVar('pkg_postinst_%s' % pkg, True)
            if not postinst:
                postinst = '#!/bin/sh\n'
            postinst += d.getVar('ldconfig_postinst_fragment', True)
            d.setVar('pkg_postinst_%s' % pkg, postinst)
        bb.debug(1, 'LIBNAMES: pkg %s sonames %s' % (pkg, sonames))

    bb.utils.unlockfile(lf)

    # ASSUME_SHLIBS lets the distro declare providers for libraries that
    # are not built by any recipe ("lib:pkg[_version]" entries).
    assumed_libs = d.getVar('ASSUME_SHLIBS', True)
    if assumed_libs:
        libdir = d.getVar("libdir", True)
        for e in assumed_libs.split():
            l, dep_pkg = e.split(":")
            lib_ver = None
            dep_pkg = dep_pkg.rsplit("_", 1)
            if len(dep_pkg) == 2:
                lib_ver = dep_pkg[1]
            dep_pkg = dep_pkg[0]
            if l not in shlib_provider:
                shlib_provider[l] = {}
            shlib_provider[l][libdir] = (dep_pkg, lib_ver)

    libsearchpath = [d.getVar('libdir', True), d.getVar('base_libdir', True)]

    # Pass 2: resolve each package's NEEDED entries to provider packages
    for pkg in packages.split():
        bb.debug(2, "calculating shlib requirements for %s" % pkg)

        deps = list()
        for n in needed[pkg]:
            # if n is in private libraries, don't try to search provider for it
            # this could cause problem in case some abc.bb provides private
            # /opt/abc/lib/libfoo.so.1 and contains /usr/bin/abc depending on system library libfoo.so.1
            # but skipping it is still better alternative than providing own
            # version and then adding runtime dependency for the same system library
            # NOTE(review): private_libs here still holds the value computed for
            # the *last* package of the pass-1 loop above, not this pkg's
            # PRIVATE_LIBS -- looks unintended; confirm before relying on it.
            if private_libs and n[0] in private_libs:
                bb.debug(2, '%s: Dependency %s covered by PRIVATE_LIBS' % (pkg, n[0]))
                continue
            if n[0] in shlib_provider.keys():
                shlib_provider_path = list()
                for k in shlib_provider[n[0]].keys():
                    shlib_provider_path.append(k)
                match = None
                # Prefer the binary's own rpath, then known provider dirs,
                # then the default library search path.
                for p in n[2] + shlib_provider_path + libsearchpath:
                    if p in shlib_provider[n[0]]:
                        match = p
                        break
                if match:
                    (dep_pkg, ver_needed) = shlib_provider[n[0]][match]

                    bb.debug(2, '%s: Dependency %s requires package %s (used by files: %s)' % (pkg, n[0], dep_pkg, n[1]))

                    if dep_pkg == pkg:
                        continue

                    if ver_needed:
                        dep = "%s (>= %s)" % (dep_pkg, ver_needed)
                    else:
                        dep = dep_pkg
                    if not dep in deps:
                        deps.append(dep)
                    continue
            bb.note("Couldn't find shared library provider for %s, used by files: %s" % (n[0], n[1]))

        deps_file = os.path.join(pkgdest, pkg + ".shlibdeps")
        if os.path.exists(deps_file):
            os.remove(deps_file)
        if len(deps):
            fd = open(deps_file, 'w')
            for dep in deps:
                fd.write(dep + '\n')
            fd.close()
}
1642
python package_do_pkgconfig () {
    # Track which packages provide and require which pkg-config (.pc)
    # modules.  Provided modules are published to SHLIBSWORKDIR/<pkg>.pclist;
    # required modules are resolved against all known .pclist files and the
    # result written to PKGDEST/<pkg>.pcdeps (read later by read_shlibdeps).
    import re

    packages = d.getVar('PACKAGES', True)
    # NOTE(review): workdir is never used below.
    workdir = d.getVar('WORKDIR', True)
    pkgdest = d.getVar('PKGDEST', True)

    shlibs_dirs = d.getVar('SHLIBSDIRS', True).split()
    shlibswork_dir = d.getVar('SHLIBSWORKDIR', True)

    pc_re = re.compile('(.*)\.pc$')
    var_re = re.compile('(.*)=(.*)')
    field_re = re.compile('(.*): (.*)')

    pkgconfig_provided = {}
    pkgconfig_needed = {}
    for pkg in packages.split():
        pkgconfig_provided[pkg] = []
        pkgconfig_needed[pkg] = []
        for file in pkgfiles[pkg]:
            m = pc_re.match(file)
            if m:
                # A fresh datastore is used to expand the .pc file's own
                # variable assignments when reading its fields.
                pd = bb.data.init()
                name = m.group(1)
                pkgconfig_provided[pkg].append(name)
                if not os.access(file, os.R_OK):
                    continue
                f = open(file, 'r')
                lines = f.readlines()
                f.close()
                for l in lines:
                    # "var=value" lines define substitutions...
                    m = var_re.match(l)
                    if m:
                        name = m.group(1)
                        val = m.group(2)
                        pd.setVar(name, pd.expand(val))
                        continue
                    # ...while "Field: value" lines carry the metadata
                    m = field_re.match(l)
                    if m:
                        hdr = m.group(1)
                        exp = bb.data.expand(m.group(2), pd)
                        if hdr == 'Requires':
                            pkgconfig_needed[pkg] += exp.replace(',', ' ').split()

    # Take shared lock since we're only reading, not writing
    lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"))

    # Publish this recipe's provided modules
    for pkg in packages.split():
        pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist")
        if pkgconfig_provided[pkg] != []:
            f = open(pkgs_file, 'w')
            for p in pkgconfig_provided[pkg]:
                f.write('%s\n' % p)
            f.close()

    # Go from least to most specific since the last one found wins
    for dir in reversed(shlibs_dirs):
        if not os.path.exists(dir):
            continue
        for file in os.listdir(dir):
            m = re.match('^(.*)\.pclist$', file)
            if m:
                pkg = m.group(1)
                fd = open(os.path.join(dir, file))
                lines = fd.readlines()
                fd.close()
                pkgconfig_provided[pkg] = []
                for l in lines:
                    pkgconfig_provided[pkg].append(l.rstrip())

    # Map each needed module to the package that provides it
    for pkg in packages.split():
        deps = []
        for n in pkgconfig_needed[pkg]:
            found = False
            for k in pkgconfig_provided.keys():
                if n in pkgconfig_provided[k]:
                    if k != pkg and not (k in deps):
                        deps.append(k)
                    found = True
            if found == False:
                bb.note("couldn't find pkgconfig module '%s' in any package" % n)
        deps_file = os.path.join(pkgdest, pkg + ".pcdeps")
        if len(deps):
            fd = open(deps_file, 'w')
            for dep in deps:
                fd.write(dep + '\n')
            fd.close()

    bb.utils.unlockfile(lf)
}
1733
def read_libdep_files(d):
    """Read the per-package library dependency files written by the
    shlibs/pkgconfig/CLR tasks.

    Returns a dict mapping each package in PACKAGES to a dict of
    {dependency name: version constraints} merged from the package's
    .shlibdeps, .pcdeps and .clilibdeps files (first file wins for a
    given dependency).
    """
    pkglibdeps = {}
    packages = d.getVar('PACKAGES', True).split()
    for pkg in packages:
        pkglibdeps[pkg] = {}
        for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
            depsfile = d.expand("${PKGDEST}/" + pkg + extension)
            if os.access(depsfile, os.R_OK):
                with open(depsfile) as fd:
                    lines = fd.readlines()
                for l in lines:
                    # Fix: rstrip() returns a new string; the original code
                    # discarded the result, leaving the trailing newline on l.
                    l = l.rstrip()
                    deps = bb.utils.explode_dep_versions2(l)
                    for dep in deps:
                        if not dep in pkglibdeps[pkg]:
                            pkglibdeps[pkg][dep] = deps[dep]
    return pkglibdeps
1752
python read_shlibdeps () {
    # Fold the dependencies collected in the *.shlibdeps / *.pcdeps /
    # *.clilibdeps files into each package's RDEPENDS.
    pkglibdeps = read_libdep_files(d)

    for pkg in d.getVar('PACKAGES', True).split():
        rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg, True) or "")
        for dep, constraints in pkglibdeps[pkg].items():
            # Add the dep if it's not already there, or if no comparison is set
            if dep not in rdepends:
                rdepends[dep] = []
            for constraint in constraints:
                if constraint not in rdepends[dep]:
                    rdepends[dep].append(constraint)
        d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False))
}
1768
1769python package_depchains() {
1770 """
1771 For a given set of prefix and postfix modifiers, make those packages
1772 RRECOMMENDS on the corresponding packages for its RDEPENDS.
1773
1774 Example: If package A depends upon package B, and A's .bb emits an
1775 A-dev package, this would make A-dev Recommends: B-dev.
1776
1777 If only one of a given suffix is specified, it will take the RRECOMMENDS
1778 based on the RDEPENDS of *all* other packages. If more than one of a given
1779 suffix is specified, its will only use the RDEPENDS of the single parent
1780 package.
1781 """
1782
1783 packages = d.getVar('PACKAGES', True)
1784 postfixes = (d.getVar('DEPCHAIN_POST', True) or '').split()
1785 prefixes = (d.getVar('DEPCHAIN_PRE', True) or '').split()
1786
1787 def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d):
1788
1789 #bb.note('depends for %s is %s' % (base, depends))
1790 rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg, True) or "")
1791
1792 for depend in depends:
1793 if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'):
1794 #bb.note("Skipping %s" % depend)
1795 continue
1796 if depend.endswith('-dev'):
1797 depend = depend[:-4]
1798 if depend.endswith('-dbg'):
1799 depend = depend[:-4]
1800 pkgname = getname(depend, suffix)
1801 #bb.note("Adding %s for %s" % (pkgname, depend))
1802 if pkgname not in rreclist and pkgname != pkg:
1803 rreclist[pkgname] = []
1804
1805 #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist)))
1806 d.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))
1807
1808 def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):
1809
1810 #bb.note('rdepends for %s is %s' % (base, rdepends))
1811 rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg, True) or "")
1812
1813 for depend in rdepends:
1814 if depend.find('virtual-locale-') != -1:
1815 #bb.note("Skipping %s" % depend)
1816 continue
1817 if depend.endswith('-dev'):
1818 depend = depend[:-4]
1819 if depend.endswith('-dbg'):
1820 depend = depend[:-4]
1821 pkgname = getname(depend, suffix)
1822 #bb.note("Adding %s for %s" % (pkgname, depend))
1823 if pkgname not in rreclist and pkgname != pkg:
1824 rreclist[pkgname] = []
1825
1826 #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist)))
1827 d.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))
1828
1829 def add_dep(list, dep):
1830 if dep not in list:
1831 list.append(dep)
1832
1833 depends = []
1834 for dep in bb.utils.explode_deps(d.getVar('DEPENDS', True) or ""):
1835 add_dep(depends, dep)
1836
1837 rdepends = []
1838 for pkg in packages.split():
1839 for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + pkg, True) or ""):
1840 add_dep(rdepends, dep)
1841
1842 #bb.note('rdepends is %s' % rdepends)
1843
def post_getname(name, suffix):
    # DEPCHAIN_POST style ("-dev", "-dbg"): affix appended after the name.
    return name + suffix

def pre_getname(name, suffix):
    # DEPCHAIN_PRE style: affix placed in front of the name.
    return suffix + name
1848
1849 pkgs = {}
1850 for pkg in packages.split():
1851 for postfix in postfixes:
1852 if pkg.endswith(postfix):
1853 if not postfix in pkgs:
1854 pkgs[postfix] = {}
1855 pkgs[postfix][pkg] = (pkg[:-len(postfix)], post_getname)
1856
1857 for prefix in prefixes:
1858 if pkg.startswith(prefix):
1859 if not prefix in pkgs:
1860 pkgs[prefix] = {}
1861 pkgs[prefix][pkg] = (pkg[:-len(prefix)], pre_getname)
1862
1863 if "-dbg" in pkgs:
1864 pkglibdeps = read_libdep_files(d)
1865 pkglibdeplist = []
1866 for pkg in pkglibdeps:
1867 for k in pkglibdeps[pkg]:
1868 add_dep(pkglibdeplist, k)
1869 # FIXME this should not look at PN once all task recipes inherit from task.bbclass
1870 dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS', True) == '1') or (d.getVar('PN', True) or '').startswith('packagegroup-'))
1871
1872 for suffix in pkgs:
1873 for pkg in pkgs[suffix]:
1874 if d.getVarFlag('RRECOMMENDS_' + pkg, 'nodeprrecs'):
1875 continue
1876 (base, func) = pkgs[suffix][pkg]
1877 if suffix == "-dev":
1878 pkg_adddeprrecs(pkg, base, suffix, func, depends, d)
1879 elif suffix == "-dbg":
1880 if not dbgdefaultdeps:
1881 pkg_addrrecs(pkg, base, suffix, func, pkglibdeplist, d)
1882 continue
1883 if len(pkgs[suffix]) == 1:
1884 pkg_addrrecs(pkg, base, suffix, func, rdepends, d)
1885 else:
1886 rdeps = []
1887 for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + base, True) or ""):
1888 add_dep(rdeps, dep)
1889 pkg_addrrecs(pkg, base, suffix, func, rdeps, d)
1890}
1891
1892# Since bitbake can't determine which variables are accessed during package
1893# iteration, we need to list them here:
1894PACKAGEVARS = "FILES RDEPENDS RRECOMMENDS SUMMARY DESCRIPTION RSUGGESTS RPROVIDES RCONFLICTS PKG ALLOW_EMPTY pkg_postinst pkg_postrm INITSCRIPT_NAME INITSCRIPT_PARAMS DEBIAN_NOAUTONAME ALTERNATIVE PKGE PKGV PKGR USERADD_PARAM GROUPADD_PARAM CONFFILES"
1895
def gen_packagevar(d):
    """Return the space-separated list of per-package variable names
    (FILES_<pkg>, RDEPENDS_<pkg>, ...) that do_package's checksum must
    depend on (used via do_package[vardeps]).

    Fixes: the LICENSE_EXCLUSION entry is emitted inside the per-package
    loop so every package gets one (the previous placement relied on the
    leaked loop variable and raised NameError when PACKAGES was empty,
    and only covered the last package).  Also avoids shadowing the
    builtin 'vars'.
    """
    ret = []
    pkgs = (d.getVar("PACKAGES", True) or "").split()
    pkgvars = (d.getVar("PACKAGEVARS", True) or "").split()
    for p in pkgs:
        for v in pkgvars:
            ret.append(v + "_" + p)

        # Ensure that changes to INCOMPATIBLE_LICENSE re-run do_package for
        # affected recipes.
        ret.append('LICENSE_EXCLUSION-%s' % p)
    return " ".join(ret)
1908
# The three lists below are executed by do_package in this order:
# PACKAGEBUILDPKGD, then PACKAGESPLITFUNCS, then PACKAGEFUNCS.  Other
# classes may extend them (they use ?= / +=) to hook into packaging.
PACKAGE_PREPROCESS_FUNCS ?= ""
# Functions for setting up PKGD
PACKAGEBUILDPKGD ?= " \
                perform_packagecopy \
                ${PACKAGE_PREPROCESS_FUNCS} \
                split_and_strip_files \
                fixup_perms \
                "
# Functions which split PKGD up into separate packages
PACKAGESPLITFUNCS ?= " \
                package_do_split_locales \
                populate_packages"
# Functions which process metadata based on split packages
PACKAGEFUNCS += " \
                package_fixsymlinks \
                package_name_hook \
                package_do_filedeps \
                package_do_shlibs \
                package_do_pkgconfig \
                read_shlibdeps \
                package_depchains \
                emit_pkgdata"
1931
python do_package () {
    # Top-level packaging task: sanity-check the setup, populate PKGD from D
    # (PACKAGEBUILDPKGD), split PKGD into per-package trees under PKGDEST
    # (PACKAGESPLITFUNCS), then run the metadata functions (PACKAGEFUNCS).
    #
    # Change the following version to cause sstate to invalidate the package
    # cache. This is useful if an item this class depends on changes in a
    # way that the output of this class changes. rpmdeps is a good example
    # as any change to rpmdeps requires this to be rerun.
    # PACKAGE_BBCLASS_VERSION = "1"

    # Init cachedpath: a shared stat/walk cache used by this class's helpers.
    global cpath
    cpath = oe.cachedpath.CachedPath()

    ###########################################################################
    # Sanity test the setup
    ###########################################################################

    packages = (d.getVar('PACKAGES', True) or "").split()
    if len(packages) < 1:
        bb.debug(1, "No packages to build, skipping do_package")
        return

    workdir = d.getVar('WORKDIR', True)
    outdir = d.getVar('DEPLOY_DIR', True)
    dest = d.getVar('D', True)
    dvar = d.getVar('PKGD', True)
    pn = d.getVar('PN', True)

    if not workdir or not outdir or not dest or not dvar or not pn:
        msg = "WORKDIR, DEPLOY_DIR, D, PN and PKGD all must be defined, unable to package"
        package_qa_handle_error("var-undefined", msg, d)
        return

    # Fetch PRAUTO (from the PR service, per the class header) before any
    # metadata is emitted.
    bb.build.exec_func("package_get_auto_pr", d)

    ###########################################################################
    # Optimisations
    ###########################################################################

    # Continually expanding complex expressions is inefficient, particularly
    # when we write to the datastore and invalidate the expansion cache. This
    # code pre-expands some frequently used variables

    def expandVar(x, d):
        d.setVar(x, d.getVar(x, True))

    for x in 'PN', 'PV', 'BPN', 'TARGET_SYS', 'EXTENDPRAUTO':
        expandVar(x, d)

    ###########################################################################
    # Setup PKGD (from D)
    ###########################################################################

    for f in (d.getVar('PACKAGEBUILDPKGD', True) or '').split():
        bb.build.exec_func(f, d)

    ###########################################################################
    # Split up PKGD into PKGDEST
    ###########################################################################

    # Re-create the path cache: the PACKAGEBUILDPKGD functions above modified
    # the tree, so previously cached stat results may be stale.
    cpath = oe.cachedpath.CachedPath()

    for f in (d.getVar('PACKAGESPLITFUNCS', True) or '').split():
        bb.build.exec_func(f, d)

    ###########################################################################
    # Process PKGDEST
    ###########################################################################

    # Build global list of files in each split package; consumed by the
    # PACKAGEFUNCS run below.
    global pkgfiles
    pkgfiles = {}
    packages = d.getVar('PACKAGES', True).split()
    pkgdest = d.getVar('PKGDEST', True)
    for pkg in packages:
        pkgfiles[pkg] = []
        for walkroot, dirs, files in cpath.walk(pkgdest + "/" + pkg):
            for file in files:
                pkgfiles[pkg].append(walkroot + os.sep + file)

    for f in (d.getVar('PACKAGEFUNCS', True) or '').split():
        bb.build.exec_func(f, d)
}
2013
do_package[dirs] = "${SHLIBSWORKDIR} ${PKGDESTWORK} ${D}"
# Re-run do_package whenever any of the listed functions or any per-package
# variable (via gen_packagevar) changes.
do_package[vardeps] += "${PACKAGEBUILDPKGD} ${PACKAGESPLITFUNCS} ${PACKAGEFUNCS} ${@gen_packagevar(d)}"
addtask package after do_install

# Shared lock serialising access to the packaging output directories.
PACKAGELOCK = "${STAGING_DIR}/package-output.lock"
SSTATETASKS += "do_package"
do_package[cleandirs] = "${PKGDEST} ${PKGDESTWORK}"
do_package[sstate-plaindirs] = "${PKGD} ${PKGDEST} ${PKGDESTWORK}"
do_package[sstate-lockfile-shared] = "${PACKAGELOCK}"
do_package_setscene[dirs] = "${STAGING_DIR}"
2024
python do_package_setscene () {
    # Restore do_package output from shared state instead of re-running it.
    sstate_setscene(d)
}
addtask do_package_setscene
2029
# No-op shell task: it exists so that its sstate input/output hooks below
# publish PKGDESTWORK into the global PKGDATA_DIR.
do_packagedata () {
	:
}

addtask packagedata before do_build after do_package

SSTATETASKS += "do_packagedata"
do_packagedata[sstate-inputdirs] = "${PKGDESTWORK}"
do_packagedata[sstate-outputdirs] = "${PKGDATA_DIR}"
do_packagedata[sstate-lockfile-shared] = "${PACKAGELOCK}"
do_packagedata[stamp-extra-info] = "${MACHINE}"
2041
python do_packagedata_setscene () {
    # Restore do_packagedata output from shared state instead of re-running it.
    sstate_setscene(d)
}
addtask do_packagedata_setscene
2046
2047#
2048# Helper functions for the package writing classes
2049#
2050
def mapping_rename_hook(d):
    """
    Rewrite variables to account for package renaming in things
    like debian.bbclass or manual PKG variable name changes
    """
    pkg = d.getVar("PKG", True)
    # Apply the same rename mapping to each runtime-dependency variable.
    for varname in ("RDEPENDS", "RRECOMMENDS", "RSUGGESTS"):
        runtime_mapping_rename(varname, pkg, d)
2060