summaryrefslogtreecommitdiffstats
path: root/meta/classes/package.bbclass
diff options
context:
space:
mode:
Diffstat (limited to 'meta/classes/package.bbclass')
-rw-r--r--meta/classes/package.bbclass1983
1 files changed, 1983 insertions, 0 deletions
diff --git a/meta/classes/package.bbclass b/meta/classes/package.bbclass
new file mode 100644
index 0000000000..5b1e902c07
--- /dev/null
+++ b/meta/classes/package.bbclass
@@ -0,0 +1,1983 @@
1#
2# Packaging process
3#
4# Executive summary: This class iterates over the functions listed in PACKAGEFUNCS
5# Taking D and splitting it up into the packages listed in PACKAGES, placing the
6# resulting output in PKGDEST.
7#
8# There are the following default steps but PACKAGEFUNCS can be extended:
9#
10# a) package_get_auto_pr - get PRAUTO from remote PR service
11#
12# b) perform_packagecopy - Copy D into PKGD
13#
14# c) package_do_split_locales - Split out the locale files, updates FILES and PACKAGES
15#
16# d) split_and_strip_files - split the files into runtime and debug and strip them.
17# Debug files include debug info split, and associated sources that end up in -dbg packages
18#
19# e) fixup_perms - Fix up permissions in the package before we split it.
20#
21# f) populate_packages - Split the files in PKGD into separate packages in PKGDEST/<pkgname>
22# Also triggers the binary stripping code to put files in -dbg packages.
23#
24# g) package_do_filedeps - Collect perfile run-time dependency metadata
25# The data is stored in FILER{PROVIDES,DEPENDS}_file_pkg variables with
26# a list of affected files in FILER{PROVIDES,DEPENDS}FLIST_pkg
27#
28# h) package_do_shlibs - Look at the shared libraries generated and automatically add any
29# dependencies found. Also stores the package name so anyone else using this library
30# knows which package to depend on.
31#
32# i) package_do_pkgconfig - Keep track of which packages need and provide which .pc files
33#
34# j) read_shlibdeps - Reads the stored shlibs information into the metadata
35#
36# k) package_depchains - Adds automatic dependencies to -dbg and -dev packages
37#
38# l) emit_pkgdata - saves the packaging data into PKGDATA_DIR for use in later
39# packaging steps
40
41inherit packagedata
42inherit prserv
43inherit chrpath
44
45# Need the package_qa_handle_error() in insane.bbclass
46inherit insane
47
48PKGD = "${WORKDIR}/package"
49PKGDEST = "${WORKDIR}/packages-split"
50
51LOCALE_SECTION ?= ''
52
53ALL_MULTILIB_PACKAGE_ARCHS = "${@all_multilib_tune_values(d, 'PACKAGE_ARCHS')}"
54
55# rpm is used for the per-file dependency identification
56PACKAGE_DEPENDS += "rpm-native"
57
def legitimize_package_name(s):
    """
    Make sure package names are legitimate strings.

    Converts glibc-style <UXXXX> codepoint escapes to the actual character,
    lowercases the name and maps characters that are invalid in package
    names (_ @ , /) to allowed ones.
    """
    import re

    def fixutf(m):
        cp = m.group(1)
        if cp:
            # Build a literal "\uXXXX" escape and decode it to the real
            # character.  The previous form used a raw \u escape inside the
            # format string, which is a truncated-escape SyntaxError on
            # Python 3 and relied on py2 str.decode().
            return ('\\u%s' % cp).encode('latin-1').decode('unicode_escape')

    # Handle unicode codepoints encoded as <U0123>, as in glibc locale files.
    s = re.sub('<U([0-9A-Fa-f]{1,4})>', fixutf, s)

    # Remaining package name validity fixes
    return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-')
def do_split_packages(d, root, file_regex, output_pattern, description, postinst=None, recursive=False, hook=None, extra_depends=None, aux_files_pattern=None, postrm=None, allow_dirs=False, prepend=False, match_path=False, aux_files_pattern_verbatim=None, allow_links=False):
    """
    Used in .bb files to split up dynamically generated subpackages of a
    given package, usually plugins or modules.

    Arguments:
    root           -- the path in which to search
    file_regex     -- regular expression to match searched files. Use
                      parentheses () to mark the part of this expression
                      that should be used to derive the module name (to be
                      substituted where %s is used in other function
                      arguments as noted below)
    output_pattern -- pattern to use for the package names. Must include %s.
    description    -- description to set for each package. Must include %s.
    postinst       -- postinstall script to use for all packages (as a
                      string)
    recursive      -- True to perform a recursive search - default False
    hook           -- a hook function to be called for every match. The
                      function will be called with the following arguments
                      (in the order listed):
                        f: full path to the file/directory match
                        pkg: the package name
                        file_regex: as above
                        output_pattern: as above
                        modulename: the module name derived using file_regex
    extra_depends  -- extra runtime dependencies (RDEPENDS) to be set for
                      all packages. The default value of None causes a
                      dependency on the main package (${PN}) - if you do
                      not want this, pass '' for this parameter.
    aux_files_pattern -- extra item(s) to be added to FILES for each
                      package. Can be a single string item or a list of
                      strings for multiple items.  Must include %s.
    postrm         -- postrm script to use for all packages (as a string)
    allow_dirs     -- True allow directories to be matched - default False
    prepend        -- if True, prepend created packages to PACKAGES instead
                      of the default False which appends them
    match_path     -- match file_regex on the whole relative path to the
                      root rather than just the file name
    aux_files_pattern_verbatim -- extra item(s) to be added to FILES for
                      each package, using the actual derived module name
                      rather than converting it to something legal for a
                      package name. Can be a single string item or a list
                      of strings for multiple items.  Must include %s.
    allow_links    -- True to allow symlinks to be matched - default False

    """

    dvar = d.getVar('PKGD', True)

    # If the root directory doesn't exist, don't error out later but silently do
    # no splitting.
    if not os.path.exists(dvar + root):
        return

    # In a multilib build, make sure both the package names and the extra
    # dependencies carry the multilib prefix (e.g. "lib32-").
    ml = d.getVar("MLPREFIX", True)
    if ml:
        if not output_pattern.startswith(ml):
            output_pattern = ml + output_pattern

        newdeps = []
        for dep in (extra_depends or "").split():
            if dep.startswith(ml):
                newdeps.append(dep)
            else:
                newdeps.append(ml + dep)
        if newdeps:
            extra_depends = " ".join(newdeps)


    packages = d.getVar('PACKAGES', True).split()
    split_packages = []

    # Wrap the supplied scriptlet fragments into full shell scripts.
    if postinst:
        postinst = '#!/bin/sh\n' + postinst + '\n'
    if postrm:
        postrm = '#!/bin/sh\n' + postrm + '\n'
    # Collect candidate paths, relative to root, either one level deep or
    # from a full recursive walk.
    if not recursive:
        objs = os.listdir(dvar + root)
    else:
        objs = []
        for walkroot, dirs, files in os.walk(dvar + root):
            for file in files:
                relpath = os.path.join(walkroot, file).replace(dvar + root + '/', '', 1)
                if relpath:
                    objs.append(relpath)

    # None (the default) means "depend on the main package"; '' means no
    # extra dependency at all.
    if extra_depends == None:
        extra_depends = d.getVar("PN", True)

    # Sorted iteration keeps PACKAGES ordering deterministic between builds.
    for o in sorted(objs):
        import re, stat
        if match_path:
            m = re.match(file_regex, o)
        else:
            m = re.match(file_regex, os.path.basename(o))

        if not m:
            continue
        f = os.path.join(dvar + root, o)
        # lstat so symlinks are classified as links, not their targets.
        mode = os.lstat(f).st_mode
        if not (stat.S_ISREG(mode) or (allow_links and stat.S_ISLNK(mode)) or (allow_dirs and stat.S_ISDIR(mode))):
            continue
        # Derive the package name from the regex group and register it.
        on = legitimize_package_name(m.group(1))
        pkg = output_pattern % on
        split_packages.append(pkg)
        if not pkg in packages:
            if prepend:
                packages = [pkg] + packages
            else:
                packages.append(pkg)
        oldfiles = d.getVar('FILES_' + pkg, True)
        if not oldfiles:
            # First file for this package: set FILES plus all the per-package
            # metadata (dependencies, description, scriptlets).
            the_files = [os.path.join(root, o)]
            if aux_files_pattern:
                if type(aux_files_pattern) is list:
                    for fp in aux_files_pattern:
                        the_files.append(fp % on)
                else:
                    the_files.append(aux_files_pattern % on)
            if aux_files_pattern_verbatim:
                if type(aux_files_pattern_verbatim) is list:
                    for fp in aux_files_pattern_verbatim:
                        the_files.append(fp % m.group(1))
                else:
                    the_files.append(aux_files_pattern_verbatim % m.group(1))
            d.setVar('FILES_' + pkg, " ".join(the_files))
            if extra_depends != '':
                d.appendVar('RDEPENDS_' + pkg, ' ' + extra_depends)
            d.setVar('DESCRIPTION_' + pkg, description % on)
            if postinst:
                d.setVar('pkg_postinst_' + pkg, postinst)
            if postrm:
                d.setVar('pkg_postrm_' + pkg, postrm)
        else:
            # Package already has FILES: just append this match.
            d.setVar('FILES_' + pkg, oldfiles + " " + os.path.join(root, o))
        if callable(hook):
            hook(f, pkg, file_regex, output_pattern, m.group(1))

    d.setVar('PACKAGES', ' '.join(packages))
    return split_packages
215
216PACKAGE_DEPENDS += "file-native"
217
python () {
    # Anonymous function run at parse time: wire up do_package's build-time
    # dependencies from PACKAGE_DEPENDS, or disable the dep task for
    # package-less, non-image recipes.
    if d.getVar('PACKAGES', True) != '':
        deps = ""
        for dep in (d.getVar('PACKAGE_DEPENDS', True) or "").split():
            deps += " %s:do_populate_sysroot" % dep
        d.appendVarFlag('do_package', 'depends', deps)

        # shlibs requires any DEPENDS to have already packaged for the *.list files
        d.appendVarFlag('do_package', 'deptask', " do_packagedata")

    elif not bb.data.inherits_class('image', d):
        # Nothing to package and not an image: drop the packaging dep task.
        d.setVar("PACKAGERDEPTASK", "")
}
231
def splitdebuginfo(file, debugfile, debugsrcdir, sourcefile, d):
    # Function to split a single file into two components: 'file' keeps the
    # target system binary (stripped later), 'debugfile' receives the
    # debugging information.  The two files are linked to reference each
    # other via a .gnu_debuglink section.
    #
    # sourcefile is also generated, containing a list of the debug sources
    # referenced by the binary (extracted by debugedit).
    #
    # Returns 1 when the file is skipped (kernel module), 0 otherwise.

    import stat

    objcopy = d.getVar("OBJCOPY", True)
    debugedit = d.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit")
    workdir = d.getVar("WORKDIR", True)
    workparentdir = d.getVar("DEBUGSRC_OVERRIDE_PATH", True) or os.path.dirname(os.path.dirname(workdir))

    # We ignore kernel modules, we don't generate debug info files.
    if file.find("/lib/modules/") != -1 and file.endswith(".ko"):
        return 1

    # Make sure the file is both readable and writable while we operate on
    # it; the original mode is restored at the end.  (The previous condition
    # used "or os.access(file, os.R_OK)" — missing the 'not' — which made the
    # test true for virtually every file and chmod'ed unconditionally.)
    newmode = None
    if not os.access(file, os.W_OK) or not os.access(file, os.R_OK):
        origmode = os.stat(file)[stat.ST_MODE]
        newmode = origmode | stat.S_IWRITE | stat.S_IREAD
        os.chmod(file, newmode)

    # We need to extract the debug src information here...
    if debugsrcdir:
        cmd = "'%s' -b '%s' -d '%s' -i -l '%s' '%s'" % (debugedit, workparentdir, debugsrcdir, sourcefile, file)
        (retval, output) = oe.utils.getstatusoutput(cmd)
        if retval:
            bb.fatal("debugedit failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))

    bb.utils.mkdirhier(os.path.dirname(debugfile))

    # Extract the debug information into the separate debug file.
    cmd = "'%s' --only-keep-debug '%s' '%s'" % (objcopy, file, debugfile)
    (retval, output) = oe.utils.getstatusoutput(cmd)
    if retval:
        bb.fatal("objcopy failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))

    # Set the debuglink to have the view of the file path on the target
    cmd = "'%s' --add-gnu-debuglink='%s' '%s'" % (objcopy, debugfile, file)
    (retval, output) = oe.utils.getstatusoutput(cmd)
    if retval:
        bb.fatal("objcopy failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))

    if newmode:
        os.chmod(file, origmode)

    return 0
281
def copydebugsources(debugsrcdir, d):
    # The debug src information written out to sourcefile is further processed
    # and copied to the destination (PKGD + debugsrcdir) here.

    import stat

    sourcefile = d.expand("${WORKDIR}/debugsources.list")
    if debugsrcdir and os.path.isfile(sourcefile):
        dvar = d.getVar('PKGD', True)
        strip = d.getVar("STRIP", True)
        objcopy = d.getVar("OBJCOPY", True)
        debugedit = d.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit")
        workdir = d.getVar("WORKDIR", True)
        workparentdir = os.path.dirname(os.path.dirname(workdir))
        workbasedir = os.path.basename(os.path.dirname(workdir)) + "/" + os.path.basename(workdir)

        # Create debugsrcdir under PKGD, remembering which path components we
        # had to create so we can remove them again if they end up empty.
        nosuchdir = []
        basepath = dvar
        for p in debugsrcdir.split("/"):
            basepath = basepath + "/" + p
            if not cpath.exists(basepath):
                nosuchdir.append(basepath)
        bb.utils.mkdirhier(basepath)
        cpath.updatecache(basepath)

        # Pipeline: unique NUL-separated source list, drop compiler-internal
        # entries, keep only this package's files, copy them with cpio.
        processdebugsrc = "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '(<internal>|<built-in>)$' | "
        # We need to ignore files that are not actually ours
        # we do this by only paying attention to items from this package
        processdebugsrc += "fgrep -zw '%s' | "
        processdebugsrc += "(cd '%s' ; cpio -pd0mlL --no-preserve-owner '%s%s' 2>/dev/null)"

        cmd = processdebugsrc % (sourcefile, workbasedir, workparentdir, dvar, debugsrcdir)
        (retval, output) = oe.utils.getstatusoutput(cmd)
        # Can "fail" if internal headers/transient sources are attempted
        #if retval:
        #    bb.fatal("debug source copy failed with exit code %s (cmd was %s)" % (retval, cmd))

        # cpio seems to have a bug with -lL together and symbolic links are just copied, not dereferenced.
        # Work around this by manually finding and copying any symbolic links that made it through.
        cmd = "find %s%s -type l -print0 -delete | sed s#%s%s/##g | (cd '%s' ; cpio -pd0mL --no-preserve-owner '%s%s' 2>/dev/null)" % (dvar, debugsrcdir, dvar, debugsrcdir, workparentdir, dvar, debugsrcdir)
        (retval, output) = oe.utils.getstatusoutput(cmd)
        if retval:
            bb.fatal("debugsrc symlink fixup failed with exit code %s (cmd was %s)" % (retval, cmd))

        # The copy by cpio may have resulted in some empty directories! Remove these
        cmd = "find %s%s -empty -type d -delete" % (dvar, debugsrcdir)
        (retval, output) = oe.utils.getstatusoutput(cmd)
        if retval:
            bb.fatal("empty directory removal failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))

        # Also remove debugsrcdir if its empty (deepest-first, since nosuchdir
        # was recorded top-down)
        for p in nosuchdir[::-1]:
            if os.path.exists(p) and not os.listdir(p):
                os.rmdir(p)
336
337#
338# Package data handling routines
339#
340
def get_package_mapping (pkg, basepkg, d):
    """Return the renamed (PKG_*) name recorded in pkgdata for pkg, or pkg
    itself when no rename applies."""
    import oe.packagedata

    subdata = oe.packagedata.read_subpkgdata(pkg, d)
    mapping_key = "PKG_%s" % pkg

    if mapping_key not in subdata:
        return pkg

    # Have to avoid undoing the write_extra_pkgs(global_variants...)
    if bb.data.inherits_class('allarch', d) and subdata[mapping_key] == basepkg:
        return pkg

    return subdata[mapping_key]
354
def runtime_mapping_rename (varname, pkg, d):
    """Rewrite every dependency name in d[varname] through
    get_package_mapping(), keeping version constraints intact."""
    #bb.note("%s before: %s" % (varname, d.getVar(varname, True)))

    deps = bb.utils.explode_dep_versions2(d.getVar(varname, True) or "")
    renamed = {}
    for depend, constraints in deps.items():
        renamed[get_package_mapping(depend, pkg, d)] = constraints

    d.setVar(varname, bb.utils.join_deps(renamed, commasep=False))

    #bb.note("%s after: %s" % (varname, d.getVar(varname, True)))
367
368#
369# Package functions suitable for inclusion in PACKAGEFUNCS
370#
371
python package_get_auto_pr() {
    # Obtain PRAUTO (automatically incremented package revision) from the PR
    # service, honouring a per-recipe PRSERV_HOST_<pn> override.  Without a PR
    # service, pin any AUTOINC placeholder in PKGV to 0.

    # per recipe PRSERV_HOST
    pn = d.getVar('PN', True)
    host = d.getVar("PRSERV_HOST_" + pn, True)
    if not (host is None):
        d.setVar("PRSERV_HOST", host)

    if d.getVar('PRSERV_HOST', True):
        try:
            auto_pr=prserv_get_pr_auto(d)
        except Exception as e:
            bb.fatal("Can NOT get PRAUTO, exception %s" % str(e))
        if auto_pr is None:
            # Distinguish offline-lockdown from a live-service failure in the
            # error message; both are fatal so the return is never reached.
            if d.getVar('PRSERV_LOCKDOWN', True):
                bb.fatal("Can NOT get PRAUTO from lockdown exported file")
            else:
                bb.fatal("Can NOT get PRAUTO from remote PR service")
            return
        d.setVar('PRAUTO',str(auto_pr))
    else:
        pkgv = d.getVar("PKGV", True)
        if 'AUTOINC' in pkgv:
            d.setVar("PKGV", pkgv.replace("AUTOINC", "0"))
}
396
397LOCALEBASEPN ??= "${PN}"
398
python package_do_split_locales() {
    # Split ${datadir}/locale content into one <pn>-locale-<lang> package per
    # locale, setting FILES/SUMMARY/DESCRIPTION/RPROVIDES/RRECOMMENDS for each.
    if (d.getVar('PACKAGE_NO_LOCALE', True) == '1'):
        bb.debug(1, "package requested not splitting locales")
        return

    packages = (d.getVar('PACKAGES', True) or "").split()

    datadir = d.getVar('datadir', True)
    if not datadir:
        bb.note("datadir not defined")
        return

    dvar = d.getVar('PKGD', True)
    pn = d.getVar('LOCALEBASEPN', True)

    # The catch-all locale package is replaced by the per-locale packages.
    if pn + '-locale' in packages:
        packages.remove(pn + '-locale')

    localedir = os.path.join(dvar + datadir, 'locale')

    if not cpath.isdir(localedir):
        bb.debug(1, "No locale files in this package")
        return

    locales = os.listdir(localedir)

    summary = d.getVar('SUMMARY', True) or pn
    description = d.getVar('DESCRIPTION', True) or ""
    locale_section = d.getVar('LOCALE_SECTION', True)
    mlprefix = d.getVar('MLPREFIX', True) or ""
    # Sorted iteration keeps PACKAGES ordering deterministic.
    for l in sorted(locales):
        ln = legitimize_package_name(l)
        pkg = pn + '-locale-' + ln
        packages.append(pkg)
        d.setVar('FILES_' + pkg, os.path.join(datadir, 'locale', l))
        d.setVar('RRECOMMENDS_' + pkg, '%svirtual-locale-%s' % (mlprefix, ln))
        d.setVar('RPROVIDES_' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln))
        d.setVar('SUMMARY_' + pkg, '%s - %s translations' % (summary, l))
        d.setVar('DESCRIPTION_' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l))
        if locale_section:
            d.setVar('SECTION_' + pkg, locale_section)

    d.setVar('PACKAGES', ' '.join(packages))

    # Disabled by RP 18/06/07
    # Wildcards aren't supported in debian
    # They break with ipkg since glibc-locale* will mean that
    # glibc-localedata-translit* won't install as a dependency
    # for some other package which breaks meta-toolchain
    # Probably breaks since virtual-locale- isn't provided anywhere
    #rdep = (d.getVar('RDEPENDS_%s' % pn, True) or "").split()
    #rdep.append('%s-locale*' % pn)
    #d.setVar('RDEPENDS_%s' % pn, ' '.join(rdep))
}
453
python perform_packagecopy () {
    # Copy the installed tree ${D} into ${PKGD}, the working area that later
    # packaging steps split up.  tar is used rather than a plain copy so
    # sparse files and hard links are preserved.
    dest = d.getVar('D', True)
    dvar = d.getVar('PKGD', True)

    # Start by package population by taking a copy of the installed
    # files to operate on
    # Preserve sparse files and hard links
    cmd = 'tar -cf - -C %s -p . | tar -xf - -C %s' % (dest, dvar)
    (retval, output) = oe.utils.getstatusoutput(cmd)
    if retval:
        bb.fatal("file copy failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))

    # replace RPATHs for the nativesdk binaries, to make them relocatable
    if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d):
        rpath_replace (dvar, d)
}
470perform_packagecopy[cleandirs] = "${PKGD}"
471perform_packagecopy[dirs] = "${PKGD}"
472
473# We generate a master list of directories to process, we start by
474# seeding this list with reasonable defaults, then load from
475# the fs-perms.txt files
python fixup_perms () {
    # Apply the fs-perms.txt permission/ownership tables to PKGD before the
    # tree is split into packages: standard directories default to
    # 0755 root:root, configuration files can override modes/owners and turn
    # directories into symlinks.
    import pwd, grp

    # init using a string with the same format as a line as documented in
    # the fs-perms.txt file
    # <path> <mode> <uid> <gid> <walk> <fmode> <fuid> <fgid>
    # <path> link <link target>
    #
    # __str__ can be used to print out an entry in the input format
    #
    # if fs_perms_entry.path is None:
    #    an error occurred
    # if fs_perms_entry.link, you can retrieve:
    #    fs_perms_entry.path = path
    #    fs_perms_entry.link = target of link
    # if not fs_perms_entry.link, you can retrieve:
    #    fs_perms_entry.path = path
    #    fs_perms_entry.mode = expected dir mode or None
    #    fs_perms_entry.uid = expected uid or -1
    #    fs_perms_entry.gid = expected gid or -1
    #    fs_perms_entry.walk = 'true' or something else
    #    fs_perms_entry.fmode = expected file mode or None
    #    fs_perms_entry.fuid = expected file uid or -1
    #    fs_perms_entry.fgid = expected file gid or -1
    class fs_perms_entry():
        def __init__(self, line):
            lsplit = line.split()
            if len(lsplit) == 3 and lsplit[1].lower() == "link":
                self._setlink(lsplit[0], lsplit[2])
            elif len(lsplit) == 8:
                self._setdir(lsplit[0], lsplit[1], lsplit[2], lsplit[3], lsplit[4], lsplit[5], lsplit[6], lsplit[7])
            else:
                msg = "Fixup Perms: invalid config line %s" % line
                package_qa_handle_error("perm-config", msg, d)
                self.path = None
                self.link = None

        def _setdir(self, path, mode, uid, gid, walk, fmode, fuid, fgid):
            self.path = os.path.normpath(path)
            self.link = None
            self.mode = self._procmode(mode)
            self.uid = self._procuid(uid)
            self.gid = self._procgid(gid)
            self.walk = walk.lower()
            self.fmode = self._procmode(fmode)
            self.fuid = self._procuid(fuid)
            self.fgid = self._procgid(fgid)

        def _setlink(self, path, link):
            self.path = os.path.normpath(path)
            self.link = link

        def _procmode(self, mode):
            # "-" means "leave the mode alone"
            if not mode or (mode and mode == "-"):
                return None
            else:
                return int(mode,8)

        # Note uid/gid -1 has special significance in os.lchown
        def _procuid(self, uid):
            if uid is None or uid == "-":
                return -1
            elif uid.isdigit():
                return int(uid)
            else:
                return pwd.getpwnam(uid).pw_uid

        def _procgid(self, gid):
            if gid is None or gid == "-":
                return -1
            elif gid.isdigit():
                return int(gid)
            else:
                return grp.getgrnam(gid).gr_gid

        # Use for debugging the entries
        def __str__(self):
            if self.link:
                return "%s link %s" % (self.path, self.link)
            else:
                mode = "-"
                if self.mode:
                    mode = "0%o" % self.mode
                fmode = "-"
                if self.fmode:
                    fmode = "0%o" % self.fmode
                uid = self._mapugid(self.uid)
                gid = self._mapugid(self.gid)
                fuid = self._mapugid(self.fuid)
                fgid = self._mapugid(self.fgid)
                return "%s %s %s %s %s %s %s %s" % (self.path, mode, uid, gid, self.walk, fmode, fuid, fgid)

        def _mapugid(self, id):
            if id is None or id == -1:
                return "-"
            else:
                return "%d" % id

    # Fix the permission, owner and group of path
    def fix_perms(path, mode, uid, gid, dir):
        if mode and not os.path.islink(path):
            #bb.note("Fixup Perms: chmod 0%o %s" % (mode, dir))
            os.chmod(path, mode)
        # -1 is a special value that means don't change the uid/gid
        # if they are BOTH -1, don't bother to lchown
        if not (uid == -1 and gid == -1):
            #bb.note("Fixup Perms: lchown %d:%d %s" % (uid, gid, dir))
            os.lchown(path, uid, gid)

    # Return a list of configuration files based on either the default
    # files/fs-perms.txt or the contents of FILESYSTEM_PERMS_TABLES
    # paths are resolved via BBPATH
    def get_fs_perms_list(d):
        # NOTE: local name 'str' shadows the builtin within this helper.
        str = ""
        bbpath = d.getVar('BBPATH', True)
        fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES', True)
        if not fs_perms_tables:
            fs_perms_tables = 'files/fs-perms.txt'
        for conf_file in fs_perms_tables.split():
            str += " %s" % bb.utils.which(bbpath, conf_file)
        return str



    dvar = d.getVar('PKGD', True)

    fs_perms_table = {}

    # By default all of the standard directories specified in
    # bitbake.conf will get 0755 root:root.
    target_path_vars = [ 'base_prefix',
                'prefix',
                'exec_prefix',
                'base_bindir',
                'base_sbindir',
                'base_libdir',
                'datadir',
                'sysconfdir',
                'servicedir',
                'sharedstatedir',
                'localstatedir',
                'infodir',
                'mandir',
                'docdir',
                'bindir',
                'sbindir',
                'libexecdir',
                'libdir',
                'includedir',
                'oldincludedir' ]

    for path in target_path_vars:
        dir = d.getVar(path, True) or ""
        if dir == "":
            continue
        fs_perms_table[dir] = fs_perms_entry(bb.data.expand("%s 0755 root root false - - -" % (dir), d))

    # Now we actually load from the configuration files
    for conf in get_fs_perms_list(d).split():
        if os.path.exists(conf):
            f = open(conf)
            for line in f:
                if line.startswith('#'):
                    continue
                lsplit = line.split()
                if len(lsplit) == 0:
                    continue
                if len(lsplit) != 8 and not (len(lsplit) == 3 and lsplit[1].lower() == "link"):
                    msg = "Fixup perms: %s invalid line: %s" % (conf, line)
                    package_qa_handle_error("perm-line", msg, d)
                    continue
                entry = fs_perms_entry(d.expand(line))
                if entry and entry.path:
                    # Later tables override earlier entries for the same path.
                    fs_perms_table[entry.path] = entry
            f.close()

    # Debug -- list out in-memory table
    #for dir in fs_perms_table:
    #    bb.note("Fixup Perms: %s: %s" % (dir, str(fs_perms_table[dir])))

    # We process links first, so we can go back and fixup directory ownership
    # for any newly created directories
    for dir in fs_perms_table:
        if not fs_perms_table[dir].link:
            continue

        origin = dvar + dir
        if not (cpath.exists(origin) and cpath.isdir(origin) and not cpath.islink(origin)):
            continue

        link = fs_perms_table[dir].link
        if link[0] == "/":
            # Absolute link target: resolve it relative to PKGD.
            target = dvar + link
            ptarget = link
        else:
            target = os.path.join(os.path.dirname(origin), link)
            ptarget = os.path.join(os.path.dirname(dir), link)
        if os.path.exists(target):
            msg = "Fixup Perms: Unable to correct directory link, target already exists: %s -> %s" % (dir, ptarget)
            package_qa_handle_error("perm-link", msg, d)
            continue

        # Create path to move directory to, move it, and then setup the symlink
        bb.utils.mkdirhier(os.path.dirname(target))
        #bb.note("Fixup Perms: Rename %s -> %s" % (dir, ptarget))
        os.rename(origin, target)
        #bb.note("Fixup Perms: Link %s -> %s" % (dir, link))
        os.symlink(link, origin)

    # Second pass: apply mode/uid/gid to directories (and, with walk=true,
    # everything beneath them).
    for dir in fs_perms_table:
        if fs_perms_table[dir].link:
            continue

        origin = dvar + dir
        if not (cpath.exists(origin) and cpath.isdir(origin)):
            continue

        fix_perms(origin, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)

        if fs_perms_table[dir].walk == 'true':
            for root, dirs, files in os.walk(origin):
                for dr in dirs:
                    each_dir = os.path.join(root, dr)
                    fix_perms(each_dir, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)
                for f in files:
                    each_file = os.path.join(root, f)
                    fix_perms(each_file, fs_perms_table[dir].fmode, fs_perms_table[dir].fuid, fs_perms_table[dir].fgid, dir)
}
704
python split_and_strip_files () {
    # Walk PKGD, identify ELF binaries, split their debug information out
    # into the chosen debug layout (plus sources under /usr/src/debug unless
    # disabled) and finally strip the binaries in parallel.
    import stat, errno

    dvar = d.getVar('PKGD', True)
    pn = d.getVar('PN', True)

    # We default to '.debug' style
    if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', True) == 'debug-file-directory':
        # Single debug-file-directory style debug info
        debugappend = ".debug"
        debugdir = ""
        debuglibdir = "/usr/lib/debug"
        debugsrcdir = "/usr/src/debug"
    elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', True) == 'debug-without-src':
        # Original OE-core, a.k.a. ".debug", style debug info, but without sources in /usr/src/debug
        debugappend = ""
        debugdir = "/.debug"
        debuglibdir = ""
        debugsrcdir = ""
    else:
        # Original OE-core, a.k.a. ".debug", style debug info
        debugappend = ""
        debugdir = "/.debug"
        debuglibdir = ""
        debugsrcdir = "/usr/src/debug"

    sourcefile = d.expand("${WORKDIR}/debugsources.list")
    bb.utils.remove(sourcefile)

    os.chdir(dvar)

    # Return type (bits):
    #   0 - not elf
    #   1 - ELF
    #   2 - stripped
    #   4 - executable
    #   8 - shared library
    #  16 - kernel module
    def isELF(path):
        type = 0
        ret, result = oe.utils.getstatusoutput("file '%s'" % path)

        if ret:
            msg = "split_and_strip_files: 'file %s' failed" % path
            package_qa_handle_error("split-strip", msg, d)
            return type

        # Parse the output of file(1) into the bitmask above.
        if "ELF" in result:
            type |= 1
            if "not stripped" not in result:
                type |= 2
            if "executable" in result:
                type |= 4
            if "shared" in result:
                type |= 8
        return type


    #
    # First lets figure out all of the files we may have to process ... do this only once!
    #
    elffiles = {}
    symlinks = {}
    hardlinks = {}
    kernmods = []
    libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir", True))
    baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir", True))
    if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', True) != '1') and \
            (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1'):
        for root, dirs, files in cpath.walk(dvar):
            for f in files:
                file = os.path.join(root, f)
                # Kernel modules are stripped separately, never debug-split.
                if file.endswith(".ko") and file.find("/lib/modules/") != -1:
                    kernmods.append(file)
                    continue

                # Skip debug files
                if debugappend and file.endswith(debugappend):
                    continue
                if debugdir and debugdir in os.path.dirname(file[len(dvar):]):
                    continue

                try:
                    ltarget = cpath.realpath(file, dvar, False)
                    s = cpath.lstat(ltarget)
                except OSError as e:
                    (err, strerror) = e.args
                    if err != errno.ENOENT:
                        raise
                    # Skip broken symlinks
                    continue
                if not s:
                    continue
                # Check it's an executable (any execute bit set), or a likely
                # shared object under libdir/base_libdir.
                if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) or (s[stat.ST_MODE] & stat.S_IXOTH) \
                        or ((file.startswith(libdir) or file.startswith(baselibdir)) and ".so" in f):
                    # If it's a symlink, and points to an ELF file, we capture the readlink target
                    if cpath.islink(file):
                        target = os.readlink(file)
                        if isELF(ltarget):
                            #bb.note("Sym: %s (%d)" % (ltarget, isELF(ltarget)))
                            symlinks[file] = target
                        continue
                    # It's a file (or hardlink), not a link
                    # ...but is it ELF, and is it already stripped?
                    elf_file = isELF(file)
                    if elf_file & 1:
                        if elf_file & 2:
                            if 'already-stripped' in (d.getVar('INSANE_SKIP_' + pn, True) or "").split():
                                bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn))
                            else:
                                msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn)
                                package_qa_handle_error("already-stripped", msg, d)
                            continue
                        # Check if it's a hard link to something else
                        if s.st_nlink > 1:
                            file_reference = "%d_%d" % (s.st_dev, s.st_ino)
                            # Hard link to something else
                            hardlinks[file] = file_reference
                            continue
                        elffiles[file] = elf_file

    #
    # First lets process debug splitting
    #
    if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', True) != '1'):
        hardlinkmap = {}
        # For hardlinks, process only one of the files
        for file in hardlinks:
            file_reference = hardlinks[file]
            if file_reference not in hardlinkmap:
                # If this is a new file, add it as a reference, and
                # update its type, so we can fall through and split
                elffiles[file] = isELF(file)
                hardlinkmap[file_reference] = file

        for file in elffiles:
            src = file[len(dvar):]
            dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
            fpath = dvar + dest

            # Split the file...
            bb.utils.mkdirhier(os.path.dirname(fpath))
            #bb.note("Split %s -> %s" % (file, fpath))
            # Only store off the hard link reference if we successfully split!
            splitdebuginfo(file, fpath, debugsrcdir, sourcefile, d)

        # Hardlink our debug symbols to the other hardlink copies
        for file in hardlinks:
            if file not in elffiles:
                src = file[len(dvar):]
                dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
                fpath = dvar + dest
                file_reference = hardlinks[file]
                target = hardlinkmap[file_reference][len(dvar):]
                ftarget = dvar + debuglibdir + os.path.dirname(target) + debugdir + "/" + os.path.basename(target) + debugappend
                bb.utils.mkdirhier(os.path.dirname(fpath))
                #bb.note("Link %s -> %s" % (fpath, ftarget))
                os.link(ftarget, fpath)

        # Create symlinks for all cases we were able to split symbols
        for file in symlinks:
            src = file[len(dvar):]
            dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
            fpath = dvar + dest
            # Skip it if the target doesn't exist
            try:
                s = os.stat(fpath)
            except OSError as e:
                (err, strerror) = e.args
                if err != errno.ENOENT:
                    raise
                continue

            # Mirror the original symlink's shape into the debug tree.
            ltarget = symlinks[file]
            lpath = os.path.dirname(ltarget)
            lbase = os.path.basename(ltarget)
            ftarget = ""
            if lpath and lpath != ".":
                ftarget += lpath + debugdir + "/"
            ftarget += lbase + debugappend
            if lpath.startswith(".."):
                ftarget = os.path.join("..", ftarget)
            bb.utils.mkdirhier(os.path.dirname(fpath))
            #bb.note("Symlink %s -> %s" % (fpath, ftarget))
            os.symlink(ftarget, fpath)

        # Process the debugsrcdir if requested...
        # This copies and places the referenced sources for later debugging...
        copydebugsources(debugsrcdir, d)
    #
    # End of debug splitting
    #

    #
    # Now lets go back over things and strip them
    #
    if (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1'):
        strip = d.getVar("STRIP", True)
        sfiles = []
        for file in elffiles:
            elf_file = int(elffiles[file])
            #bb.note("Strip %s" % file)
            sfiles.append((file, elf_file, strip))
        for f in kernmods:
            # 16 marks the entry as a kernel module (strip-debug only).
            sfiles.append((f, 16, strip))


        # Strip in parallel, one worker per CPU.
        import multiprocessing
        nproc = multiprocessing.cpu_count()
        pool = bb.utils.multiprocessingpool(nproc)
        processed = list(pool.imap(oe.package.runstrip, sfiles))
        pool.close()
        pool.join()

    #
    # End of strip
    #
}
925
926python populate_packages () {
927 import glob, re
928
929 workdir = d.getVar('WORKDIR', True)
930 outdir = d.getVar('DEPLOY_DIR', True)
931 dvar = d.getVar('PKGD', True)
932 packages = d.getVar('PACKAGES', True)
933 pn = d.getVar('PN', True)
934
935 bb.utils.mkdirhier(outdir)
936 os.chdir(dvar)
937
938 # Sanity check PACKAGES for duplicates and for LICENSE_EXCLUSION
939 # Sanity should be moved to sanity.bbclass once we have the infrastucture
940 package_list = []
941
942 for pkg in packages.split():
943 if d.getVar('LICENSE_EXCLUSION-' + pkg, True):
944 msg = "%s has an incompatible license. Excluding from packaging." % pkg
945 package_qa_handle_error("incompatible-license", msg, d)
946 if pkg in package_list:
947 msg = "%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg
948 package_qa_handle_error("packages-list", msg, d)
949 else:
950 package_list.append(pkg)
951 d.setVar('PACKAGES', ' '.join(package_list))
952 pkgdest = d.getVar('PKGDEST', True)
953
954 seen = []
955
956 # os.mkdir masks the permissions with umask so we have to unset it first
957 oldumask = os.umask(0)
958
959 for pkg in package_list:
960 root = os.path.join(pkgdest, pkg)
961 bb.utils.mkdirhier(root)
962
963 filesvar = d.getVar('FILES_%s' % pkg, True) or ""
964 if "//" in filesvar:
965 msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg
966 package_qa_handle_error("files-invalid", msg, d)
967 filesvar.replace("//", "/")
968 files = filesvar.split()
969 for file in files:
970 if os.path.isabs(file):
971 file = '.' + file
972 if not file.startswith("./"):
973 file = './' + file
974 if not cpath.islink(file):
975 if cpath.isdir(file):
976 newfiles = [ os.path.join(file,x) for x in os.listdir(file) ]
977 if newfiles:
978 files += newfiles
979 continue
980 globbed = glob.glob(file)
981 if globbed:
982 if [ file ] != globbed:
983 files += globbed
984 continue
985 if (not cpath.islink(file)) and (not cpath.exists(file)):
986 continue
987 if file in seen:
988 continue
989 seen.append(file)
990
991 if d.getVar('LICENSE_EXCLUSION-' + pkg, True):
992 continue
993
994 def mkdir(src, dest, p):
995 src = os.path.join(src, p)
996 dest = os.path.join(dest, p)
997 fstat = cpath.stat(src)
998 os.mkdir(dest, fstat.st_mode)
999 os.chown(dest, fstat.st_uid, fstat.st_gid)
1000 if p not in seen:
1001 seen.append(p)
1002 cpath.updatecache(dest)
1003
1004 def mkdir_recurse(src, dest, paths):
1005 if cpath.exists(dest + '/' + paths):
1006 return
1007 while paths.startswith("./"):
1008 paths = paths[2:]
1009 p = "."
1010 for c in paths.split("/"):
1011 p = os.path.join(p, c)
1012 if not cpath.exists(os.path.join(dest, p)):
1013 mkdir(src, dest, p)
1014
1015 if cpath.isdir(file) and not cpath.islink(file):
1016 mkdir_recurse(dvar, root, file)
1017 continue
1018
1019 mkdir_recurse(dvar, root, os.path.dirname(file))
1020 fpath = os.path.join(root,file)
1021 if not cpath.islink(file):
1022 os.link(file, fpath)
1023 fstat = cpath.stat(file)
1024 os.chmod(fpath, fstat.st_mode)
1025 os.chown(fpath, fstat.st_uid, fstat.st_gid)
1026 continue
1027 ret = bb.utils.copyfile(file, fpath)
1028 if ret is False or ret == 0:
1029 raise bb.build.FuncFailed("File population failed")
1030
1031 os.umask(oldumask)
1032 os.chdir(workdir)
1033
1034 unshipped = []
1035 for root, dirs, files in cpath.walk(dvar):
1036 dir = root[len(dvar):]
1037 if not dir:
1038 dir = os.sep
1039 for f in (files + dirs):
1040 path = os.path.join(dir, f)
1041 if ('.' + path) not in seen:
1042 unshipped.append(path)
1043
1044 if unshipped != []:
1045 msg = pn + ": Files/directories were installed but not shipped"
1046 if "installed-vs-shipped" in (d.getVar('INSANE_SKIP_' + pn, True) or "").split():
1047 bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn)
1048 else:
1049 for f in unshipped:
1050 msg = msg + "\n " + f
1051 package_qa_handle_error("installed-vs-shipped", msg, d)
1052}
1053populate_packages[dirs] = "${D}"
1054
1055python package_fixsymlinks () {
1056 import errno
1057 pkgdest = d.getVar('PKGDEST', True)
1058 packages = d.getVar("PACKAGES").split()
1059
1060 dangling_links = {}
1061 pkg_files = {}
1062 for pkg in packages:
1063 dangling_links[pkg] = []
1064 pkg_files[pkg] = []
1065 inst_root = os.path.join(pkgdest, pkg)
1066 for path in pkgfiles[pkg]:
1067 rpath = path[len(inst_root):]
1068 pkg_files[pkg].append(rpath)
1069 rtarget = cpath.realpath(path, inst_root, True, assume_dir = True)
1070 if not cpath.lexists(rtarget):
1071 dangling_links[pkg].append(os.path.normpath(rtarget[len(inst_root):]))
1072
1073 newrdepends = {}
1074 for pkg in dangling_links:
1075 for l in dangling_links[pkg]:
1076 found = False
1077 bb.debug(1, "%s contains dangling link %s" % (pkg, l))
1078 for p in packages:
1079 if l in pkg_files[p]:
1080 found = True
1081 bb.debug(1, "target found in %s" % p)
1082 if p == pkg:
1083 break
1084 if pkg not in newrdepends:
1085 newrdepends[pkg] = []
1086 newrdepends[pkg].append(p)
1087 break
1088 if found == False:
1089 bb.note("%s contains dangling symlink to %s" % (pkg, l))
1090
1091 for pkg in newrdepends:
1092 rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg, True) or "")
1093 for p in newrdepends[pkg]:
1094 if p not in rdepends:
1095 rdepends[p] = []
1096 d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False))
1097}
1098
1099PKGDESTWORK = "${WORKDIR}/pkgdata"
1100
python emit_pkgdata() {
    # Write the pkgdata files under PKGDESTWORK: one top-level file listing
    # PACKAGES, one runtime/<pkg> file per package with its metadata, a
    # runtime-reverse/ symlink keyed by the renamed package name, and a
    # runtime/<pkg>.packaged marker for packages that will actually be built.
    from glob import glob

    def write_if_exists(f, pkg, var):
        # Write "VAR_pkg: value" if the per-package variable is set,
        # otherwise fall back to the unsuffixed variable. Values are
        # escaped so embedded newlines survive the one-line format.
        def encode(str):
            import codecs
            c = codecs.getencoder("string_escape")
            return c(str)[0]

        val = d.getVar('%s_%s' % (var, pkg), True)
        if val:
            f.write('%s_%s: %s\n' % (var, pkg, encode(val)))
            return
        val = d.getVar('%s' % (var), True)
        if val:
            f.write('%s: %s\n' % (var, encode(val)))
        return

    def get_directory_size(dir):
        # Size of the package tree in KiB via 'du -sk'; 0 for an empty dir.
        if os.listdir(dir):
            with os.popen('du -sk %s' % dir) as f:
                size = int(f.readlines()[0].split('\t')[0])
        else:
            size = 0
        return size

    def write_extra_pkgs(variants, pn, packages, pkgdatadir):
        # Emit a <variant>-<pn> pkgdata file listing the variant-prefixed
        # package names (multilib/allarch support).
        for variant in variants:
            with open("%s/%s-%s" % (pkgdatadir, variant, pn), 'w') as fd:
                fd.write("PACKAGES: %s\n" % ' '.join(
                    map(lambda pkg: '%s-%s' % (variant, pkg), packages.split())))

    def write_extra_runtime_pkgs(variants, packages, pkgdatadir):
        # Emit runtime/<variant>-<pkg> files mapping variant names back to
        # the base package name.
        for variant in variants:
            for pkg in packages.split():
                ml_pkg = "%s-%s" % (variant, pkg)
                subdata_file = "%s/runtime/%s" % (pkgdatadir, ml_pkg)
                with open(subdata_file, 'w') as fd:
                    fd.write("PKG_%s: %s" % (ml_pkg, pkg))

    packages = d.getVar('PACKAGES', True)
    pkgdest = d.getVar('PKGDEST', True)
    pkgdatadir = d.getVar('PKGDESTWORK', True)

    # Take shared lock since we're only reading, not writing
    lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"), True)

    data_file = pkgdatadir + d.expand("/${PN}" )
    f = open(data_file, 'w')
    f.write("PACKAGES: %s\n" % packages)
    f.close()

    pn = d.getVar('PN', True)
    global_variants = (d.getVar('MULTILIB_GLOBAL_VARIANTS', True) or "").split()
    variants = (d.getVar('MULTILIB_VARIANTS', True) or "").split()

    if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
        write_extra_pkgs(variants, pn, packages, pkgdatadir)

    if (bb.data.inherits_class('allarch', d) and not bb.data.inherits_class('packagegroup', d)):
        write_extra_pkgs(global_variants, pn, packages, pkgdatadir)

    workdir = d.getVar('WORKDIR', True)

    for pkg in packages.split():
        # Group this package's files by directory for FILES_INFO.
        items = {}
        for files_list in pkgfiles[pkg]:
            item_name = os.path.basename(files_list)
            item_path = os.path.dirname(files_list)
            if item_path not in items:
                items[item_path] = []
            items[item_path].append(item_name)
        subdata_file = pkgdatadir + "/runtime/%s" % pkg

        # PKG_<pkg> holds the final (possibly renamed) package name;
        # default it to the split name if unset.
        pkgval = d.getVar('PKG_%s' % pkg, True)
        if pkgval is None:
            pkgval = pkg
            d.setVar('PKG_%s' % pkg, pkg)

        d.setVar('FILES_INFO', str(items))

        sf = open(subdata_file, 'w')
        write_if_exists(sf, pkg, 'PN')
        write_if_exists(sf, pkg, 'PV')
        write_if_exists(sf, pkg, 'PR')
        write_if_exists(sf, pkg, 'PKGV')
        write_if_exists(sf, pkg, 'PKGR')
        write_if_exists(sf, pkg, 'LICENSE')
        write_if_exists(sf, pkg, 'DESCRIPTION')
        write_if_exists(sf, pkg, 'SUMMARY')
        write_if_exists(sf, pkg, 'RDEPENDS')
        write_if_exists(sf, pkg, 'RPROVIDES')
        write_if_exists(sf, pkg, 'RRECOMMENDS')
        write_if_exists(sf, pkg, 'RSUGGESTS')
        write_if_exists(sf, pkg, 'RREPLACES')
        write_if_exists(sf, pkg, 'RCONFLICTS')
        write_if_exists(sf, pkg, 'SECTION')
        write_if_exists(sf, pkg, 'PKG')
        write_if_exists(sf, pkg, 'ALLOW_EMPTY')
        write_if_exists(sf, pkg, 'FILES')
        write_if_exists(sf, pkg, 'pkg_postinst')
        write_if_exists(sf, pkg, 'pkg_postrm')
        write_if_exists(sf, pkg, 'pkg_preinst')
        write_if_exists(sf, pkg, 'pkg_prerm')
        write_if_exists(sf, pkg, 'FILERPROVIDESFLIST')
        write_if_exists(sf, pkg, 'FILES_INFO')
        for dfile in (d.getVar('FILERPROVIDESFLIST_' + pkg, True) or "").split():
            write_if_exists(sf, pkg, 'FILERPROVIDES_' + dfile)

        write_if_exists(sf, pkg, 'FILERDEPENDSFLIST')
        for dfile in (d.getVar('FILERDEPENDSFLIST_' + pkg, True) or "").split():
            write_if_exists(sf, pkg, 'FILERDEPENDS_' + dfile)

        sf.write('%s_%s: %s\n' % ('PKGSIZE', pkg, get_directory_size(pkgdest + "/%s" % pkg)))
        sf.close()

        # Symlinks needed for reverse lookups (from the final package name)
        subdata_sym = pkgdatadir + "/runtime-reverse/%s" % pkgval
        oe.path.symlink("../runtime/%s" % pkg, subdata_sym, True)

        # Mark the package as "packaged" if it has content, or if empty
        # packages are explicitly allowed for it.
        allow_empty = d.getVar('ALLOW_EMPTY_%s' % pkg, True)
        if not allow_empty:
            allow_empty = d.getVar('ALLOW_EMPTY', True)
        root = "%s/%s" % (pkgdest, pkg)
        os.chdir(root)
        g = glob('*')
        if g or allow_empty == "1":
            packagedfile = pkgdatadir + '/runtime/%s.packaged' % pkg
            open(packagedfile, 'w').close()

    if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
        write_extra_runtime_pkgs(variants, packages, pkgdatadir)

    if bb.data.inherits_class('allarch', d) and not bb.data.inherits_class('packagegroup', d):
        write_extra_runtime_pkgs(global_variants, packages, pkgdatadir)

    bb.utils.unlockfile(lf)
}
1239emit_pkgdata[dirs] = "${PKGDESTWORK}/runtime ${PKGDESTWORK}/runtime-reverse"
1240
ldconfig_postinst_fragment() {
# Refresh the dynamic linker cache, but only when installing on the
# target itself ($D unset) and only if ldconfig is actually available.
if [ -z "$D" ] && [ -x /sbin/ldconfig ]; then
	/sbin/ldconfig
fi
}
1246
1247RPMDEPS = "${STAGING_LIBDIR_NATIVE}/rpm/bin/rpmdeps-oecore --macros ${STAGING_LIBDIR_NATIVE}/rpm/macros --define '_rpmfc_magic_path ${STAGING_DIR_NATIVE}${datadir_native}/misc/magic.mgc' --rpmpopt ${STAGING_LIBDIR_NATIVE}/rpm/rpmpopt"
1248
1249# Collect perfile run-time dependency metadata
1250# Output:
1251# FILERPROVIDESFLIST_pkg - list of all files w/ deps
1252# FILERPROVIDES_filepath_pkg - per file dep
1253#
1254# FILERDEPENDSFLIST_pkg - list of all files w/ deps
1255# FILERDEPENDS_filepath_pkg - per file dep
1256
1257python package_do_filedeps() {
1258 if d.getVar('SKIP_FILEDEPS', True) == '1':
1259 return
1260
1261 pkgdest = d.getVar('PKGDEST', True)
1262 packages = d.getVar('PACKAGES', True)
1263 rpmdeps = d.getVar('RPMDEPS', True)
1264
1265 def chunks(files, n):
1266 return [files[i:i+n] for i in range(0, len(files), n)]
1267
1268 pkglist = []
1269 for pkg in packages.split():
1270 if d.getVar('SKIP_FILEDEPS_' + pkg, True) == '1':
1271 continue
1272 if pkg.endswith('-dbg') or pkg.endswith('-doc') or pkg.find('-locale-') != -1 or pkg.find('-localedata-') != -1 or pkg.find('-gconv-') != -1 or pkg.find('-charmap-') != -1 or pkg.startswith('kernel-module-'):
1273 continue
1274 for files in chunks(pkgfiles[pkg], 100):
1275 pkglist.append((pkg, files, rpmdeps, pkgdest))
1276
1277 import multiprocessing
1278 nproc = multiprocessing.cpu_count()
1279 pool = bb.utils.multiprocessingpool(nproc)
1280 processed = list(pool.imap(oe.package.filedeprunner, pkglist))
1281 pool.close()
1282 pool.join()
1283
1284 provides_files = {}
1285 requires_files = {}
1286
1287 for result in processed:
1288 (pkg, provides, requires) = result
1289
1290 if pkg not in provides_files:
1291 provides_files[pkg] = []
1292 if pkg not in requires_files:
1293 requires_files[pkg] = []
1294
1295 for file in provides:
1296 provides_files[pkg].append(file)
1297 key = "FILERPROVIDES_" + file + "_" + pkg
1298 d.setVar(key, " ".join(provides[file]))
1299
1300 for file in requires:
1301 requires_files[pkg].append(file)
1302 key = "FILERDEPENDS_" + file + "_" + pkg
1303 d.setVar(key, " ".join(requires[file]))
1304
1305 for pkg in requires_files:
1306 d.setVar("FILERDEPENDSFLIST_" + pkg, " ".join(requires_files[pkg]))
1307 for pkg in provides_files:
1308 d.setVar("FILERPROVIDESFLIST_" + pkg, " ".join(provides_files[pkg]))
1309}
1310
1311SHLIBSDIRS = "${PKGDATA_DIR}/${MLPREFIX}shlibs"
1312SHLIBSWORKDIR = "${PKGDESTWORK}/${MLPREFIX}shlibs"
1313
python package_do_shlibs() {
    # Scan each split package for shared libraries, record what each package
    # provides (SONAMEs) and needs (NEEDED entries) in SHLIBSWORKDIR, then
    # translate the needed SONAMEs into inter-package dependencies written to
    # PKGDEST/<pkg>.shlibdeps. Also adds an ldconfig postinst where required.
    import re, pipes

    exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', 0)
    if exclude_shlibs:
        bb.note("not generating shlibs")
        return

    lib_re = re.compile("^.*\.so")
    libdir_re = re.compile(".*/%s$" % d.getVar('baselib', True))

    packages = d.getVar('PACKAGES', True)
    targetos = d.getVar('TARGET_OS', True)

    workdir = d.getVar('WORKDIR', True)

    ver = d.getVar('PKGV', True)
    if not ver:
        msg = "PKGV not defined"
        package_qa_handle_error("pkgv-undefined", msg, d)
        return

    pkgdest = d.getVar('PKGDEST', True)

    shlibs_dirs = d.getVar('SHLIBSDIRS', True).split()
    shlibswork_dir = d.getVar('SHLIBSWORKDIR', True)

    # Take the (default, exclusive) lock on PACKAGELOCK: the per-package
    # .list/.ver files are written under SHLIBSWORKDIR below.
    lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"))

    def linux_so(file):
        # Parse objdump -p output for one ELF file. Records NEEDED entries
        # into needed[pkg] and SONAMEs into sonames (both from the enclosing
        # loop); returns True when the library lives in a ldconfig-searched
        # libdir and so needs an ldconfig call at postinst time.
        needs_ldconfig = False
        cmd = d.getVar('OBJDUMP', True) + " -p " + pipes.quote(file) + " 2>/dev/null"
        fd = os.popen(cmd)
        lines = fd.readlines()
        fd.close()
        for l in lines:
            m = re.match("\s+NEEDED\s+([^\s]*)", l)
            if m:
                if m.group(1) not in needed[pkg]:
                    needed[pkg].append(m.group(1))
            m = re.match("\s+SONAME\s+([^\s]*)", l)
            if m:
                this_soname = m.group(1)
                if not this_soname in sonames:
                    # if library is private (only used by package) then do not build shlib for it
                    if not private_libs or -1 == private_libs.find(this_soname):
                        sonames.append(this_soname)
                if libdir_re.match(os.path.dirname(file)):
                    needs_ldconfig = True
                if snap_symlinks and (os.path.basename(file) != this_soname):
                    renames.append((file, os.path.join(os.path.dirname(file), this_soname)))
        return needs_ldconfig

    def darwin_so(file):
        # Mach-O equivalent: derive provided/needed names from .dylib/.so
        # filenames and libtool .la dependency_libs entries, since there is
        # no objdump SONAME to read.
        if not os.path.exists(file):
            return

        def get_combinations(base):
            #
            # Given a base library name, find all combinations of this split by "." and "-"
            #
            combos = []
            options = base.split(".")
            for i in range(1, len(options) + 1):
                combos.append(".".join(options[0:i]))
            options = base.split("-")
            for i in range(1, len(options) + 1):
                combos.append("-".join(options[0:i]))
            return combos

        if (file.endswith('.dylib') or file.endswith('.so')) and not pkg.endswith('-dev') and not pkg.endswith('-dbg'):
            # Drop suffix
            name = os.path.basename(file).rsplit(".",1)[0]
            # Find all combinations
            combos = get_combinations(name)
            for combo in combos:
                if not combo in sonames:
                    sonames.append(combo)
        if file.endswith('.dylib') or file.endswith('.so'):
            lafile = file.replace(os.path.join(pkgdest, pkg), d.getVar('PKGD', True))
            # Drop suffix
            lafile = lafile.rsplit(".",1)[0]
            lapath = os.path.dirname(lafile)
            lafile = os.path.basename(lafile)
            # Find all combinations
            combos = get_combinations(lafile)
            for combo in combos:
                if os.path.exists(lapath + '/' + combo + '.la'):
                    break
            lafile = lapath + '/' + combo + '.la'

            #bb.note("Foo2: %s" % lafile)
            #bb.note("Foo %s" % file)
            if os.path.exists(lafile):
                fd = open(lafile, 'r')
                lines = fd.readlines()
                fd.close()
                for l in lines:
                    m = re.match("\s*dependency_libs=\s*'(.*)'", l)
                    if m:
                        deps = m.group(1).split(" ")
                        for dep in deps:
                            #bb.note("Trying %s for %s" % (dep, pkg))
                            name = None
                            if dep.endswith(".la"):
                                name = os.path.basename(dep).replace(".la", "")
                            elif dep.startswith("-l"):
                                name = dep.replace("-l", "lib")
                            if pkg not in needed:
                                needed[pkg] = []
                            if name and name not in needed[pkg]:
                                needed[pkg].append(name)
                            #bb.note("Adding %s for %s" % (name, pkg))

    if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS', True) == "1":
        snap_symlinks = True
    else:
        snap_symlinks = False

    if (d.getVar('USE_LDCONFIG', True) or "1") == "1":
        use_ldconfig = True
    else:
        use_ldconfig = False

    needed = {}
    shlib_provider = {}
    # Pass 1: work out what each package provides and needs.
    for pkg in packages.split():
        private_libs = d.getVar('PRIVATE_LIBS_' + pkg, True) or d.getVar('PRIVATE_LIBS', True)
        needs_ldconfig = False
        bb.debug(2, "calculating shlib provides for %s" % pkg)

        # Version used for "(>= version)" constraints: PKGV_<pkg>, then
        # PV_<pkg>, then the recipe-wide PKGV.
        pkgver = d.getVar('PKGV_' + pkg, True)
        if not pkgver:
            pkgver = d.getVar('PV_' + pkg, True)
        if not pkgver:
            pkgver = ver

        needed[pkg] = []
        sonames = list()
        renames = list()
        for file in pkgfiles[pkg]:
            soname = None
            if cpath.islink(file):
                continue
            if targetos == "darwin" or targetos == "darwin8":
                darwin_so(file)
            elif os.access(file, os.X_OK) or lib_re.match(file):
                ldconfig = linux_so(file)
                needs_ldconfig = needs_ldconfig or ldconfig
        for (old, new) in renames:
            bb.note("Renaming %s to %s" % (old, new))
            os.rename(old, new)
        # Persist this package's provided SONAMEs and version for other
        # recipes to consume via SHLIBSDIRS.
        shlibs_file = os.path.join(shlibswork_dir, pkg + ".list")
        shver_file = os.path.join(shlibswork_dir, pkg + ".ver")
        if len(sonames):
            fd = open(shlibs_file, 'w')
            for s in sonames:
                fd.write(s + '\n')
                shlib_provider[s] = (pkg, pkgver)
            fd.close()
            fd = open(shver_file, 'w')
            fd.write(pkgver + '\n')
            fd.close()
        if needs_ldconfig and use_ldconfig:
            bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
            postinst = d.getVar('pkg_postinst_%s' % pkg, True)
            if not postinst:
                postinst = '#!/bin/sh\n'
            postinst += d.getVar('ldconfig_postinst_fragment', True)
            d.setVar('pkg_postinst_%s' % pkg, postinst)

    # Merge in providers recorded by other recipes.
    list_re = re.compile('^(.*)\.list$')
    # Go from least to most specific since the last one found wins
    for dir in reversed(shlibs_dirs):
        if not os.path.exists(dir):
            continue
        for file in os.listdir(dir):
            m = list_re.match(file)
            if m:
                dep_pkg = m.group(1)
                fd = open(os.path.join(dir, file))
                lines = fd.readlines()
                fd.close()
                ver_file = os.path.join(dir, dep_pkg + '.ver')
                lib_ver = None
                if os.path.exists(ver_file):
                    fd = open(ver_file)
                    lib_ver = fd.readline().rstrip()
                    fd.close()
                for l in lines:
                    shlib_provider[l.rstrip()] = (dep_pkg, lib_ver)

    bb.utils.unlockfile(lf)

    # ASSUME_SHLIBS lets recipes declare external providers as
    # "soname:package[_version]" entries.
    assumed_libs = d.getVar('ASSUME_SHLIBS', True)
    if assumed_libs:
        for e in assumed_libs.split():
            l, dep_pkg = e.split(":")
            lib_ver = None
            dep_pkg = dep_pkg.rsplit("_", 1)
            if len(dep_pkg) == 2:
                lib_ver = dep_pkg[1]
            dep_pkg = dep_pkg[0]
            shlib_provider[l] = (dep_pkg, lib_ver)

    # Pass 2: resolve each package's needed SONAMEs to providing packages
    # and write the result to <pkg>.shlibdeps for read_shlibdeps.
    for pkg in packages.split():
        bb.debug(2, "calculating shlib requirements for %s" % pkg)

        deps = list()
        for n in needed[pkg]:
            if n in shlib_provider.keys():
                (dep_pkg, ver_needed) = shlib_provider[n]

                bb.debug(2, '%s: Dependency %s requires package %s' % (pkg, n, dep_pkg))

                if dep_pkg == pkg:
                    continue

                if ver_needed:
                    dep = "%s (>= %s)" % (dep_pkg, ver_needed)
                else:
                    dep = dep_pkg
                if not dep in deps:
                    deps.append(dep)
            else:
                bb.note("Couldn't find shared library provider for %s" % n)

        deps_file = os.path.join(pkgdest, pkg + ".shlibdeps")
        if os.path.exists(deps_file):
            os.remove(deps_file)
        if len(deps):
            fd = open(deps_file, 'w')
            for dep in deps:
                fd.write(dep + '\n')
            fd.close()
}
1551
python package_do_pkgconfig () {
    # Track which packages provide and require which pkg-config (.pc)
    # modules, publish the provides as <pkg>.pclist in SHLIBSWORKDIR, and
    # write the resolved requirements to PKGDEST/<pkg>.pcdeps for
    # read_shlibdeps to merge into RDEPENDS.
    import re

    packages = d.getVar('PACKAGES', True)
    workdir = d.getVar('WORKDIR', True)
    pkgdest = d.getVar('PKGDEST', True)

    shlibs_dirs = d.getVar('SHLIBSDIRS', True).split()
    shlibswork_dir = d.getVar('SHLIBSWORKDIR', True)

    pc_re = re.compile('(.*)\.pc$')
    var_re = re.compile('(.*)=(.*)')
    field_re = re.compile('(.*): (.*)')

    # Parse every shipped .pc file: variable assignments feed a scratch
    # datastore used to expand field values; a 'Requires:' field lists the
    # modules this package needs.
    pkgconfig_provided = {}
    pkgconfig_needed = {}
    for pkg in packages.split():
        pkgconfig_provided[pkg] = []
        pkgconfig_needed[pkg] = []
        for file in pkgfiles[pkg]:
            m = pc_re.match(file)
            if m:
                pd = bb.data.init()
                name = m.group(1)
                pkgconfig_provided[pkg].append(name)
                if not os.access(file, os.R_OK):
                    continue
                f = open(file, 'r')
                lines = f.readlines()
                f.close()
                for l in lines:
                    m = var_re.match(l)
                    if m:
                        name = m.group(1)
                        val = m.group(2)
                        pd.setVar(name, pd.expand(val))
                        continue
                    m = field_re.match(l)
                    if m:
                        hdr = m.group(1)
                        exp = bb.data.expand(m.group(2), pd)
                        if hdr == 'Requires':
                            pkgconfig_needed[pkg] += exp.replace(',', ' ').split()

    # Take the (default, exclusive) lock on PACKAGELOCK: the per-package
    # .pclist files are written under SHLIBSWORKDIR below.
    lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"))

    for pkg in packages.split():
        pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist")
        if pkgconfig_provided[pkg] != []:
            f = open(pkgs_file, 'w')
            for p in pkgconfig_provided[pkg]:
                f.write('%s\n' % p)
            f.close()

    # Go from least to most specific since the last one found wins
    for dir in reversed(shlibs_dirs):
        if not os.path.exists(dir):
            continue
        for file in os.listdir(dir):
            m = re.match('^(.*)\.pclist$', file)
            if m:
                pkg = m.group(1)
                fd = open(os.path.join(dir, file))
                lines = fd.readlines()
                fd.close()
                pkgconfig_provided[pkg] = []
                for l in lines:
                    pkgconfig_provided[pkg].append(l.rstrip())

    # Resolve needed modules to providing packages and emit <pkg>.pcdeps.
    for pkg in packages.split():
        deps = []
        for n in pkgconfig_needed[pkg]:
            found = False
            for k in pkgconfig_provided.keys():
                if n in pkgconfig_provided[k]:
                    if k != pkg and not (k in deps):
                        deps.append(k)
                    found = True
            if found == False:
                bb.note("couldn't find pkgconfig module '%s' in any package" % n)
        deps_file = os.path.join(pkgdest, pkg + ".pcdeps")
        if len(deps):
            fd = open(deps_file, 'w')
            for dep in deps:
                fd.write(dep + '\n')
            fd.close()

    bb.utils.unlockfile(lf)
}
1642
def read_libdep_files(d):
    """Read the per-package dependency files emitted by package_do_shlibs
    and package_do_pkgconfig (.shlibdeps, .pcdeps, .clilibdeps under
    PKGDEST) and return a dict mapping each package in PACKAGES to a
    {dependency: [version constraints]} dict."""
    pkglibdeps = {}
    packages = d.getVar('PACKAGES', True).split()
    for pkg in packages:
        pkglibdeps[pkg] = {}
        for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
            depsfile = d.expand("${PKGDEST}/" + pkg + extension)
            if os.access(depsfile, os.R_OK):
                fd = open(depsfile)
                lines = fd.readlines()
                fd.close()
                for l in lines:
                    # str.rstrip() returns a new string: the original called
                    # it without assigning the result, so the trailing
                    # newline was never stripped before parsing.
                    l = l.rstrip()
                    deps = bb.utils.explode_dep_versions2(l)
                    for dep in deps:
                        if not dep in pkglibdeps[pkg]:
                            pkglibdeps[pkg][dep] = deps[dep]
    return pkglibdeps
1661
1662python read_shlibdeps () {
1663 pkglibdeps = read_libdep_files(d)
1664
1665 packages = d.getVar('PACKAGES', True).split()
1666 for pkg in packages:
1667 rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg, True) or "")
1668 for dep in pkglibdeps[pkg]:
1669 # Add the dep if it's not already there, or if no comparison is set
1670 if dep not in rdepends:
1671 rdepends[dep] = []
1672 for v in pkglibdeps[pkg][dep]:
1673 if v not in rdepends[dep]:
1674 rdepends[dep].append(v)
1675 d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False))
1676}
1677
python package_depchains() {
    """
    For a given set of prefix and postfix modifiers, make those packages
    RRECOMMENDS on the corresponding packages for its RDEPENDS.

    Example: If package A depends upon package B, and A's .bb emits an
    A-dev package, this would make A-dev Recommends: B-dev.

    If only one of a given suffix is specified, it will take the RRECOMMENDS
    based on the RDEPENDS of *all* other packages. If more than one of a given
    suffix is specified, its will only use the RDEPENDS of the single parent
    package.
    """

    packages = d.getVar('PACKAGES', True)
    postfixes = (d.getVar('DEPCHAIN_POST', True) or '').split()
    prefixes = (d.getVar('DEPCHAIN_PRE', True) or '').split()

    def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d):
        # Add suffixed variants of build-time DEPENDS to pkg's RRECOMMENDS
        # (used for the -dev chain). Skips native/cross/virtual entries.

        #bb.note('depends for %s is %s' % (base, depends))
        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg, True) or "")

        for depend in depends:
            if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'):
                #bb.note("Skipping %s" % depend)
                continue
            # Strip an existing -dev/-dbg so the suffix isn't doubled up.
            if depend.endswith('-dev'):
                depend = depend[:-4]
            if depend.endswith('-dbg'):
                depend = depend[:-4]
            pkgname = getname(depend, suffix)
            #bb.note("Adding %s for %s" % (pkgname, depend))
            if pkgname not in rreclist and pkgname != pkg:
                rreclist[pkgname] = []

        #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist)))
        d.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))

    def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):
        # Add suffixed variants of runtime RDEPENDS to pkg's RRECOMMENDS.

        #bb.note('rdepends for %s is %s' % (base, rdepends))
        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg, True) or "")

        for depend in rdepends:
            if depend.find('virtual-locale-') != -1:
                #bb.note("Skipping %s" % depend)
                continue
            # Strip an existing -dev/-dbg so the suffix isn't doubled up.
            if depend.endswith('-dev'):
                depend = depend[:-4]
            if depend.endswith('-dbg'):
                depend = depend[:-4]
            pkgname = getname(depend, suffix)
            #bb.note("Adding %s for %s" % (pkgname, depend))
            if pkgname not in rreclist and pkgname != pkg:
                rreclist[pkgname] = []

        #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist)))
        d.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))

    def add_dep(list, dep):
        # Append dep to list, keeping entries unique.
        if dep not in list:
            list.append(dep)

    depends = []
    for dep in bb.utils.explode_deps(d.getVar('DEPENDS', True) or ""):
        add_dep(depends, dep)

    rdepends = []
    for pkg in packages.split():
        for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + pkg, True) or ""):
            add_dep(rdepends, dep)

    #bb.note('rdepends is %s' % rdepends)

    def post_getname(name, suffix):
        return '%s%s' % (name, suffix)
    def pre_getname(name, suffix):
        return '%s%s' % (suffix, name)

    # Group packages by which DEPCHAIN_PRE/DEPCHAIN_POST modifier they carry,
    # remembering the base package name and the name-building function.
    pkgs = {}
    for pkg in packages.split():
        for postfix in postfixes:
            if pkg.endswith(postfix):
                if not postfix in pkgs:
                    pkgs[postfix] = {}
                pkgs[postfix][pkg] = (pkg[:-len(postfix)], post_getname)

        for prefix in prefixes:
            if pkg.startswith(prefix):
                if not prefix in pkgs:
                    pkgs[prefix] = {}
                pkgs[prefix][pkg] = (pkg[:-len(prefix)], pre_getname)

    if "-dbg" in pkgs:
        # -dbg packages recommend the -dbg variants of the library deps
        # discovered by package_do_shlibs/package_do_pkgconfig.
        pkglibdeps = read_libdep_files(d)
        pkglibdeplist = []
        for pkg in pkglibdeps:
            for k in pkglibdeps[pkg]:
                add_dep(pkglibdeplist, k)
        # FIXME this should not look at PN once all task recipes inherit from task.bbclass
        dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS', True) == '1') or (d.getVar('PN', True) or '').startswith('packagegroup-'))

    for suffix in pkgs:
        for pkg in pkgs[suffix]:
            if d.getVarFlag('RRECOMMENDS_' + pkg, 'nodeprrecs'):
                continue
            (base, func) = pkgs[suffix][pkg]
            if suffix == "-dev":
                pkg_adddeprrecs(pkg, base, suffix, func, depends, d)
            elif suffix == "-dbg":
                if not dbgdefaultdeps:
                    pkg_addrrecs(pkg, base, suffix, func, pkglibdeplist, d)
                    continue
            if len(pkgs[suffix]) == 1:
                pkg_addrrecs(pkg, base, suffix, func, rdepends, d)
            else:
                rdeps = []
                for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + base, True) or ""):
                    add_dep(rdeps, dep)
                pkg_addrrecs(pkg, base, suffix, func, rdeps, d)
}
1800
1801# Since bitbake can't determine which variables are accessed during package
1802# iteration, we need to list them here:
1803PACKAGEVARS = "FILES RDEPENDS RRECOMMENDS SUMMARY DESCRIPTION RSUGGESTS RPROVIDES RCONFLICTS PKG ALLOW_EMPTY pkg_postinst pkg_postrm INITSCRIPT_NAME INITSCRIPT_PARAMS DEBIAN_NOAUTONAME ALTERNATIVE PKGE PKGV PKGR USERADD_PARAM GROUPADD_PARAM"
1804
def gen_packagevar(d):
    """Return the space-joined list of dynamically-named per-package
    variables (VAR_<pkg> for every VAR in PACKAGEVARS and pkg in PACKAGES)
    that do_package's task signature should depend on."""
    ret = []
    pkgs = (d.getVar("PACKAGES", True) or "").split()
    vars = (d.getVar("PACKAGEVARS", True) or "").split()
    for p in pkgs:
        for v in vars:
            ret.append(v + "_" + p)

        # Ensure that changes to INCOMPATIBLE_LICENSE re-run do_package for
        # affected recipes. This belongs inside the package loop so the
        # exclusion flag of *every* package is tracked, not just the last.
        ret.append('LICENSE_EXCLUSION-%s' % p)
    return " ".join(ret)
1817
1818PACKAGE_PREPROCESS_FUNCS ?= ""
1819# Functions for setting up PKGD
1820PACKAGEBUILDPKGD ?= " \
1821 perform_packagecopy \
1822 ${PACKAGE_PREPROCESS_FUNCS} \
1823 split_and_strip_files \
1824 fixup_perms \
1825 "
1826# Functions which split PKGD up into separate packages
1827PACKAGESPLITFUNCS ?= " \
1828 package_do_split_locales \
1829 populate_packages"
1830# Functions which process metadata based on split packages
1831PACKAGEFUNCS += " \
1832 package_fixsymlinks \
1833 package_name_hook \
1834 package_do_filedeps \
1835 package_do_shlibs \
1836 package_do_pkgconfig \
1837 read_shlibdeps \
1838 package_depchains \
1839 emit_pkgdata"
1840
# Main packaging task: sanity-checks the required variables, prepares PKGD
# from D (PACKAGEBUILDPKGD), splits PKGD into per-package trees under
# PKGDEST (PACKAGESPLITFUNCS), then runs the metadata-processing functions
# (PACKAGEFUNCS) over the result.
python do_package () {
    # Change the following version to cause sstate to invalidate the package
    # cache. This is useful if an item this class depends on changes in a
    # way that the output of this class changes. rpmdeps is a good example
    # as any change to rpmdeps requires this to be rerun.
    # PACKAGE_BBCLASS_VERSION = "1"

    # Init cachedpath (module-level global shared with the helper functions
    # earlier in this class)
    global cpath
    cpath = oe.cachedpath.CachedPath()

    ###########################################################################
    # Sanity test the setup
    ###########################################################################

    packages = (d.getVar('PACKAGES', True) or "").split()
    if len(packages) < 1:
        bb.debug(1, "No packages to build, skipping do_package")
        return

    workdir = d.getVar('WORKDIR', True)
    outdir = d.getVar('DEPLOY_DIR', True)
    dest = d.getVar('D', True)
    dvar = d.getVar('PKGD', True)
    pn = d.getVar('PN', True)

    if not workdir or not outdir or not dest or not dvar or not pn:
        # Reported through the QA error machinery rather than a hard failure
        msg = "WORKDIR, DEPLOY_DIR, D, PN and PKGD all must be defined, unable to package"
        package_qa_handle_error("var-undefined", msg, d)
        return

    bb.build.exec_func("package_get_auto_pr", d)

    ###########################################################################
    # Optimisations
    ###########################################################################

    # Continually re-expanding complex expressions is inefficient, particularly
    # when we write to the datastore and invalidate the expansion cache. This
    # code pre-expands some frequently used variables

    def expandVar(x, d):
        d.setVar(x, d.getVar(x, True))

    for x in 'PN', 'PV', 'BPN', 'TARGET_SYS', 'EXTENDPRAUTO':
        expandVar(x, d)

    ###########################################################################
    # Setup PKGD (from D)
    ###########################################################################

    for f in (d.getVar('PACKAGEBUILDPKGD', True) or '').split():
        bb.build.exec_func(f, d)

    ###########################################################################
    # Split up PKGD into PKGDEST
    ###########################################################################

    # Fresh path cache: the PACKAGEBUILDPKGD functions above modified PKGD,
    # so the previously cached stat results may be stale
    cpath = oe.cachedpath.CachedPath()

    for f in (d.getVar('PACKAGESPLITFUNCS', True) or '').split():
        bb.build.exec_func(f, d)

    ###########################################################################
    # Process PKGDEST
    ###########################################################################

    # Build global list of files in each split package, consumed by the
    # PACKAGEFUNCS metadata processors below
    global pkgfiles
    pkgfiles = {}
    packages = d.getVar('PACKAGES', True).split()
    pkgdest = d.getVar('PKGDEST', True)
    for pkg in packages:
        pkgfiles[pkg] = []
        for walkroot, dirs, files in cpath.walk(pkgdest + "/" + pkg):
            for file in files:
                pkgfiles[pkg].append(walkroot + os.sep + file)

    for f in (d.getVar('PACKAGEFUNCS', True) or '').split():
        bb.build.exec_func(f, d)
}
1922
# Directories created/entered before the task runs
do_package[dirs] = "${SHLIBSWORKDIR} ${PKGDESTWORK} ${D}"
# Depend on the contents of every function the task executes, plus all
# per-package metadata variables (see gen_packagevar above), so changing
# any of them reruns do_package
do_package[vardeps] += "${PACKAGEBUILDPKGD} ${PACKAGESPLITFUNCS} ${PACKAGEFUNCS} ${@gen_packagevar(d)}"
addtask package before do_build after do_install

# Shared lock so do_package and do_packagedata don't race on common output
PACKAGELOCK = "${STAGING_DIR}/package-output.lock"
SSTATETASKS += "do_package"
do_package[sstate-name] = "package"
do_package[cleandirs] = "${PKGDEST} ${PKGDESTWORK}"
do_package[sstate-plaindirs] = "${PKGD} ${PKGDEST} ${PKGDESTWORK}"
do_package[sstate-lockfile-shared] = "${PACKAGELOCK}"
do_package_setscene[dirs] = "${STAGING_DIR}"
1934
# Setscene variant: restore do_package output from shared state instead of
# rerunning the task
python do_package_setscene () {
    sstate_setscene(d)
}
addtask do_package_setscene
1939
# No-op shell task: the real work is done by the sstate machinery via the
# sstate-inputdirs/outputdirs flags below, which publish PKGDESTWORK into
# the global PKGDATA_DIR
do_packagedata () {
	:
}

addtask packagedata before do_build after do_package

SSTATETASKS += "do_packagedata"
do_packagedata[sstate-name] = "packagedata"
do_packagedata[sstate-inputdirs] = "${PKGDESTWORK}"
do_packagedata[sstate-outputdirs] = "${PKGDATA_DIR}"
# Shares PACKAGELOCK with do_package (see above)
do_packagedata[sstate-lockfile-shared] = "${PACKAGELOCK}"
do_packagedata[stamp-extra-info] = "${MACHINE}"
1952
# Setscene variant: restore do_packagedata output from shared state
python do_packagedata_setscene () {
    sstate_setscene(d)
}
addtask do_packagedata_setscene
1957
# Dummy task to mark when all packaging is complete
do_package_write () {
	:
}
do_package_write[noexec] = "1"
# do_build recursively depends on this marker task across the whole
# dependency tree, ensuring all packaging has finished
PACKAGERDEPTASK = "do_package_write"
do_build[recrdeptask] += "${PACKAGERDEPTASK}"
addtask package_write before do_build after do_packagedata
1966
1967#
1968# Helper functions for the package writing classes
1969#
1970
def mapping_rename_hook(d):
    """
    Rewrite variables to account for package renaming in things
    like debian.bbclass or manual PKG variable name changes
    """
    pkg = d.getVar("PKG", True)
    # Apply the rename mapping to every runtime dependency variable
    for depvar in ("RDEPENDS", "RRECOMMENDS", "RSUGGESTS",
                   "RPROVIDES", "RREPLACES", "RCONFLICTS"):
        runtime_mapping_rename(depvar, pkg, d)