summaryrefslogtreecommitdiffstats
path: root/meta/classes/package.bbclass
diff options
context:
space:
mode:
Diffstat (limited to 'meta/classes/package.bbclass')
-rw-r--r--meta/classes/package.bbclass2019
1 files changed, 2019 insertions, 0 deletions
diff --git a/meta/classes/package.bbclass b/meta/classes/package.bbclass
new file mode 100644
index 0000000000..ea7591855e
--- /dev/null
+++ b/meta/classes/package.bbclass
@@ -0,0 +1,2019 @@
1#
2# Packaging process
3#
4# Executive summary: This class iterates over the functions listed in PACKAGEFUNCS
5# Taking D and splitting it up into the packages listed in PACKAGES, placing the
6# resulting output in PKGDEST.
7#
8# There are the following default steps but PACKAGEFUNCS can be extended:
9#
10# a) package_get_auto_pr - get PRAUTO from remote PR service
11#
12# b) perform_packagecopy - Copy D into PKGD
13#
14# c) package_do_split_locales - Split out the locale files, updates FILES and PACKAGES
15#
16# d) split_and_strip_files - split the files into runtime and debug and strip them.
17# Debug files include debug info split, and associated sources that end up in -dbg packages
18#
19# e) fixup_perms - Fix up permissions in the package before we split it.
20#
21# f) populate_packages - Split the files in PKGD into separate packages in PKGDEST/<pkgname>
22# Also triggers the binary stripping code to put files in -dbg packages.
23#
# g) package_do_filedeps - Collect per-file run-time dependency metadata
# The data is stored in FILER{PROVIDES,DEPENDS}_file_pkg variables with
# a list of affected files in FILER{PROVIDES,DEPENDS}FLIST_pkg
27#
# h) package_do_shlibs - Look at the shared libraries generated and automatically add any
# dependencies found. Also stores the package name so anyone else using this library
# knows which package to depend on.
31#
32# i) package_do_pkgconfig - Keep track of which packages need and provide which .pc files
33#
34# j) read_shlibdeps - Reads the stored shlibs information into the metadata
35#
36# k) package_depchains - Adds automatic dependencies to -dbg and -dev packages
37#
38# l) emit_pkgdata - saves the packaging data into PKGDATA_DIR for use in later
39# packaging steps
40
41inherit packagedata
42inherit prserv
43inherit chrpath
44
45# Need the package_qa_handle_error() in insane.bbclass
46inherit insane
47
48PKGD = "${WORKDIR}/package"
49PKGDEST = "${WORKDIR}/packages-split"
50
51LOCALE_SECTION ?= ''
52
53ALL_MULTILIB_PACKAGE_ARCHS = "${@all_multilib_tune_values(d, 'PACKAGE_ARCHS')}"
54
55# rpm is used for the per-file dependency identification
56PACKAGE_DEPENDS += "rpm-native"
57
def legitimize_package_name(s):
    """
    Make sure package names are legitimate strings.

    Lower-cases the name, translates characters that are not valid in a
    package name (_ @ , /) and decodes glibc-style <Uxxxx> codepoint
    escapes into the corresponding character.
    """
    import re

    def fixutf(m):
        cp = m.group(1)
        if cp:
            # Build the character directly from the hex codepoint.
            # The original ('\u%s' % cp).decode('unicode_escape') idiom is
            # Python 2 only — under Python 3 '\u%s' is a SyntaxError.
            return chr(int(cp, 16))

    # Handle unicode codepoints encoded as <U0123>, as in glibc locale files.
    s = re.sub('<U([0-9A-Fa-f]{1,4})>', fixutf, s)

    # Remaining package name validity fixes
    return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-')
def do_split_packages(d, root, file_regex, output_pattern, description, postinst=None, recursive=False, hook=None, extra_depends=None, aux_files_pattern=None, postrm=None, allow_dirs=False, prepend=False, match_path=False, aux_files_pattern_verbatim=None, allow_links=False, summary=None):
    """
    Used in .bb files to split up dynamically generated subpackages of a
    given package, usually plugins or modules.

    Arguments:
    root           -- the path in which to search
    file_regex     -- regular expression to match searched files. Use
                      parentheses () to mark the part of this expression
                      that should be used to derive the module name (to be
                      substituted where %s is used in other function
                      arguments as noted below)
    output_pattern -- pattern to use for the package names. Must include %s.
    description    -- description to set for each package. Must include %s.
    postinst       -- postinstall script to use for all packages (as a
                      string)
    recursive      -- True to perform a recursive search - default False
    hook           -- a hook function to be called for every match. The
                      function will be called with the following arguments
                      (in the order listed):
                        f: full path to the file/directory match
                        pkg: the package name
                        file_regex: as above
                        output_pattern: as above
                        modulename: the module name derived using file_regex
    extra_depends  -- extra runtime dependencies (RDEPENDS) to be set for
                      all packages. The default value of None causes a
                      dependency on the main package (${PN}) - if you do
                      not want this, pass '' for this parameter.
    aux_files_pattern -- extra item(s) to be added to FILES for each
                      package. Can be a single string item or a list of
                      strings for multiple items.  Must include %s.
    postrm         -- postrm script to use for all packages (as a string)
    allow_dirs     -- True allow directories to be matched - default False
    prepend        -- if True, prepend created packages to PACKAGES instead
                      of the default False which appends them
    match_path     -- match file_regex on the whole relative path to the
                      root rather than just the file name
    aux_files_pattern_verbatim -- extra item(s) to be added to FILES for
                      each package, using the actual derived module name
                      rather than converting it to something legal for a
                      package name. Can be a single string item or a list
                      of strings for multiple items.  Must include %s.
    allow_links    -- True to allow symlinks to be matched - default False
    summary        -- Summary to set for each package. Must include %s;
                      defaults to description if not set.

    Returns the list of subpackage names generated (which may already have
    existed in PACKAGES).
    """
    import re, stat

    dvar = d.getVar('PKGD', True)

    # If the root directory doesn't exist, don't error out later but silently do
    # no splitting.
    if not os.path.exists(dvar + root):
        return []

    # In a multilib build, keep the generated package names and the extra
    # dependencies inside the current multilib namespace.
    ml = d.getVar("MLPREFIX", True)
    if ml:
        if not output_pattern.startswith(ml):
            output_pattern = ml + output_pattern

        newdeps = []
        for dep in (extra_depends or "").split():
            if dep.startswith(ml):
                newdeps.append(dep)
            else:
                newdeps.append(ml + dep)
        if newdeps:
            extra_depends = " ".join(newdeps)


    packages = d.getVar('PACKAGES', True).split()
    split_packages = []

    if postinst:
        postinst = '#!/bin/sh\n' + postinst + '\n'
    if postrm:
        postrm = '#!/bin/sh\n' + postrm + '\n'

    # Collect the candidate paths (relative to root) to match against.
    if not recursive:
        objs = os.listdir(dvar + root)
    else:
        objs = []
        for walkroot, dirs, files in os.walk(dvar + root):
            for file in files:
                relpath = os.path.join(walkroot, file).replace(dvar + root + '/', '', 1)
                if relpath:
                    objs.append(relpath)

    if extra_depends is None:
        # Default: each generated package runtime-depends on the main package.
        extra_depends = d.getVar("PN", True)

    if not summary:
        summary = description

    # Compile the match expression once instead of on every candidate.
    file_re = re.compile(file_regex)

    for o in sorted(objs):
        if match_path:
            m = file_re.match(o)
        else:
            m = file_re.match(os.path.basename(o))

        if not m:
            continue
        f = os.path.join(dvar + root, o)
        mode = os.lstat(f).st_mode
        if not (stat.S_ISREG(mode) or (allow_links and stat.S_ISLNK(mode)) or (allow_dirs and stat.S_ISDIR(mode))):
            continue
        on = legitimize_package_name(m.group(1))
        pkg = output_pattern % on
        split_packages.append(pkg)
        if pkg not in packages:
            if prepend:
                packages = [pkg] + packages
            else:
                packages.append(pkg)
        oldfiles = d.getVar('FILES_' + pkg, True)
        newfile = os.path.join(root, o)
        # These names will be passed through glob() so if the filename actually
        # contains * or ? (rare, but possible) we need to handle that specially
        newfile = newfile.replace('*', '[*]')
        newfile = newfile.replace('?', '[?]')
        if not oldfiles:
            the_files = [newfile]
            if aux_files_pattern:
                if isinstance(aux_files_pattern, list):
                    for fp in aux_files_pattern:
                        the_files.append(fp % on)
                else:
                    the_files.append(aux_files_pattern % on)
            if aux_files_pattern_verbatim:
                if isinstance(aux_files_pattern_verbatim, list):
                    for fp in aux_files_pattern_verbatim:
                        the_files.append(fp % m.group(1))
                else:
                    the_files.append(aux_files_pattern_verbatim % m.group(1))
            d.setVar('FILES_' + pkg, " ".join(the_files))
            if extra_depends != '':
                d.appendVar('RDEPENDS_' + pkg, ' ' + extra_depends)
            d.setVar('DESCRIPTION_' + pkg, description % on)
            d.setVar('SUMMARY_' + pkg, summary % on)
            if postinst:
                d.setVar('pkg_postinst_' + pkg, postinst)
            if postrm:
                d.setVar('pkg_postrm_' + pkg, postrm)
        else:
            # Package already has a FILES value: just extend it.
            d.setVar('FILES_' + pkg, oldfiles + " " + newfile)
        if callable(hook):
            hook(f, pkg, file_regex, output_pattern, m.group(1))

    d.setVar('PACKAGES', ' '.join(packages))
    return split_packages
226
227PACKAGE_DEPENDS += "file-native"
228
python () {
    # Anonymous function run at parse time: wire up do_package's build-time
    # dependencies from PACKAGE_DEPENDS (skipped when PACKAGES is empty,
    # i.e. the recipe produces no packages).
    if d.getVar('PACKAGES', True) != '':
        deps = ""
        for dep in (d.getVar('PACKAGE_DEPENDS', True) or "").split():
            deps += " %s:do_populate_sysroot" % dep
        d.appendVarFlag('do_package', 'depends', deps)

        # shlibs requires any DEPENDS to have already packaged for the *.list files
        d.appendVarFlag('do_package', 'deptask', " do_packagedata")
}
239
def splitdebuginfo(file, debugfile, debugsrcdir, sourcefile, d):
    # Function to split a single file into two components, one is the stripped
    # target system binary, the other contains any debugging information. The
    # two files are linked to reference each other.
    #
    # sourcefile is also generated containing a list of debugsources
    #
    # Returns 1 for skipped kernel modules, 0 otherwise.

    import stat

    dvar = d.getVar('PKGD', True)
    objcopy = d.getVar("OBJCOPY", True)
    debugedit = d.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit")
    workdir = d.getVar("WORKDIR", True)
    workparentdir = d.getVar("DEBUGSRC_OVERRIDE_PATH", True) or os.path.dirname(os.path.dirname(workdir))

    # We ignore kernel modules, we don't generate debug info files.
    if file.find("/lib/modules/") != -1 and file.endswith(".ko"):
        return 1

    # Temporarily make sure the file is readable AND writable so debugedit
    # and objcopy can operate on it; the original mode is restored at the end.
    # Fix: the second test previously lacked "not" ("or os.access(file, os.R_OK)"),
    # which chmod'ed every readable file and skipped unreadable-but-writable ones.
    newmode = None
    if not os.access(file, os.W_OK) or not os.access(file, os.R_OK):
        origmode = os.stat(file)[stat.ST_MODE]
        newmode = origmode | stat.S_IWRITE | stat.S_IREAD
        os.chmod(file, newmode)

    # We need to extract the debug src information here...
    if debugsrcdir:
        cmd = "'%s' -b '%s' -d '%s' -i -l '%s' '%s'" % (debugedit, workparentdir, debugsrcdir, sourcefile, file)
        (retval, output) = oe.utils.getstatusoutput(cmd)
        if retval:
            bb.fatal("debugedit failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))

    bb.utils.mkdirhier(os.path.dirname(debugfile))

    # Copy the debug info out into the separate debug file...
    cmd = "'%s' --only-keep-debug '%s' '%s'" % (objcopy, file, debugfile)
    (retval, output) = oe.utils.getstatusoutput(cmd)
    if retval:
        bb.fatal("objcopy failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))

    # Set the debuglink to have the view of the file path on the target
    cmd = "'%s' --add-gnu-debuglink='%s' '%s'" % (objcopy, debugfile, file)
    (retval, output) = oe.utils.getstatusoutput(cmd)
    if retval:
        bb.fatal("objcopy failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))

    if newmode:
        os.chmod(file, origmode)

    return 0
289
def copydebugsources(debugsrcdir, d):
    # The debug src information written out to sourcefile is further processed
    # and copied to the destination here.
    #
    # Reads ${WORKDIR}/debugsources.list (NUL-separated paths emitted by
    # debugedit) and cpio-copies the referenced sources into
    # PKGD/<debugsrcdir>.

    import stat

    sourcefile = d.expand("${WORKDIR}/debugsources.list")
    if debugsrcdir and os.path.isfile(sourcefile):
        dvar = d.getVar('PKGD', True)
        # NOTE(review): strip and objcopy are fetched but unused in this function.
        strip = d.getVar("STRIP", True)
        objcopy = d.getVar("OBJCOPY", True)
        debugedit = d.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit")
        workdir = d.getVar("WORKDIR", True)
        workparentdir = os.path.dirname(os.path.dirname(workdir))
        workbasedir = os.path.basename(os.path.dirname(workdir)) + "/" + os.path.basename(workdir)

        # Record which components of debugsrcdir we had to create ourselves,
        # so any that end up empty can be removed again at the end.
        nosuchdir = []
        basepath = dvar
        for p in debugsrcdir.split("/"):
            basepath = basepath + "/" + p
            if not cpath.exists(basepath):
                nosuchdir.append(basepath)
        bb.utils.mkdirhier(basepath)
        cpath.updatecache(basepath)

        # Pipeline: dedupe the NUL-separated list, drop compiler-internal
        # pseudo-files, keep only entries from this package's workdir, then
        # cpio-copy them into place.
        processdebugsrc = "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '(<internal>|<built-in>)$' | "
        # We need to ignore files that are not actually ours
        # we do this by only paying attention to items from this package
        processdebugsrc += "fgrep -zw '%s' | "
        processdebugsrc += "(cd '%s' ; cpio -pd0mlL --no-preserve-owner '%s%s' 2>/dev/null)"

        cmd = processdebugsrc % (sourcefile, workbasedir, workparentdir, dvar, debugsrcdir)
        (retval, output) = oe.utils.getstatusoutput(cmd)
        # Can "fail" if internal headers/transient sources are attempted
        #if retval:
        #    bb.fatal("debug source copy failed with exit code %s (cmd was %s)" % (retval, cmd))

        # cpio seems to have a bug with -lL together and symbolic links are just copied, not dereferenced.
        # Work around this by manually finding and copying any symbolic links that made it through.
        cmd = "find %s%s -type l -print0 -delete | sed s#%s%s/##g | (cd '%s' ; cpio -pd0mL --no-preserve-owner '%s%s' 2>/dev/null)" % (dvar, debugsrcdir, dvar, debugsrcdir, workparentdir, dvar, debugsrcdir)
        (retval, output) = oe.utils.getstatusoutput(cmd)
        if retval:
            bb.fatal("debugsrc symlink fixup failed with exit code %s (cmd was %s)" % (retval, cmd))

        # The copy by cpio may have resulted in some empty directories! Remove these
        cmd = "find %s%s -empty -type d -delete" % (dvar, debugsrcdir)
        (retval, output) = oe.utils.getstatusoutput(cmd)
        if retval:
            bb.fatal("empty directory removal failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))

        # Also remove debugsrcdir if its empty
        for p in nosuchdir[::-1]:
            if os.path.exists(p) and not os.listdir(p):
                os.rmdir(p)
344
345#
346# Package data handling routines
347#
348
def get_package_mapping (pkg, basepkg, d):
    # Resolve a package's final name via its PKG_<pkg> pkgdata entry,
    # falling back to the original name when no rename was recorded.
    import oe.packagedata

    subdata = oe.packagedata.read_subpkgdata(pkg, d)
    renamed = subdata.get("PKG_%s" % pkg)

    if renamed is None:
        return pkg

    # Have to avoid undoing the write_extra_pkgs(global_variants...)
    if bb.data.inherits_class('allarch', d) and renamed == basepkg:
        return pkg

    return renamed
362
def get_package_additional_metadata (pkg_type, d):
    """Return extra backend-specific package metadata, or None when unset.

    Checks PACKAGE_ADD_METADATA_<PKGTYPE> first, then the generic
    PACKAGE_ADD_METADATA; the first variable that is set wins.  Fields are
    split per the variable's "separator" flag (default literal "\\n"),
    stripped, and rejoined with real newlines.
    """
    base_key = "PACKAGE_ADD_METADATA"
    for key in ("%s_%s" % (base_key, pkg_type.upper()), base_key):
        # NOTE(review): uses d.getVar(key) without the expand flag used
        # elsewhere in this file (getVar(..., True)) - confirm intended.
        if d.getVar(key) is None:
            continue
        d.setVarFlag(key, "type", "list")
        if d.getVarFlag(key, "separator") is None:
            d.setVarFlag(key, "separator", "\\n")
        metadata_fields = [field.strip() for field in oe.data.typed_value(key, d)]
        return "\n".join(metadata_fields).strip()
373
def runtime_mapping_rename (varname, pkg, d):
    # Rewrite every dependency listed in the datastore variable <varname>
    # through get_package_mapping() so renamed packages are referenced by
    # their final names, preserving any version constraints.
    #bb.note("%s before: %s" % (varname, d.getVar(varname, True)))

    deps = bb.utils.explode_dep_versions2(d.getVar(varname, True) or "")
    remapped = {}
    for dep, constraints in deps.items():
        remapped[get_package_mapping(dep, pkg, d)] = constraints

    d.setVar(varname, bb.utils.join_deps(remapped, commasep=False))

    #bb.note("%s after: %s" % (varname, d.getVar(varname, True)))
386
387#
388# Package functions suitable for inclusion in PACKAGEFUNCS
389#
390
python package_get_auto_pr() {
    # Obtain the automatic PR (package revision) component: from the PR
    # service when one is configured, otherwise pin any AUTOINC placeholder
    # in PKGV to 0.
    # per recipe PRSERV_HOST
    pn = d.getVar('PN', True)
    host = d.getVar("PRSERV_HOST_" + pn, True)
    if not (host is None):
        d.setVar("PRSERV_HOST", host)

    if d.getVar('PRSERV_HOST', True):
        try:
            auto_pr=prserv_get_pr_auto(d)
        except Exception as e:
            bb.fatal("Can NOT get PRAUTO, exception %s" % str(e))
        if auto_pr is None:
            if d.getVar('PRSERV_LOCKDOWN', True):
                bb.fatal("Can NOT get PRAUTO from lockdown exported file")
            else:
                bb.fatal("Can NOT get PRAUTO from remote PR service")
            return
        d.setVar('PRAUTO',str(auto_pr))
    else:
        # No PR service: replace the AUTOINC placeholder with a fixed 0.
        pkgv = d.getVar("PKGV", True)
        if 'AUTOINC' in pkgv:
            d.setVar("PKGV", pkgv.replace("AUTOINC", "0"))
}
415
416LOCALEBASEPN ??= "${PN}"
417
python package_do_split_locales() {
    # Split ${datadir}/locale into one <pkg>-locale-<lang> package per
    # installed locale, updating FILES_*/PACKAGES accordingly.
    if (d.getVar('PACKAGE_NO_LOCALE', True) == '1'):
        bb.debug(1, "package requested not splitting locales")
        return

    packages = (d.getVar('PACKAGES', True) or "").split()

    datadir = d.getVar('datadir', True)
    if not datadir:
        bb.note("datadir not defined")
        return

    dvar = d.getVar('PKGD', True)
    pn = d.getVar('LOCALEBASEPN', True)

    # The catch-all -locale package is replaced by the per-locale packages.
    if pn + '-locale' in packages:
        packages.remove(pn + '-locale')

    localedir = os.path.join(dvar + datadir, 'locale')

    if not cpath.isdir(localedir):
        bb.debug(1, "No locale files in this package")
        return

    locales = os.listdir(localedir)

    summary = d.getVar('SUMMARY', True) or pn
    description = d.getVar('DESCRIPTION', True) or ""
    locale_section = d.getVar('LOCALE_SECTION', True)
    mlprefix = d.getVar('MLPREFIX', True) or ""
    for l in sorted(locales):
        ln = legitimize_package_name(l)
        pkg = pn + '-locale-' + ln
        packages.append(pkg)
        d.setVar('FILES_' + pkg, os.path.join(datadir, 'locale', l))
        d.setVar('RRECOMMENDS_' + pkg, '%svirtual-locale-%s' % (mlprefix, ln))
        d.setVar('RPROVIDES_' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln))
        d.setVar('SUMMARY_' + pkg, '%s - %s translations' % (summary, l))
        d.setVar('DESCRIPTION_' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l))
        if locale_section:
            d.setVar('SECTION_' + pkg, locale_section)

    d.setVar('PACKAGES', ' '.join(packages))

    # Disabled by RP 18/06/07
    # Wildcards aren't supported in debian
    # They break with ipkg since glibc-locale* will mean that
    # glibc-localedata-translit* won't install as a dependency
    # for some other package which breaks meta-toolchain
    # Probably breaks since virtual-locale- isn't provided anywhere
    #rdep = (d.getVar('RDEPENDS_%s' % pn, True) or "").split()
    #rdep.append('%s-locale*' % pn)
    #d.setVar('RDEPENDS_%s' % pn, ' '.join(rdep))
}
472
python perform_packagecopy () {
    # Copy ${D} (the install output) into ${PKGD} so that later packaging
    # steps can modify files without touching the do_install output.
    dest = d.getVar('D', True)
    dvar = d.getVar('PKGD', True)

    # Start by package population by taking a copy of the installed
    # files to operate on
    # Preserve sparse files and hard links
    cmd = 'tar -cf - -C %s -p . | tar -xf - -C %s' % (dest, dvar)
    (retval, output) = oe.utils.getstatusoutput(cmd)
    if retval:
        bb.fatal("file copy failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))

    # replace RPATHs for the nativesdk binaries, to make them relocatable
    if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d):
        rpath_replace (dvar, d)
}
489perform_packagecopy[cleandirs] = "${PKGD}"
490perform_packagecopy[dirs] = "${PKGD}"
491
492# We generate a master list of directories to process, we start by
493# seeding this list with reasonable defaults, then load from
494# the fs-perms.txt files
python fixup_perms () {
    # Apply the fs-perms.txt permission/ownership tables (and default
    # 0755 root:root for the standard directories) to the tree in PKGD.
    import pwd, grp

    # init using a string with the same format as a line as documented in
    # the fs-perms.txt file
    # <path> <mode> <uid> <gid> <walk> <fmode> <fuid> <fgid>
    # <path> link <link target>
    #
    # __str__ can be used to print out an entry in the input format
    #
    # if fs_perms_entry.path is None:
    #    an error occured
    # if fs_perms_entry.link, you can retrieve:
    #    fs_perms_entry.path = path
    #    fs_perms_entry.link = target of link
    # if not fs_perms_entry.link, you can retrieve:
    #    fs_perms_entry.path = path
    #    fs_perms_entry.mode = expected dir mode or None
    #    fs_perms_entry.uid = expected uid or -1
    #    fs_perms_entry.gid = expected gid or -1
    #    fs_perms_entry.walk = 'true' or something else
    #    fs_perms_entry.fmode = expected file mode or None
    #    fs_perms_entry.fuid = expected file uid or -1
    #    fs_perms_entry.fgid = expected file gid or -1
    class fs_perms_entry():
        def __init__(self, line):
            lsplit = line.split()
            if len(lsplit) == 3 and lsplit[1].lower() == "link":
                self._setlink(lsplit[0], lsplit[2])
            elif len(lsplit) == 8:
                self._setdir(lsplit[0], lsplit[1], lsplit[2], lsplit[3], lsplit[4], lsplit[5], lsplit[6], lsplit[7])
            else:
                msg = "Fixup Perms: invalid config line %s" % line
                package_qa_handle_error("perm-config", msg, d)
                self.path = None
                self.link = None

        def _setdir(self, path, mode, uid, gid, walk, fmode, fuid, fgid):
            self.path = os.path.normpath(path)
            self.link = None
            self.mode = self._procmode(mode)
            self.uid = self._procuid(uid)
            self.gid = self._procgid(gid)
            self.walk = walk.lower()
            self.fmode = self._procmode(fmode)
            self.fuid = self._procuid(fuid)
            self.fgid = self._procgid(fgid)

        def _setlink(self, path, link):
            self.path = os.path.normpath(path)
            self.link = link

        def _procmode(self, mode):
            if not mode or (mode and mode == "-"):
                return None
            else:
                return int(mode,8)

        # Note uid/gid -1 has special significance in os.lchown
        def _procuid(self, uid):
            if uid is None or uid == "-":
                return -1
            elif uid.isdigit():
                return int(uid)
            else:
                return pwd.getpwnam(uid).pw_uid

        def _procgid(self, gid):
            if gid is None or gid == "-":
                return -1
            elif gid.isdigit():
                return int(gid)
            else:
                return grp.getgrnam(gid).gr_gid

        # Use for debugging the entries
        def __str__(self):
            if self.link:
                return "%s link %s" % (self.path, self.link)
            else:
                mode = "-"
                if self.mode:
                    mode = "0%o" % self.mode
                fmode = "-"
                if self.fmode:
                    fmode = "0%o" % self.fmode
                uid = self._mapugid(self.uid)
                gid = self._mapugid(self.gid)
                fuid = self._mapugid(self.fuid)
                fgid = self._mapugid(self.fgid)
                return "%s %s %s %s %s %s %s %s" % (self.path, mode, uid, gid, self.walk, fmode, fuid, fgid)

        def _mapugid(self, id):
            if id is None or id == -1:
                return "-"
            else:
                return "%d" % id

    # Fix the permission, owner and group of path
    def fix_perms(path, mode, uid, gid, dir):
        if mode and not os.path.islink(path):
            #bb.note("Fixup Perms: chmod 0%o %s" % (mode, dir))
            os.chmod(path, mode)
        # -1 is a special value that means don't change the uid/gid
        # if they are BOTH -1, don't bother to lchown
        if not (uid == -1 and gid == -1):
            #bb.note("Fixup Perms: lchown %d:%d %s" % (uid, gid, dir))
            os.lchown(path, uid, gid)

    # Return a list of configuration files based on either the default
    # files/fs-perms.txt or the contents of FILESYSTEM_PERMS_TABLES
    # paths are resolved via BBPATH
    def get_fs_perms_list(d):
        str = ""
        bbpath = d.getVar('BBPATH', True)
        fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES', True)
        if not fs_perms_tables:
            fs_perms_tables = 'files/fs-perms.txt'
        for conf_file in fs_perms_tables.split():
            str += " %s" % bb.utils.which(bbpath, conf_file)
        return str



    dvar = d.getVar('PKGD', True)

    fs_perms_table = {}

    # By default all of the standard directories specified in
    # bitbake.conf will get 0755 root:root.
    target_path_vars = [ 'base_prefix',
                'prefix',
                'exec_prefix',
                'base_bindir',
                'base_sbindir',
                'base_libdir',
                'datadir',
                'sysconfdir',
                'servicedir',
                'sharedstatedir',
                'localstatedir',
                'infodir',
                'mandir',
                'docdir',
                'bindir',
                'sbindir',
                'libexecdir',
                'libdir',
                'includedir',
                'oldincludedir' ]

    for path in target_path_vars:
        dir = d.getVar(path, True) or ""
        if dir == "":
            continue
        fs_perms_table[dir] = fs_perms_entry(bb.data.expand("%s 0755 root root false - - -" % (dir), d))

    # Now we actually load from the configuration files
    for conf in get_fs_perms_list(d).split():
        if os.path.exists(conf):
            f = open(conf)
            for line in f:
                if line.startswith('#'):
                    continue
                lsplit = line.split()
                if len(lsplit) == 0:
                    continue
                if len(lsplit) != 8 and not (len(lsplit) == 3 and lsplit[1].lower() == "link"):
                    msg = "Fixup perms: %s invalid line: %s" % (conf, line)
                    package_qa_handle_error("perm-line", msg, d)
                    continue
                entry = fs_perms_entry(d.expand(line))
                if entry and entry.path:
                    # Later files/lines override earlier entries for the same path.
                    fs_perms_table[entry.path] = entry
            f.close()

    # Debug -- list out in-memory table
    #for dir in fs_perms_table:
    #    bb.note("Fixup Perms: %s: %s" % (dir, str(fs_perms_table[dir])))

    # We process links first, so we can go back and fixup directory ownership
    # for any newly created directories
    for dir in fs_perms_table:
        if not fs_perms_table[dir].link:
            continue

        origin = dvar + dir
        if not (cpath.exists(origin) and cpath.isdir(origin) and not cpath.islink(origin)):
            continue

        link = fs_perms_table[dir].link
        if link[0] == "/":
            target = dvar + link
            ptarget = link
        else:
            target = os.path.join(os.path.dirname(origin), link)
            ptarget = os.path.join(os.path.dirname(dir), link)
        if os.path.exists(target):
            msg = "Fixup Perms: Unable to correct directory link, target already exists: %s -> %s" % (dir, ptarget)
            package_qa_handle_error("perm-link", msg, d)
            continue

        # Create path to move directory to, move it, and then setup the symlink
        bb.utils.mkdirhier(os.path.dirname(target))
        #bb.note("Fixup Perms: Rename %s -> %s" % (dir, ptarget))
        os.rename(origin, target)
        #bb.note("Fixup Perms: Link %s -> %s" % (dir, link))
        os.symlink(link, origin)

    # Second pass: apply mode/uid/gid (and optionally walk into the tree).
    for dir in fs_perms_table:
        if fs_perms_table[dir].link:
            continue

        origin = dvar + dir
        if not (cpath.exists(origin) and cpath.isdir(origin)):
            continue

        fix_perms(origin, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)

        if fs_perms_table[dir].walk == 'true':
            for root, dirs, files in os.walk(origin):
                for dr in dirs:
                    each_dir = os.path.join(root, dr)
                    fix_perms(each_dir, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)
                for f in files:
                    each_file = os.path.join(root, f)
                    fix_perms(each_file, fs_perms_table[dir].fmode, fs_perms_table[dir].fuid, fs_perms_table[dir].fgid, dir)
}
723
python split_and_strip_files () {
    # Split out debug info (and optionally debug sources) into the -dbg
    # layout selected by PACKAGE_DEBUG_SPLIT_STYLE, then strip the binaries.
    import stat, errno

    dvar = d.getVar('PKGD', True)
    pn = d.getVar('PN', True)

    # We default to '.debug' style
    if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', True) == 'debug-file-directory':
        # Single debug-file-directory style debug info
        debugappend = ".debug"
        debugdir = ""
        debuglibdir = "/usr/lib/debug"
        debugsrcdir = "/usr/src/debug"
    elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', True) == 'debug-without-src':
        # Original OE-core, a.k.a. ".debug", style debug info, but without sources in /usr/src/debug
        debugappend = ""
        debugdir = "/.debug"
        debuglibdir = ""
        debugsrcdir = ""
    else:
        # Original OE-core, a.k.a. ".debug", style debug info
        debugappend = ""
        debugdir = "/.debug"
        debuglibdir = ""
        debugsrcdir = "/usr/src/debug"

    sourcefile = d.expand("${WORKDIR}/debugsources.list")
    bb.utils.remove(sourcefile)

    os.chdir(dvar)

    # Return type (bits):
    # 0 - not elf
    # 1 - ELF
    # 2 - stripped
    # 4 - executable
    # 8 - shared library
    # 16 - kernel module
    def isELF(path):
        type = 0
        ret, result = oe.utils.getstatusoutput("file \"%s\"" % path.replace("\"", "\\\""))

        if ret:
            msg = "split_and_strip_files: 'file %s' failed" % path
            package_qa_handle_error("split-strip", msg, d)
            return type

        # Not stripped
        if "ELF" in result:
            type |= 1
            if "not stripped" not in result:
                type |= 2
            if "executable" in result:
                type |= 4
            if "shared" in result:
                type |= 8
        return type


    #
    # First lets figure out all of the files we may have to process ... do this only once!
    #
    elffiles = {}
    symlinks = {}
    hardlinks = {}
    kernmods = []
    libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir", True))
    baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir", True))
    if (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1'):
        for root, dirs, files in cpath.walk(dvar):
            for f in files:
                file = os.path.join(root, f)
                if file.endswith(".ko") and file.find("/lib/modules/") != -1:
                    kernmods.append(file)
                    continue

                # Skip debug files
                if debugappend and file.endswith(debugappend):
                    continue
                if debugdir and debugdir in os.path.dirname(file[len(dvar):]):
                    continue

                try:
                    ltarget = cpath.realpath(file, dvar, False)
                    s = cpath.lstat(ltarget)
                except OSError as e:
                    (err, strerror) = e.args
                    if err != errno.ENOENT:
                        raise
                    # Skip broken symlinks
                    continue
                if not s:
                    continue
                # Check if it's executable (or a .so under a library dir)
                if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) or (s[stat.ST_MODE] & stat.S_IXOTH) \
                        or ((file.startswith(libdir) or file.startswith(baselibdir)) and ".so" in f):
                    # If it's a symlink, and points to an ELF file, we capture the readlink target
                    if cpath.islink(file):
                        target = os.readlink(file)
                        if isELF(ltarget):
                            #bb.note("Sym: %s (%d)" % (ltarget, isELF(ltarget)))
                            symlinks[file] = target
                        continue
                    # It's a file (or hardlink), not a link
                    # ...but is it ELF, and is it already stripped?
                    elf_file = isELF(file)
                    if elf_file & 1:
                        if elf_file & 2:
                            if 'already-stripped' in (d.getVar('INSANE_SKIP_' + pn, True) or "").split():
                                bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn))
                            else:
                                msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn)
                                package_qa_handle_error("already-stripped", msg, d)
                            continue
                        # Check if it's a hard link to something else
                        if s.st_nlink > 1:
                            file_reference = "%d_%d" % (s.st_dev, s.st_ino)
                            # Hard link to something else
                            hardlinks[file] = file_reference
                            continue
                        elffiles[file] = elf_file

    #
    # First lets process debug splitting
    #
    if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', True) != '1'):
        hardlinkmap = {}
        # For hardlinks, process only one of the files
        for file in hardlinks:
            file_reference = hardlinks[file]
            if file_reference not in hardlinkmap:
                # If this is a new file, add it as a reference, and
                # update it's type, so we can fall through and split
                elffiles[file] = isELF(file)
                hardlinkmap[file_reference] = file

        for file in elffiles:
            src = file[len(dvar):]
            dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
            fpath = dvar + dest

            # Split the file...
            bb.utils.mkdirhier(os.path.dirname(fpath))
            #bb.note("Split %s -> %s" % (file, fpath))
            # Only store off the hard link reference if we successfully split!
            splitdebuginfo(file, fpath, debugsrcdir, sourcefile, d)

        # Hardlink our debug symbols to the other hardlink copies
        for file in hardlinks:
            if file not in elffiles:
                src = file[len(dvar):]
                dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
                fpath = dvar + dest
                file_reference = hardlinks[file]
                target = hardlinkmap[file_reference][len(dvar):]
                ftarget = dvar + debuglibdir + os.path.dirname(target) + debugdir + "/" + os.path.basename(target) + debugappend
                bb.utils.mkdirhier(os.path.dirname(fpath))
                #bb.note("Link %s -> %s" % (fpath, ftarget))
                os.link(ftarget, fpath)

        # Create symlinks for all cases we were able to split symbols
        for file in symlinks:
            src = file[len(dvar):]
            dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
            fpath = dvar + dest
            # Skip it if the target doesn't exist
            try:
                s = os.stat(fpath)
            except OSError as e:
                (err, strerror) = e.args
                if err != errno.ENOENT:
                    raise
                continue

            ltarget = symlinks[file]
            lpath = os.path.dirname(ltarget)
            lbase = os.path.basename(ltarget)
            ftarget = ""
            if lpath and lpath != ".":
                ftarget += lpath + debugdir + "/"
            ftarget += lbase + debugappend
            if lpath.startswith(".."):
                ftarget = os.path.join("..", ftarget)
            bb.utils.mkdirhier(os.path.dirname(fpath))
            #bb.note("Symlink %s -> %s" % (fpath, ftarget))
            os.symlink(ftarget, fpath)

        # Process the debugsrcdir if requested...
        # This copies and places the referenced sources for later debugging...
        copydebugsources(debugsrcdir, d)
    #
    # End of debug splitting
    #

    #
    # Now lets go back over things and strip them
    #
    if (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1'):
        strip = d.getVar("STRIP", True)
        sfiles = []
        for file in elffiles:
            elf_file = int(elffiles[file])
            #bb.note("Strip %s" % file)
            sfiles.append((file, elf_file, strip))
        for f in kernmods:
            sfiles.append((f, 16, strip))


        # Strip in parallel, one worker per CPU.
        import multiprocessing
        nproc = multiprocessing.cpu_count()
        pool = bb.utils.multiprocessingpool(nproc)
        processed = list(pool.imap(oe.package.runstrip, sfiles))
        pool.close()
        pool.join()

    #
    # End of strip
    #
}
943
python populate_packages () {
    # Split the staged files in PKGD into one directory tree per package under
    # PKGDEST/<pkg>, driven by the FILES_<pkg> glob lists. Files are hardlinked
    # (symlinks copied) so ownership/permissions are preserved, and anything
    # installed but matched by no package is reported as "installed-vs-shipped".
    import glob, re

    workdir = d.getVar('WORKDIR', True)
    outdir = d.getVar('DEPLOY_DIR', True)
    dvar = d.getVar('PKGD', True)
    packages = d.getVar('PACKAGES', True)
    pn = d.getVar('PN', True)

    bb.utils.mkdirhier(outdir)
    os.chdir(dvar)

    # Sanity check PACKAGES for duplicates and for LICENSE_EXCLUSION
    # Sanity should be moved to sanity.bbclass once we have the infrastructure
    package_list = []

    for pkg in packages.split():
        if d.getVar('LICENSE_EXCLUSION-' + pkg, True):
            msg = "%s has an incompatible license. Excluding from packaging." % pkg
            package_qa_handle_error("incompatible-license", msg, d)
        if pkg in package_list:
            msg = "%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg
            package_qa_handle_error("packages-list", msg, d)
        else:
            package_list.append(pkg)
    d.setVar('PACKAGES', ' '.join(package_list))
    pkgdest = d.getVar('PKGDEST', True)

    # Paths (relative, './'-prefixed) already claimed by some package.
    seen = []

    # os.mkdir masks the permissions with umask so we have to unset it first
    oldumask = os.umask(0)

    for pkg in package_list:
        root = os.path.join(pkgdest, pkg)
        bb.utils.mkdirhier(root)

        filesvar = d.getVar('FILES_%s' % pkg, True) or ""
        if "//" in filesvar:
            msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg
            package_qa_handle_error("files-invalid", msg, d)
            # Fix: str.replace() returns a new string; the original code
            # discarded the result, so the '//' was never actually corrected.
            filesvar = filesvar.replace("//", "/")

        origfiles = filesvar.split()
        files = []
        # Normalise each FILES entry to a './'-relative path and expand globs.
        for file in origfiles:
            if os.path.isabs(file):
                file = '.' + file
            if not file.startswith("./"):
                file = './' + file
            globbed = glob.glob(file)
            if globbed:
                if [ file ] != globbed:
                    files += globbed
                    continue
            files.append(file)

        for file in files:
            if not cpath.islink(file):
                if cpath.isdir(file):
                    # Recurse into directories by appending their contents to
                    # the worklist being iterated.
                    newfiles = [ os.path.join(file,x) for x in os.listdir(file) ]
                    if newfiles:
                        files += newfiles
                        continue
            if (not cpath.islink(file)) and (not cpath.exists(file)):
                continue
            # First package to claim a path wins.
            if file in seen:
                continue
            seen.append(file)

            if d.getVar('LICENSE_EXCLUSION-' + pkg, True):
                continue

            def mkdir(src, dest, p):
                # Create dest/p mirroring mode and ownership of src/p.
                src = os.path.join(src, p)
                dest = os.path.join(dest, p)
                fstat = cpath.stat(src)
                os.mkdir(dest, fstat.st_mode)
                os.chown(dest, fstat.st_uid, fstat.st_gid)
                if p not in seen:
                    seen.append(p)
                cpath.updatecache(dest)

            def mkdir_recurse(src, dest, paths):
                # mkdir -p equivalent that copies each level's mode/ownership.
                if cpath.exists(dest + '/' + paths):
                    return
                while paths.startswith("./"):
                    paths = paths[2:]
                p = "."
                for c in paths.split("/"):
                    p = os.path.join(p, c)
                    if not cpath.exists(os.path.join(dest, p)):
                        mkdir(src, dest, p)

            if cpath.isdir(file) and not cpath.islink(file):
                mkdir_recurse(dvar, root, file)
                continue

            mkdir_recurse(dvar, root, os.path.dirname(file))
            fpath = os.path.join(root,file)
            if not cpath.islink(file):
                # Hardlink regular files (cheap, preserves content identity).
                os.link(file, fpath)
                fstat = cpath.stat(file)
                os.chmod(fpath, fstat.st_mode)
                os.chown(fpath, fstat.st_uid, fstat.st_gid)
                continue
            # Symlinks are copied instead of hardlinked.
            ret = bb.utils.copyfile(file, fpath)
            if ret is False or ret == 0:
                raise bb.build.FuncFailed("File population failed")

    os.umask(oldumask)
    os.chdir(workdir)

    # Report anything left in PKGD that no package claimed.
    unshipped = []
    for root, dirs, files in cpath.walk(dvar):
        dir = root[len(dvar):]
        if not dir:
            dir = os.sep
        for f in (files + dirs):
            path = os.path.join(dir, f)
            if ('.' + path) not in seen:
                unshipped.append(path)

    if unshipped != []:
        msg = pn + ": Files/directories were installed but not shipped"
        if "installed-vs-shipped" in (d.getVar('INSANE_SKIP_' + pn, True) or "").split():
            bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn)
        else:
            for f in unshipped:
                msg = msg + "\n " + f
            package_qa_handle_error("installed-vs-shipped", msg, d)
}
populate_packages[dirs] = "${D}"
1077
python package_fixsymlinks () {
    # Find dangling symlinks in each split package and, where the link target
    # lives in a sibling package of this recipe, add an RDEPENDS on it.
    pkgdest = d.getVar('PKGDEST', True)
    # Fix: pass the expand flag like every other getVar call in this class;
    # without it PACKAGES could be returned unexpanded.
    packages = d.getVar("PACKAGES", True).split()

    dangling_links = {}
    pkg_files = {}
    for pkg in packages:
        dangling_links[pkg] = []
        pkg_files[pkg] = []
        inst_root = os.path.join(pkgdest, pkg)
        for path in pkgfiles[pkg]:
            rpath = path[len(inst_root):]
            pkg_files[pkg].append(rpath)
            rtarget = cpath.realpath(path, inst_root, True, assume_dir = True)
            if not cpath.lexists(rtarget):
                dangling_links[pkg].append(os.path.normpath(rtarget[len(inst_root):]))

    newrdepends = {}
    for pkg in dangling_links:
        for l in dangling_links[pkg]:
            found = False
            bb.debug(1, "%s contains dangling link %s" % (pkg, l))
            for p in packages:
                if l in pkg_files[p]:
                    found = True
                    bb.debug(1, "target found in %s" % p)
                    if p == pkg:
                        break
                    if pkg not in newrdepends:
                        newrdepends[pkg] = []
                    newrdepends[pkg].append(p)
                    break
            if found == False:
                # Target is outside this recipe's packages; just report it.
                bb.note("%s contains dangling symlink to %s" % (pkg, l))

    for pkg in newrdepends:
        rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg, True) or "")
        for p in newrdepends[pkg]:
            if p not in rdepends:
                rdepends[p] = []
        d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False))
}
1121
1122PKGDESTWORK = "${WORKDIR}/pkgdata"
1123
python emit_pkgdata() {
    # Write the per-recipe and per-package metadata files under PKGDESTWORK
    # (pkgdata): ${PN} lists PACKAGES, runtime/<pkg> holds each package's
    # variables, and runtime-reverse/ maps final (possibly renamed) package
    # names back to the runtime files.
    from glob import glob
    import json

    def write_if_exists(f, pkg, var):
        # Emit "VAR_pkg: value" if the package-specific variable is set,
        # otherwise fall back to the unsuffixed "VAR: value"; emit nothing
        # if neither exists.
        def encode(str):
            # NOTE: shadows the builtin 'str' locally; string_escape is the
            # Python 2 codec used to keep multi-line values on one line.
            import codecs
            c = codecs.getencoder("string_escape")
            return c(str)[0]

        val = d.getVar('%s_%s' % (var, pkg), True)
        if val:
            f.write('%s_%s: %s\n' % (var, pkg, encode(val)))
            return
        val = d.getVar('%s' % (var), True)
        if val:
            f.write('%s: %s\n' % (var, encode(val)))
        return

    def write_extra_pkgs(variants, pn, packages, pkgdatadir):
        # Emit <variant>-<pn> pkgdata stubs listing variant-prefixed packages
        # (multilib support for kernel/module/allarch recipes).
        for variant in variants:
            with open("%s/%s-%s" % (pkgdatadir, variant, pn), 'w') as fd:
                fd.write("PACKAGES: %s\n" % ' '.join(
                    map(lambda pkg: '%s-%s' % (variant, pkg), packages.split())))

    def write_extra_runtime_pkgs(variants, packages, pkgdatadir):
        # Emit runtime/<variant>-<pkg> stubs pointing at the base package name.
        for variant in variants:
            for pkg in packages.split():
                ml_pkg = "%s-%s" % (variant, pkg)
                subdata_file = "%s/runtime/%s" % (pkgdatadir, ml_pkg)
                with open(subdata_file, 'w') as fd:
                    fd.write("PKG_%s: %s" % (ml_pkg, pkg))

    packages = d.getVar('PACKAGES', True)
    pkgdest = d.getVar('PKGDEST', True)
    pkgdatadir = d.getVar('PKGDESTWORK', True)

    # Take shared lock since we're only reading, not writing
    lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"), True)

    data_file = pkgdatadir + d.expand("/${PN}" )
    f = open(data_file, 'w')
    f.write("PACKAGES: %s\n" % packages)
    f.close()

    pn = d.getVar('PN', True)
    global_variants = (d.getVar('MULTILIB_GLOBAL_VARIANTS', True) or "").split()
    variants = (d.getVar('MULTILIB_VARIANTS', True) or "").split()

    if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
        write_extra_pkgs(variants, pn, packages, pkgdatadir)

    if (bb.data.inherits_class('allarch', d) and not bb.data.inherits_class('packagegroup', d)):
        write_extra_pkgs(global_variants, pn, packages, pkgdatadir)

    workdir = d.getVar('WORKDIR', True)

    for pkg in packages.split():
        # PKG_<pkg> is the final (possibly renamed) package name; default it
        # to the split package name if unset.
        pkgval = d.getVar('PKG_%s' % pkg, True)
        if pkgval is None:
            pkgval = pkg
            d.setVar('PKG_%s' % pkg, pkg)

        # Record per-file sizes and the package's total size.
        pkgdestpkg = os.path.join(pkgdest, pkg)
        files = {}
        total_size = 0
        for f in pkgfiles[pkg]:
            relpth = os.path.relpath(f, pkgdestpkg)
            fstat = os.lstat(f)
            total_size += fstat.st_size
            files[os.sep + relpth] = fstat.st_size
        d.setVar('FILES_INFO', json.dumps(files))

        subdata_file = pkgdatadir + "/runtime/%s" % pkg
        sf = open(subdata_file, 'w')
        write_if_exists(sf, pkg, 'PN')
        write_if_exists(sf, pkg, 'PE')
        write_if_exists(sf, pkg, 'PV')
        write_if_exists(sf, pkg, 'PR')
        write_if_exists(sf, pkg, 'PKGE')
        write_if_exists(sf, pkg, 'PKGV')
        write_if_exists(sf, pkg, 'PKGR')
        write_if_exists(sf, pkg, 'LICENSE')
        write_if_exists(sf, pkg, 'DESCRIPTION')
        write_if_exists(sf, pkg, 'SUMMARY')
        write_if_exists(sf, pkg, 'RDEPENDS')
        write_if_exists(sf, pkg, 'RPROVIDES')
        write_if_exists(sf, pkg, 'RRECOMMENDS')
        write_if_exists(sf, pkg, 'RSUGGESTS')
        write_if_exists(sf, pkg, 'RREPLACES')
        write_if_exists(sf, pkg, 'RCONFLICTS')
        write_if_exists(sf, pkg, 'SECTION')
        write_if_exists(sf, pkg, 'PKG')
        write_if_exists(sf, pkg, 'ALLOW_EMPTY')
        write_if_exists(sf, pkg, 'FILES')
        write_if_exists(sf, pkg, 'pkg_postinst')
        write_if_exists(sf, pkg, 'pkg_postrm')
        write_if_exists(sf, pkg, 'pkg_preinst')
        write_if_exists(sf, pkg, 'pkg_prerm')
        write_if_exists(sf, pkg, 'FILERPROVIDESFLIST')
        write_if_exists(sf, pkg, 'FILES_INFO')
        for dfile in (d.getVar('FILERPROVIDESFLIST_' + pkg, True) or "").split():
            write_if_exists(sf, pkg, 'FILERPROVIDES_' + dfile)

        write_if_exists(sf, pkg, 'FILERDEPENDSFLIST')
        for dfile in (d.getVar('FILERDEPENDSFLIST_' + pkg, True) or "").split():
            write_if_exists(sf, pkg, 'FILERDEPENDS_' + dfile)

        sf.write('%s_%s: %d\n' % ('PKGSIZE', pkg, total_size))
        sf.close()

        # Symlinks needed for reverse lookups (from the final package name)
        subdata_sym = pkgdatadir + "/runtime-reverse/%s" % pkgval
        oe.path.symlink("../runtime/%s" % pkg, subdata_sym, True)

        # Mark the package as "packaged" if it has content, or if empty
        # packages are explicitly allowed.
        allow_empty = d.getVar('ALLOW_EMPTY_%s' % pkg, True)
        if not allow_empty:
            allow_empty = d.getVar('ALLOW_EMPTY', True)
        root = "%s/%s" % (pkgdest, pkg)
        os.chdir(root)
        g = glob('*')
        if g or allow_empty == "1":
            packagedfile = pkgdatadir + '/runtime/%s.packaged' % pkg
            open(packagedfile, 'w').close()

    if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
        write_extra_runtime_pkgs(variants, packages, pkgdatadir)

    if bb.data.inherits_class('allarch', d) and not bb.data.inherits_class('packagegroup', d):
        write_extra_runtime_pkgs(global_variants, packages, pkgdatadir)

    bb.utils.unlockfile(lf)
}
emit_pkgdata[dirs] = "${PKGDESTWORK}/runtime ${PKGDESTWORK}/runtime-reverse"
1258
ldconfig_postinst_fragment() {
# This fragment is appended to a package's pkg_postinst by package_do_shlibs
# when the package installs libraries needing ldconfig. Only run ldconfig
# when $D is unset — when $D is set the postinst is presumably running
# against an offline root filesystem where host ldconfig must not be used.
if [ x"$D" = "x" ]; then
	if [ -x /sbin/ldconfig ]; then /sbin/ldconfig ; fi
fi
}
1264
1265RPMDEPS = "${STAGING_LIBDIR_NATIVE}/rpm/bin/rpmdeps-oecore --macros ${STAGING_LIBDIR_NATIVE}/rpm/macros --define '_rpmfc_magic_path ${STAGING_DIR_NATIVE}${datadir_native}/misc/magic.mgc' --rpmpopt ${STAGING_LIBDIR_NATIVE}/rpm/rpmpopt"
1266
# Collect per-file run-time dependency metadata
# Output:
# FILERPROVIDESFLIST_pkg - list of all files w/ deps
# FILERPROVIDES_filepath_pkg - per file dep
#
# FILERDEPENDSFLIST_pkg - list of all files w/ deps
# FILERDEPENDS_filepath_pkg - per file dep
1274
python package_do_filedeps() {
    # Collect per-file runtime provides/depends by running rpmdeps (via
    # oe.package.filedeprunner) over each package's files in parallel, and
    # store the results in FILER{PROVIDES,DEPENDS}[FLIST]_* variables.
    if d.getVar('SKIP_FILEDEPS', True) == '1':
        return

    pkgdest = d.getVar('PKGDEST', True)
    rpmdeps = d.getVar('RPMDEPS', True)
    all_pkgs = d.getVar('PACKAGES', True).split()

    # Build the work queue: batches of at most 100 files per pool task.
    work = []
    for pkg in all_pkgs:
        if d.getVar('SKIP_FILEDEPS_' + pkg, True) == '1':
            continue
        # Debug/doc/locale/charmap/kernel-module packages carry no useful
        # per-file dependency information.
        if pkg.endswith(('-dbg', '-doc')) or pkg.startswith('kernel-module-'):
            continue
        if any(tag in pkg for tag in ('-locale-', '-localedata-', '-gconv-', '-charmap-')):
            continue
        flist = pkgfiles[pkg]
        for start in range(0, len(flist), 100):
            work.append((pkg, flist[start:start + 100], rpmdeps, pkgdest))

    import multiprocessing
    pool = bb.utils.multiprocessingpool(multiprocessing.cpu_count())
    results = list(pool.imap(oe.package.filedeprunner, work))
    pool.close()
    pool.join()

    provides_files = {}
    requires_files = {}

    for pkg, provides, requires in results:
        prov_list = provides_files.setdefault(pkg, [])
        req_list = requires_files.setdefault(pkg, [])

        for fname in provides:
            prov_list.append(fname)
            d.setVar("FILERPROVIDES_" + fname + "_" + pkg, " ".join(provides[fname]))

        for fname in requires:
            req_list.append(fname)
            d.setVar("FILERDEPENDS_" + fname + "_" + pkg, " ".join(requires[fname]))

    for pkg, flist in requires_files.items():
        d.setVar("FILERDEPENDSFLIST_" + pkg, " ".join(flist))
    for pkg, flist in provides_files.items():
        d.setVar("FILERPROVIDESFLIST_" + pkg, " ".join(flist))
}
1328
1329SHLIBSDIRS = "${PKGDATA_DIR}/${MLPREFIX}shlibs"
1330SHLIBSWORKDIR = "${PKGDESTWORK}/${MLPREFIX}shlibs"
1331
python package_do_shlibs() {
    # Scan each package's files for shared libraries, register the SONAMEs
    # this recipe provides (written to SHLIBSWORKDIR as <pkg>.list/<pkg>.ver),
    # and resolve each binary's NEEDED entries against known providers,
    # emitting PKGDEST/<pkg>.shlibdeps for read_shlibdeps to pick up.
    import re, pipes

    exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', 0)
    if exclude_shlibs:
        bb.note("not generating shlibs")
        return

    lib_re = re.compile("^.*\.so")
    libdir_re = re.compile(".*/%s$" % d.getVar('baselib', True))

    packages = d.getVar('PACKAGES', True)
    targetos = d.getVar('TARGET_OS', True)

    workdir = d.getVar('WORKDIR', True)

    ver = d.getVar('PKGV', True)
    if not ver:
        msg = "PKGV not defined"
        package_qa_handle_error("pkgv-undefined", msg, d)
        return

    pkgdest = d.getVar('PKGDEST', True)

    shlibs_dirs = d.getVar('SHLIBSDIRS', True).split()
    shlibswork_dir = d.getVar('SHLIBSWORKDIR', True)

    # Take shared lock since we're only reading, not writing
    lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"))

    def read_shlib_providers():
        # Populate shlib_provider{soname: (package, version)} from the
        # <pkg>.list/<pkg>.ver files left by previously packaged recipes.
        list_re = re.compile('^(.*)\.list$')
        # Go from least to most specific since the last one found wins
        for dir in reversed(shlibs_dirs):
            bb.debug(2, "Reading shlib providers in %s" % (dir))
            if not os.path.exists(dir):
                continue
            for file in os.listdir(dir):
                m = list_re.match(file)
                if m:
                    dep_pkg = m.group(1)
                    fd = open(os.path.join(dir, file))
                    lines = fd.readlines()
                    fd.close()
                    ver_file = os.path.join(dir, dep_pkg + '.ver')
                    lib_ver = None
                    if os.path.exists(ver_file):
                        fd = open(ver_file)
                        lib_ver = fd.readline().rstrip()
                        fd.close()
                    for l in lines:
                        shlib_provider[l.rstrip()] = (dep_pkg, lib_ver)

    def linux_so(file):
        # Parse objdump -p output for NEEDED (dependencies) and SONAME
        # (provides); returns whether the package needs an ldconfig postinst.
        needs_ldconfig = False
        cmd = d.getVar('OBJDUMP', True) + " -p " + pipes.quote(file) + " 2>/dev/null"
        fd = os.popen(cmd)
        lines = fd.readlines()
        fd.close()
        for l in lines:
            m = re.match("\s+NEEDED\s+([^\s]*)", l)
            if m:
                if m.group(1) not in needed[pkg]:
                    needed[pkg].append(m.group(1))
                if m.group(1) not in needed_from:
                    needed_from[m.group(1)] = []
                needed_from[m.group(1)].append(file)
            m = re.match("\s+SONAME\s+([^\s]*)", l)
            if m:
                this_soname = m.group(1)
                if not this_soname in sonames:
                    # if library is private (only used by package) then do not build shlib for it
                    if not private_libs or this_soname not in private_libs:
                        sonames.append(this_soname)
                if libdir_re.match(os.path.dirname(file)):
                    needs_ldconfig = True
                if snap_symlinks and (os.path.basename(file) != this_soname):
                    renames.append((file, os.path.join(os.path.dirname(file), this_soname)))
        return needs_ldconfig

    def darwin_so(file):
        # Darwin has no SONAME/objdump equivalent here; derive provides from
        # the library file name and dependencies from libtool .la files.
        if not os.path.exists(file):
            return

        def get_combinations(base):
            #
            # Given a base library name, find all combinations of this split by "." and "-"
            #
            combos = []
            options = base.split(".")
            for i in range(1, len(options) + 1):
                combos.append(".".join(options[0:i]))
            options = base.split("-")
            for i in range(1, len(options) + 1):
                combos.append("-".join(options[0:i]))
            return combos

        if (file.endswith('.dylib') or file.endswith('.so')) and not pkg.endswith('-dev') and not pkg.endswith('-dbg'):
            # Drop suffix
            name = os.path.basename(file).rsplit(".",1)[0]
            # Find all combinations
            combos = get_combinations(name)
            for combo in combos:
                if not combo in sonames:
                    sonames.append(combo)
        if file.endswith('.dylib') or file.endswith('.so'):
            lafile = file.replace(os.path.join(pkgdest, pkg), d.getVar('PKGD', True))
            # Drop suffix
            lafile = lafile.rsplit(".",1)[0]
            lapath = os.path.dirname(lafile)
            lafile = os.path.basename(lafile)
            # Find all combinations
            combos = get_combinations(lafile)
            for combo in combos:
                if os.path.exists(lapath + '/' + combo + '.la'):
                    break
            lafile = lapath + '/' + combo + '.la'

            #bb.note("Foo2: %s" % lafile)
            #bb.note("Foo %s" % file)
            if os.path.exists(lafile):
                fd = open(lafile, 'r')
                lines = fd.readlines()
                fd.close()
                for l in lines:
                    m = re.match("\s*dependency_libs=\s*'(.*)'", l)
                    if m:
                        deps = m.group(1).split(" ")
                        for dep in deps:
                            #bb.note("Trying %s for %s" % (dep, pkg))
                            name = None
                            if dep.endswith(".la"):
                                name = os.path.basename(dep).replace(".la", "")
                            elif dep.startswith("-l"):
                                name = dep.replace("-l", "lib")
                            if pkg not in needed:
                                needed[pkg] = []
                            if name and name not in needed[pkg]:
                                needed[pkg].append(name)
                                if name not in needed_from:
                                    needed_from[name] = []
                                if lafile and lafile not in needed_from[name]:
                                    needed_from[name].append(lafile)
                            #bb.note("Adding %s for %s" % (name, pkg))

    if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS', True) == "1":
        snap_symlinks = True
    else:
        snap_symlinks = False

    if (d.getVar('USE_LDCONFIG', True) or "1") == "1":
        use_ldconfig = True
    else:
        use_ldconfig = False

    needed = {}
    needed_from = {}
    shlib_provider = {}
    read_shlib_providers()

    for pkg in packages.split():
        private_libs = d.getVar('PRIVATE_LIBS_' + pkg, True) or d.getVar('PRIVATE_LIBS', True) or ""
        private_libs = private_libs.split()
        needs_ldconfig = False
        bb.debug(2, "calculating shlib provides for %s" % pkg)

        pkgver = d.getVar('PKGV_' + pkg, True)
        if not pkgver:
            pkgver = d.getVar('PV_' + pkg, True)
        if not pkgver:
            pkgver = ver

        needed[pkg] = []
        sonames = list()
        renames = list()
        for file in pkgfiles[pkg]:
            if cpath.islink(file):
                continue
            if targetos == "darwin" or targetos == "darwin8":
                darwin_so(file)
            elif os.access(file, os.X_OK) or lib_re.match(file):
                ldconfig = linux_so(file)
                needs_ldconfig = needs_ldconfig or ldconfig
        for (old, new) in renames:
            bb.note("Renaming %s to %s" % (old, new))
            os.rename(old, new)
            pkgfiles[pkg].remove(old)

        shlibs_file = os.path.join(shlibswork_dir, pkg + ".list")
        shver_file = os.path.join(shlibswork_dir, pkg + ".ver")
        if len(sonames):
            fd = open(shlibs_file, 'w')
            for s in sonames:
                if s in shlib_provider:
                    (old_pkg, old_pkgver) = shlib_provider[s]
                    if old_pkg != pkg:
                        bb.warn('%s-%s was registered as shlib provider for %s, changing it to %s-%s because it was built later' % (old_pkg, old_pkgver, s, pkg, pkgver))
                bb.debug(1, 'registering %s-%s as shlib provider for %s' % (pkg, pkgver, s))
                fd.write(s + '\n')
                shlib_provider[s] = (pkg, pkgver)
            fd.close()
            fd = open(shver_file, 'w')
            fd.write(pkgver + '\n')
            fd.close()
        if needs_ldconfig and use_ldconfig:
            bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
            postinst = d.getVar('pkg_postinst_%s' % pkg, True)
            if not postinst:
                postinst = '#!/bin/sh\n'
            postinst += d.getVar('ldconfig_postinst_fragment', True)
            d.setVar('pkg_postinst_%s' % pkg, postinst)
        bb.debug(1, 'LIBNAMES: pkg %s sonames %s' % (pkg, sonames))

    bb.utils.unlockfile(lf)

    assumed_libs = d.getVar('ASSUME_SHLIBS', True)
    if assumed_libs:
        # ASSUME_SHLIBS entries look like "libfoo.so.1:provider[_version]".
        for e in assumed_libs.split():
            l, dep_pkg = e.split(":")
            lib_ver = None
            dep_pkg = dep_pkg.rsplit("_", 1)
            if len(dep_pkg) == 2:
                lib_ver = dep_pkg[1]
            dep_pkg = dep_pkg[0]
            shlib_provider[l] = (dep_pkg, lib_ver)

    for pkg in packages.split():
        bb.debug(2, "calculating shlib requirements for %s" % pkg)

        # Fix: re-read PRIVATE_LIBS for this package. Previously this loop
        # reused whatever value private_libs held after the final iteration
        # of the provider loop above, so the wrong package's private-library
        # list could (fail to) suppress dependencies here.
        private_libs = d.getVar('PRIVATE_LIBS_' + pkg, True) or d.getVar('PRIVATE_LIBS', True) or ""
        private_libs = private_libs.split()

        deps = list()
        for n in needed[pkg]:
            # if n is in private libraries, don't try to search provider for it
            # this could cause problem in case some abc.bb provides private
            # /opt/abc/lib/libfoo.so.1 and contains /usr/bin/abc depending on system library libfoo.so.1
            # but skipping it is still better alternative than providing own
            # version and then adding runtime dependency for the same system library
            if private_libs and n in private_libs:
                bb.debug(2, '%s: Dependency %s covered by PRIVATE_LIBS' % (pkg, n))
                continue
            if n in shlib_provider.keys():
                (dep_pkg, ver_needed) = shlib_provider[n]

                bb.debug(2, '%s: Dependency %s requires package %s (used by files: %s)' % (pkg, n, dep_pkg, needed_from[n]))

                if dep_pkg == pkg:
                    continue

                if ver_needed:
                    dep = "%s (>= %s)" % (dep_pkg, ver_needed)
                else:
                    dep = dep_pkg
                if not dep in deps:
                    deps.append(dep)
            else:
                bb.note("Couldn't find shared library provider for %s, used by files: %s" % (n, needed_from[n]))

        deps_file = os.path.join(pkgdest, pkg + ".shlibdeps")
        if os.path.exists(deps_file):
            os.remove(deps_file)
        if len(deps):
            fd = open(deps_file, 'w')
            for dep in deps:
                fd.write(dep + '\n')
            fd.close()
}
1598
python package_do_pkgconfig () {
    # Track which packages provide and require pkg-config (.pc) modules,
    # writing <pkg>.pclist to SHLIBSWORKDIR and PKGDEST/<pkg>.pcdeps for
    # read_shlibdeps to turn into RDEPENDS.
    import re

    packages = d.getVar('PACKAGES', True)
    workdir = d.getVar('WORKDIR', True)
    pkgdest = d.getVar('PKGDEST', True)

    shlibs_dirs = d.getVar('SHLIBSDIRS', True).split()
    shlibswork_dir = d.getVar('SHLIBSWORKDIR', True)

    pc_re = re.compile('(.*)\.pc$')
    var_re = re.compile('(.*)=(.*)')
    field_re = re.compile('(.*): (.*)')

    pkgconfig_provided = {}
    pkgconfig_needed = {}
    for pkg in packages.split():
        pkgconfig_provided[pkg] = []
        pkgconfig_needed[pkg] = []
        for file in pkgfiles[pkg]:
            m = pc_re.match(file)
            if m:
                pd = bb.data.init()
                name = m.group(1)
                # Fix: pkgfiles entries are absolute paths, so m.group(1) is
                # the full path without ".pc". Requires: lines reference bare
                # module names, so record the basename or lookups below and
                # in other recipes' .pclist files can never match.
                pkgconfig_provided[pkg].append(os.path.basename(name))
                if not os.access(file, os.R_OK):
                    continue
                f = open(file, 'r')
                lines = f.readlines()
                f.close()
                for l in lines:
                    # Variable assignments ("prefix=...") are expanded into a
                    # scratch datastore so later fields can reference them.
                    m = var_re.match(l)
                    if m:
                        name = m.group(1)
                        val = m.group(2)
                        pd.setVar(name, pd.expand(val))
                        continue
                    m = field_re.match(l)
                    if m:
                        hdr = m.group(1)
                        exp = bb.data.expand(m.group(2), pd)
                        if hdr == 'Requires':
                            pkgconfig_needed[pkg] += exp.replace(',', ' ').split()

    # Take shared lock since we're only reading, not writing
    lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"))

    for pkg in packages.split():
        pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist")
        if pkgconfig_provided[pkg] != []:
            f = open(pkgs_file, 'w')
            for p in pkgconfig_provided[pkg]:
                f.write('%s\n' % p)
            f.close()

    # Go from least to most specific since the last one found wins
    for dir in reversed(shlibs_dirs):
        if not os.path.exists(dir):
            continue
        for file in os.listdir(dir):
            m = re.match('^(.*)\.pclist$', file)
            if m:
                pkg = m.group(1)
                fd = open(os.path.join(dir, file))
                lines = fd.readlines()
                fd.close()
                pkgconfig_provided[pkg] = []
                for l in lines:
                    pkgconfig_provided[pkg].append(l.rstrip())

    for pkg in packages.split():
        deps = []
        for n in pkgconfig_needed[pkg]:
            found = False
            for k in pkgconfig_provided.keys():
                if n in pkgconfig_provided[k]:
                    if k != pkg and not (k in deps):
                        deps.append(k)
                    found = True
            if found == False:
                bb.note("couldn't find pkgconfig module '%s' in any package" % n)
        deps_file = os.path.join(pkgdest, pkg + ".pcdeps")
        if len(deps):
            fd = open(deps_file, 'w')
            for dep in deps:
                fd.write(dep + '\n')
            fd.close()

    bb.utils.unlockfile(lf)
}
1689
def read_libdep_files(d):
    # Collect the per-package dependency files written by package_do_shlibs
    # (.shlibdeps), package_do_pkgconfig (.pcdeps) and CLI packaging
    # (.clilibdeps) from PKGDEST.
    # Returns {package: {dependency: [version constraints]}}.
    pkglibdeps = {}
    packages = d.getVar('PACKAGES', True).split()
    for pkg in packages:
        pkglibdeps[pkg] = {}
        for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
            depsfile = d.expand("${PKGDEST}/" + pkg + extension)
            if os.access(depsfile, os.R_OK):
                with open(depsfile) as fd:
                    lines = fd.readlines()
                for l in lines:
                    # Fix: rstrip() returns the stripped copy; the original
                    # discarded it, feeding the trailing newline into
                    # explode_dep_versions2. First constraint wins per dep.
                    l = l.rstrip()
                    deps = bb.utils.explode_dep_versions2(l)
                    for dep in deps:
                        if not dep in pkglibdeps[pkg]:
                            pkglibdeps[pkg][dep] = deps[dep]
    return pkglibdeps
1708
python read_shlibdeps () {
    # Fold the autodetected library/pkgconfig dependencies collected by
    # read_libdep_files into each package's RDEPENDS, keeping any version
    # constraints already present and appending new ones.
    libdeps = read_libdep_files(d)

    for pkg in d.getVar('PACKAGES', True).split():
        rdeps = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg, True) or "")
        for dep in libdeps[pkg]:
            # An entry with no constraints is still recorded as a dependency.
            constraints = rdeps.setdefault(dep, [])
            for v in libdeps[pkg][dep]:
                if v not in constraints:
                    constraints.append(v)
        d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdeps, commasep=False))
}
1724
python package_depchains() {
    """
    For a given set of prefix and postfix modifiers, make those packages
    RRECOMMENDS on the corresponding packages for its RDEPENDS.

    Example: If package A depends upon package B, and A's .bb emits an
    A-dev package, this would make A-dev Recommends: B-dev.

    If only one of a given suffix is specified, it will take the RRECOMMENDS
    based on the RDEPENDS of *all* other packages. If more than one of a given
    suffix is specified, its will only use the RDEPENDS of the single parent
    package.
    """

    packages = d.getVar('PACKAGES', True)
    postfixes = (d.getVar('DEPCHAIN_POST', True) or '').split()
    prefixes = (d.getVar('DEPCHAIN_PRE', True) or '').split()

    def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d):
        # For each build-time DEPENDS entry, recommend its 'suffix' sibling
        # (e.g. foo -> foo-dev), skipping native/cross/virtual providers.

        #bb.note('depends for %s is %s' % (base, depends))
        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg, True) or "")

        for depend in depends:
            if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'):
                #bb.note("Skipping %s" % depend)
                continue
            # Normalise to the base package before applying the modifier.
            if depend.endswith('-dev'):
                depend = depend[:-4]
            if depend.endswith('-dbg'):
                depend = depend[:-4]
            pkgname = getname(depend, suffix)
            #bb.note("Adding %s for %s" % (pkgname, depend))
            if pkgname not in rreclist and pkgname != pkg:
                rreclist[pkgname] = []

        #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist)))
        d.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))

    def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):
        # Same as pkg_adddeprrecs but driven by runtime RDEPENDS entries.

        #bb.note('rdepends for %s is %s' % (base, rdepends))
        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg, True) or "")

        for depend in rdepends:
            if depend.find('virtual-locale-') != -1:
                #bb.note("Skipping %s" % depend)
                continue
            if depend.endswith('-dev'):
                depend = depend[:-4]
            if depend.endswith('-dbg'):
                depend = depend[:-4]
            pkgname = getname(depend, suffix)
            #bb.note("Adding %s for %s" % (pkgname, depend))
            if pkgname not in rreclist and pkgname != pkg:
                rreclist[pkgname] = []

        #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist)))
        d.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))

    def add_dep(list, dep):
        # Append preserving order, without duplicates.
        if dep not in list:
            list.append(dep)

    depends = []
    for dep in bb.utils.explode_deps(d.getVar('DEPENDS', True) or ""):
        add_dep(depends, dep)

    rdepends = []
    for pkg in packages.split():
        for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + pkg, True) or ""):
            add_dep(rdepends, dep)

    #bb.note('rdepends is %s' % rdepends)

    def post_getname(name, suffix):
        return '%s%s' % (name, suffix)
    def pre_getname(name, suffix):
        return '%s%s' % (suffix, name)

    # Group this recipe's packages by which DEPCHAIN modifier they carry:
    # pkgs[modifier][pkg] = (base package name, name-builder function).
    pkgs = {}
    for pkg in packages.split():
        for postfix in postfixes:
            if pkg.endswith(postfix):
                if not postfix in pkgs:
                    pkgs[postfix] = {}
                pkgs[postfix][pkg] = (pkg[:-len(postfix)], post_getname)

        for prefix in prefixes:
            if pkg.startswith(prefix):
                if not prefix in pkgs:
                    pkgs[prefix] = {}
                # NOTE(review): pkg[:-len(prefix)] strips from the *end* even
                # though this is a prefix; pkg[len(prefix):] looks intended.
                # Verify before changing — DEPCHAIN_PRE is rarely used.
                pkgs[prefix][pkg] = (pkg[:-len(prefix)], pre_getname)

    if "-dbg" in pkgs:
        # -dbg packages recommend the -dbg siblings of the libraries the
        # recipe's binaries actually link against.
        pkglibdeps = read_libdep_files(d)
        pkglibdeplist = []
        for pkg in pkglibdeps:
            for k in pkglibdeps[pkg]:
                add_dep(pkglibdeplist, k)
        # FIXME this should not look at PN once all task recipes inherit from task.bbclass
        dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS', True) == '1') or (d.getVar('PN', True) or '').startswith('packagegroup-'))

    for suffix in pkgs:
        for pkg in pkgs[suffix]:
            if d.getVarFlag('RRECOMMENDS_' + pkg, 'nodeprrecs'):
                continue
            (base, func) = pkgs[suffix][pkg]
            if suffix == "-dev":
                pkg_adddeprrecs(pkg, base, suffix, func, depends, d)
            elif suffix == "-dbg":
                if not dbgdefaultdeps:
                    pkg_addrrecs(pkg, base, suffix, func, pkglibdeplist, d)
                continue
            if len(pkgs[suffix]) == 1:
                # Single modified package: chain against all RDEPENDS.
                pkg_addrrecs(pkg, base, suffix, func, rdepends, d)
            else:
                # Several: chain each against its own base's RDEPENDS only.
                rdeps = []
                for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + base, True) or ""):
                    add_dep(rdeps, dep)
                pkg_addrrecs(pkg, base, suffix, func, rdeps, d)
}
1847
# Since bitbake can't determine which variables are accessed during package
# iteration, we need to list them here:
# gen_packagevar() below expands each of these into per-package variable
# names (e.g. FILES_<pkg>) for use in do_package[vardeps], so a change to
# any of them re-triggers the do_package task.
PACKAGEVARS = "FILES RDEPENDS RRECOMMENDS SUMMARY DESCRIPTION RSUGGESTS RPROVIDES RCONFLICTS PKG ALLOW_EMPTY pkg_postinst pkg_postrm INITSCRIPT_NAME INITSCRIPT_PARAMS DEBIAN_NOAUTONAME ALTERNATIVE PKGE PKGV PKGR USERADD_PARAM GROUPADD_PARAM CONFFILES"
1851
def gen_packagevar(d, pkgvars="PACKAGEVARS"):
    """Return a space-separated list of per-package variable names for
    do_package[vardeps].

    For every package P in PACKAGES and every variable V listed in the
    variable named by pkgvars (PACKAGEVARS by default), the name V_P is
    emitted, plus one LICENSE_EXCLUSION-P entry per package so that
    changes to INCOMPATIBLE_LICENSE re-run do_package for affected
    recipes.

    :param d: the BitBake datastore
    :param pkgvars: name of the variable holding the list of per-package
                    variable stems (defaults to "PACKAGEVARS")
    :return: space-joined string of variable names (empty if no packages)
    """
    ret = []
    pkgs = (d.getVar("PACKAGES", True) or "").split()
    # 'varnames' rather than 'vars' to avoid shadowing the builtin.
    varnames = (d.getVar(pkgvars, True) or "").split()
    for p in pkgs:
        for v in varnames:
            ret.append(v + "_" + p)

        # Ensure that changes to INCOMPATIBLE_LICENSE re-run do_package for
        # affected recipes. This must happen once per package, inside the
        # loop: the previous code referenced the loop variable after the
        # loop had finished, covering only the last package and raising
        # NameError when PACKAGES was empty.
        ret.append('LICENSE_EXCLUSION-%s' % p)
    return " ".join(ret)
1864
# Hook point for recipes/classes to mangle PKGD before splitting/stripping.
PACKAGE_PREPROCESS_FUNCS ?= ""
# Functions for setting up PKGD
# (run in order by do_package; ?= so a recipe can replace the whole list)
PACKAGEBUILDPKGD ?= " \
                perform_packagecopy \
                ${PACKAGE_PREPROCESS_FUNCS} \
                split_and_strip_files \
                fixup_perms \
                "
# Functions which split PKGD up into separate packages
PACKAGESPLITFUNCS ?= " \
                package_do_split_locales \
                populate_packages"
# Functions which process metadata based on split packages
# (+= so earlier additions to PACKAGEFUNCS are preserved)
PACKAGEFUNCS += " \
                package_fixsymlinks \
                package_name_hook \
                package_do_filedeps \
                package_do_shlibs \
                package_do_pkgconfig \
                read_shlibdeps \
                package_depchains \
                emit_pkgdata"
1887
python do_package () {
    # Change the following version to cause sstate to invalidate the package
    # cache. This is useful if an item this class depends on changes in a
    # way that the output of this class changes. rpmdeps is a good example
    # as any change to rpmdeps requires this to be rerun.
    # PACKAGE_BBCLASS_VERSION = "1"

    # Init cachedpath: a stat/walk cache shared (via this module-level
    # global) with the helper functions executed below.
    global cpath
    cpath = oe.cachedpath.CachedPath()

    ###########################################################################
    # Sanity test the setup
    ###########################################################################

    packages = (d.getVar('PACKAGES', True) or "").split()
    if len(packages) < 1:
        bb.debug(1, "No packages to build, skipping do_package")
        return

    workdir = d.getVar('WORKDIR', True)
    outdir = d.getVar('DEPLOY_DIR', True)
    dest = d.getVar('D', True)
    dvar = d.getVar('PKGD', True)
    pn = d.getVar('PN', True)

    # Missing any of these means packaging cannot proceed; route the error
    # through the QA handler rather than raising directly.
    if not workdir or not outdir or not dest or not dvar or not pn:
        msg = "WORKDIR, DEPLOY_DIR, D, PN and PKGD all must be defined, unable to package"
        package_qa_handle_error("var-undefined", msg, d)
        return

    # Fetch PRAUTO from the PR service before any package metadata is emitted.
    bb.build.exec_func("package_get_auto_pr", d)

    ###########################################################################
    # Optimisations
    ###########################################################################

    # Continually re-expanding complex expressions is inefficient, particularly
    # when we write to the datastore and invalidate the expansion cache. This
    # code pre-expands some frequently used variables.

    def expandVar(x, d):
        d.setVar(x, d.getVar(x, True))

    for x in 'PN', 'PV', 'BPN', 'TARGET_SYS', 'EXTENDPRAUTO':
        expandVar(x, d)

    ###########################################################################
    # Setup PKGD (from D)
    ###########################################################################

    for f in (d.getVar('PACKAGEBUILDPKGD', True) or '').split():
        bb.build.exec_func(f, d)

    ###########################################################################
    # Split up PKGD into PKGDEST
    ###########################################################################

    # Reset the path cache: the PACKAGEBUILDPKGD functions above modified
    # PKGD, so previously cached stat results may be stale.
    cpath = oe.cachedpath.CachedPath()

    for f in (d.getVar('PACKAGESPLITFUNCS', True) or '').split():
        bb.build.exec_func(f, d)

    ###########################################################################
    # Process PKGDEST
    ###########################################################################

    # Build global list of files in each split package, consumed by the
    # PACKAGEFUNCS (e.g. file dependency and shlibs scanning).
    global pkgfiles
    pkgfiles = {}
    # Re-read PACKAGES: the split functions above may have altered it.
    packages = d.getVar('PACKAGES', True).split()
    pkgdest = d.getVar('PKGDEST', True)
    for pkg in packages:
        pkgfiles[pkg] = []
        for walkroot, dirs, files in cpath.walk(pkgdest + "/" + pkg):
            for file in files:
                pkgfiles[pkg].append(walkroot + os.sep + file)

    for f in (d.getVar('PACKAGEFUNCS', True) or '').split():
        bb.build.exec_func(f, d)
}
1969
# do_package runs from SHLIBSWORKDIR (first entry in [dirs] is the cwd);
# the others are created if absent.
do_package[dirs] = "${SHLIBSWORKDIR} ${PKGDESTWORK} ${D}"
# Make the task signature depend on the function lists and on every
# per-package variable produced by gen_packagevar().
do_package[vardeps] += "${PACKAGEBUILDPKGD} ${PACKAGESPLITFUNCS} ${PACKAGEFUNCS} ${@gen_packagevar(d)}"
addtask package after do_install

# Shared lock serialising access to the packaging output directories.
PACKAGELOCK = "${STAGING_DIR}/package-output.lock"
SSTATETASKS += "do_package"
do_package[cleandirs] = "${PKGDEST} ${PKGDESTWORK}"
do_package[sstate-plaindirs] = "${PKGD} ${PKGDEST} ${PKGDESTWORK}"
do_package[sstate-lockfile-shared] = "${PACKAGELOCK}"
do_package_setscene[dirs] = "${STAGING_DIR}"
1980
# Setscene variant: restore do_package output from shared state instead of
# re-running the full packaging pipeline.
python do_package_setscene () {
    sstate_setscene(d)
}
addtask do_package_setscene
1985
# do_packagedata has no work of its own (":" is a shell no-op); it exists so
# sstate can publish PKGDESTWORK (produced by do_package) into PKGDATA_DIR.
do_packagedata () {
	:
}

addtask packagedata before do_build after do_package

SSTATETASKS += "do_packagedata"
do_packagedata[sstate-inputdirs] = "${PKGDESTWORK}"
do_packagedata[sstate-outputdirs] = "${PKGDATA_DIR}"
do_packagedata[sstate-lockfile-shared] = "${PACKAGELOCK}"
# PKGDATA_DIR is machine-specific, so stamp per MACHINE.
do_packagedata[stamp-extra-info] = "${MACHINE}"

# Setscene variant: restore the packagedata output from shared state.
python do_packagedata_setscene () {
    sstate_setscene(d)
}
addtask do_packagedata_setscene
2002
2003#
2004# Helper functions for the package writing classes
2005#
2006
def mapping_rename_hook(d):
    """
    Rewrite the runtime dependency variables so that they refer to the
    final (possibly renamed) package name, accounting for renames done by
    things like debian.bbclass or manual PKG variable changes.
    """
    pkg = d.getVar("PKG", True)
    for depvar in ("RDEPENDS", "RRECOMMENDS", "RSUGGESTS",
                   "RPROVIDES", "RREPLACES", "RCONFLICTS"):
        runtime_mapping_rename(depvar, pkg, d)
2019