summaryrefslogtreecommitdiffstats
path: root/meta/classes/package.bbclass
diff options
context:
space:
mode:
Diffstat (limited to 'meta/classes/package.bbclass')
-rw-r--r--meta/classes/package.bbclass2488
1 files changed, 0 insertions, 2488 deletions
diff --git a/meta/classes/package.bbclass b/meta/classes/package.bbclass
deleted file mode 100644
index e3f0a7060b..0000000000
--- a/meta/classes/package.bbclass
+++ /dev/null
@@ -1,2488 +0,0 @@
1#
2# Packaging process
3#
4# Executive summary: This class iterates over the functions listed in PACKAGEFUNCS
5# Taking D and splitting it up into the packages listed in PACKAGES, placing the
6# resulting output in PKGDEST.
7#
8# There are the following default steps but PACKAGEFUNCS can be extended:
9#
10# a) package_convert_pr_autoinc - convert AUTOINC in PKGV to ${PRSERV_PV_AUTOINC}
11#
12# b) perform_packagecopy - Copy D into PKGD
13#
14# c) package_do_split_locales - Split out the locale files, updates FILES and PACKAGES
15#
16# d) split_and_strip_files - split the files into runtime and debug and strip them.
17# Debug files include debug info split, and associated sources that end up in -dbg packages
18#
19# e) fixup_perms - Fix up permissions in the package before we split it.
20#
21# f) populate_packages - Split the files in PKGD into separate packages in PKGDEST/<pkgname>
22# Also triggers the binary stripping code to put files in -dbg packages.
23#
24# g) package_do_filedeps - Collect perfile run-time dependency metadata
25# The data is stored in FILER{PROVIDES,DEPENDS}_file_pkg variables with
26# a list of affected files in FILER{PROVIDES,DEPENDS}FLIST_pkg
27#
28# h) package_do_shlibs - Look at the shared libraries generated and automatically add any
29# dependencies found. Also stores the package name so anyone else using this library
30# knows which package to depend on.
31#
32# i) package_do_pkgconfig - Keep track of which packages need and provide which .pc files
33#
34# j) read_shlibdeps - Reads the stored shlibs information into the metadata
35#
36# k) package_depchains - Adds automatic dependencies to -dbg and -dev packages
37#
38# l) emit_pkgdata - saves the packaging data into PKGDATA_DIR for use in later
39# packaging steps
40
41inherit packagedata
42inherit chrpath
43inherit package_pkgdata
44
45# Need the package_qa_handle_error() in insane.bbclass
46inherit insane
47
48PKGD = "${WORKDIR}/package"
49PKGDEST = "${WORKDIR}/packages-split"
50
51LOCALE_SECTION ?= ''
52
53ALL_MULTILIB_PACKAGE_ARCHS = "${@all_multilib_tune_values(d, 'PACKAGE_ARCHS')}"
54
55# rpm is used for the per-file dependency identification
56# dwarfsrcfiles is used to determine the list of debug source files
57PACKAGE_DEPENDS += "rpm-native dwarfsrcfiles-native"
58
59
60# If your postinstall can execute at rootfs creation time rather than on
61# target but depends on a native/cross tool in order to execute, you need to
62# list that tool in PACKAGE_WRITE_DEPS. Target package dependencies belong
63# in the package dependencies as normal, this is just for native/cross support
64# tools at rootfs build time.
65PACKAGE_WRITE_DEPS ??= ""
66
def legitimize_package_name(s):
    """
    Return *s* rewritten into a valid package name.

    Glibc-style ``<UXXXX>`` codepoint escapes are decoded to real
    characters, the result is lowercased, and characters that are not
    valid in package names are mapped to legal replacements.
    """
    import re

    def decode_codepoint(match):
        hexcp = match.group(1)
        if hexcp:
            # Turn e.g. '<U00E9>' into the character it encodes.
            return ('\\u%s' % hexcp).encode('latin-1').decode('unicode_escape')

    # Expand unicode codepoints encoded as <U0123>, as in glibc locale files.
    s = re.sub(r'<U([0-9A-Fa-f]{1,4})>', decode_codepoint, s)

    # Map remaining problem characters onto package-name-safe ones.
    result = s.lower()
    for bad, good in (('_', '-'), ('@', '+'), (',', '+'), ('/', '-')):
        result = result.replace(bad, good)
    return result
def do_split_packages(d, root, file_regex, output_pattern, description, postinst=None, recursive=False, hook=None, extra_depends=None, aux_files_pattern=None, postrm=None, allow_dirs=False, prepend=False, match_path=False, aux_files_pattern_verbatim=None, allow_links=False, summary=None):
    """
    Used in .bb files to split up dynamically generated subpackages of a
    given package, usually plugins or modules.

    Arguments:
    root           -- the path in which to search
    file_regex     -- regular expression to match searched files. Use
                      parentheses () to mark the part of this expression
                      that should be used to derive the module name (to be
                      substituted where %s is used in other function
                      arguments as noted below)
    output_pattern -- pattern to use for the package names. Must include %s.
    description    -- description to set for each package. Must include %s.
    postinst       -- postinstall script to use for all packages (as a
                      string)
    recursive      -- True to perform a recursive search - default False
    hook           -- a hook function to be called for every match. The
                      function will be called with the following arguments
                      (in the order listed):
                        f: full path to the file/directory match
                        pkg: the package name
                        file_regex: as above
                        output_pattern: as above
                        modulename: the module name derived using file_regex
    extra_depends  -- extra runtime dependencies (RDEPENDS) to be set for
                      all packages. The default value of None causes a
                      dependency on the main package (${PN}) - if you do
                      not want this, pass '' for this parameter.
    aux_files_pattern -- extra item(s) to be added to FILES for each
                      package. Can be a single string item or a list of
                      strings for multiple items. Must include %s.
    postrm         -- postrm script to use for all packages (as a string)
    allow_dirs     -- True allow directories to be matched - default False
    prepend        -- if True, prepend created packages to PACKAGES instead
                      of the default False which appends them
    match_path     -- match file_regex on the whole relative path to the
                      root rather than just the file name
    aux_files_pattern_verbatim -- extra item(s) to be added to FILES for
                      each package, using the actual derived module name
                      rather than converting it to something legal for a
                      package name. Can be a single string item or a list
                      of strings for multiple items. Must include %s.
    allow_links    -- True to allow symlinks to be matched - default False
    summary        -- Summary to set for each package. Must include %s;
                      defaults to description if not set.

    Returns the list of packages that were split out.
    """
    # Hoisted out of the per-object loop below, where they used to be
    # re-executed on every iteration.
    import re, stat

    dvar = d.getVar('PKGD')
    root = d.expand(root)
    output_pattern = d.expand(output_pattern)
    extra_depends = d.expand(extra_depends)

    # If the root directory doesn't exist, don't error out later but silently do
    # no splitting.
    if not os.path.exists(dvar + root):
        return []

    # For multilib builds, prefix the generated package names and any extra
    # dependencies with the multilib prefix so they are correctly named.
    ml = d.getVar("MLPREFIX")
    if ml:
        if not output_pattern.startswith(ml):
            output_pattern = ml + output_pattern

        newdeps = []
        for dep in (extra_depends or "").split():
            if dep.startswith(ml):
                newdeps.append(dep)
            else:
                newdeps.append(ml + dep)
        if newdeps:
            extra_depends = " ".join(newdeps)


    packages = d.getVar('PACKAGES').split()
    split_packages = set()

    if postinst:
        postinst = '#!/bin/sh\n' + postinst + '\n'
    if postrm:
        postrm = '#!/bin/sh\n' + postrm + '\n'

    # Collect candidate paths relative to dvar + root.
    if not recursive:
        objs = os.listdir(dvar + root)
    else:
        objs = []
        for walkroot, dirs, files in os.walk(dvar + root):
            for file in files:
                relpath = os.path.join(walkroot, file).replace(dvar + root + '/', '', 1)
                if relpath:
                    objs.append(relpath)

    # Default to a runtime dependency on the main package (see docstring).
    if extra_depends is None:
        extra_depends = d.getVar("PN")

    if not summary:
        summary = description

    for o in sorted(objs):
        if match_path:
            m = re.match(file_regex, o)
        else:
            m = re.match(file_regex, os.path.basename(o))

        if not m:
            continue
        f = os.path.join(dvar + root, o)
        mode = os.lstat(f).st_mode
        # Regular files only by default; symlinks/dirs only when requested.
        if not (stat.S_ISREG(mode) or (allow_links and stat.S_ISLNK(mode)) or (allow_dirs and stat.S_ISDIR(mode))):
            continue
        on = legitimize_package_name(m.group(1))
        pkg = output_pattern % on
        split_packages.add(pkg)
        if not pkg in packages:
            if prepend:
                packages = [pkg] + packages
            else:
                packages.append(pkg)
        oldfiles = d.getVar('FILES_' + pkg)
        newfile = os.path.join(root, o)
        # These names will be passed through glob() so if the filename actually
        # contains * or ? (rare, but possible) we need to handle that specially
        newfile = newfile.replace('*', '[*]')
        newfile = newfile.replace('?', '[?]')
        if not oldfiles:
            the_files = [newfile]
            if aux_files_pattern:
                if type(aux_files_pattern) is list:
                    for fp in aux_files_pattern:
                        the_files.append(fp % on)
                else:
                    the_files.append(aux_files_pattern % on)
            if aux_files_pattern_verbatim:
                if type(aux_files_pattern_verbatim) is list:
                    for fp in aux_files_pattern_verbatim:
                        the_files.append(fp % m.group(1))
                else:
                    the_files.append(aux_files_pattern_verbatim % m.group(1))
            d.setVar('FILES_' + pkg, " ".join(the_files))
        else:
            d.setVar('FILES_' + pkg, oldfiles + " " + newfile)
        if extra_depends != '':
            d.appendVar('RDEPENDS_' + pkg, ' ' + extra_depends)
        if not d.getVar('DESCRIPTION_' + pkg):
            d.setVar('DESCRIPTION_' + pkg, description % on)
        if not d.getVar('SUMMARY_' + pkg):
            d.setVar('SUMMARY_' + pkg, summary % on)
        if postinst:
            d.setVar('pkg_postinst_' + pkg, postinst)
        if postrm:
            d.setVar('pkg_postrm_' + pkg, postrm)
        if callable(hook):
            hook(f, pkg, file_regex, output_pattern, m.group(1))

    d.setVar('PACKAGES', ' '.join(packages))
    return list(split_packages)
240
241PACKAGE_DEPENDS += "file-native"
242
python () {
    # Turn each build-time PACKAGE_DEPENDS entry into a do_populate_sysroot
    # task dependency of do_package so the needed tools are staged before
    # packaging runs.
    if d.getVar('PACKAGES') != '':
        deps = ""
        for dep in (d.getVar('PACKAGE_DEPENDS') or "").split():
            deps += " %s:do_populate_sysroot" % dep
        if d.getVar('PACKAGE_MINIDEBUGINFO') == '1':
            # xz is needed to compress the minidebuginfo .gnu_debugdata payload.
            deps += ' xz-native:do_populate_sysroot'
        d.appendVarFlag('do_package', 'depends', deps)

        # shlibs requires any DEPENDS to have already packaged for the *.list files
        d.appendVarFlag('do_package', 'deptask', " do_packagedata")
}
255
# Get a list of files from file vars by searching files under current working directory
# The list contains symlinks, directories and normal files.
def files_from_filevars(filevars):
    """
    Expand a list of FILES-style entries into concrete paths.

    Returns a tuple (files, symlink_paths): the resolved paths (relative,
    './'-prefixed) and the original paths that were found to live under a
    directory symlink.
    """
    import os,glob
    cpath = oe.cachedpath.CachedPath()
    files = []
    for f in filevars:
        # Normalise every entry to a './'-relative path so glob() matches
        # against the current working directory.
        if os.path.isabs(f):
            f = '.' + f
        if not f.startswith("./"):
            f = './' + f
        globbed = glob.glob(f)
        if globbed:
            # If the glob expanded to something different, use the expansion;
            # otherwise fall through and keep the literal entry.
            if [ f ] != globbed:
                files += globbed
                continue
        files.append(f)

    symlink_paths = []
    for ind, f in enumerate(files):
        # Handle directory symlinks. Truncate path to the lowest level symlink
        parent = ''
        for dirname in f.split('/')[:-1]:
            parent = os.path.join(parent, dirname)
            if dirname == '.':
                continue
            if cpath.islink(parent):
                bb.warn("FILES contains file '%s' which resides under a "
                        "directory symlink. Please fix the recipe and use the "
                        "real path for the file." % f[1:])
                symlink_paths.append(f)
                files[ind] = parent
                f = parent
                break

        # NOTE: appending to 'files' here extends the list currently being
        # enumerated — this is how directory contents get walked recursively.
        if not cpath.islink(f):
            if cpath.isdir(f):
                newfiles = [ os.path.join(f,x) for x in os.listdir(f) ]
                if newfiles:
                    files += newfiles

    return files, symlink_paths
298
# Called in package_<rpm,ipk,deb>.bbclass to get the correct list of configuration files
def get_conffiles(pkg, d):
    """
    Return the configuration files of package *pkg*.

    Reads CONFFILES_<pkg> (falling back to CONFFILES), expands the entries
    relative to PKGDEST/<pkg>, and keeps only plain, existing files
    (directories and symlinks are dropped). Paths are returned without the
    leading './'.
    """
    pkgdest = d.getVar('PKGDEST')
    root = os.path.join(pkgdest, pkg)
    cwd = os.getcwd()
    os.chdir(root)
    # try/finally guarantees the working directory is restored even if
    # expansion raises — previously an exception left us chdir'd into root.
    try:
        conffiles = d.getVar('CONFFILES_%s' % pkg)
        if conffiles is None:
            conffiles = d.getVar('CONFFILES')
        if conffiles is None:
            conffiles = ""
        conffiles = conffiles.split()
        conf_orig_list = files_from_filevars(conffiles)[0]

        # Remove links and directories from conf_orig_list to get conf_list
        # which only contains normal files
        conf_list = []
        for f in conf_orig_list:
            if os.path.isdir(f):
                continue
            if os.path.islink(f):
                continue
            if not os.path.exists(f):
                continue
            conf_list.append(f)

        # Remove the leading './'
        conf_list = [f[1:] for f in conf_list]
    finally:
        os.chdir(cwd)
    return conf_list
331
def checkbuildpath(file, d):
    """
    Return True if *file* mentions the build TMPDIR path.

    Used to detect whether the toolchain compiled without remapping debug
    source paths (e.g. via -fdebug-prefix-map).
    """
    tmpdir = d.getVar('TMPDIR')
    with open(file) as handle:
        return tmpdir in handle.read()
340
def parse_debugsources_from_dwarfsrcfiles_output(dwarfsrcfiles_output):
    """
    Extract the debug source paths from dwarfsrcfiles output.

    Source entries are the tab-indented lines; the first whitespace
    separated token of each is a path. Paths are normalised and
    de-duplicated while preserving first-seen order.
    """
    seen = {}
    for entry in dwarfsrcfiles_output.splitlines():
        if not entry.startswith("\t"):
            continue
        seen[os.path.normpath(entry.split()[0])] = ""
    return seen.keys()
349
def source_info(file, d, fatal=True):
    """
    Return the list of debug source files referenced by binary *file*.

    Runs dwarfsrcfiles on the binary and parses its output. Exit code 255
    means the file could not be fully parsed, which is tolerated; any
    other non-zero exit is fatal (or merely noted when fatal=False).
    """
    import subprocess

    cmd = ["dwarfsrcfiles", file]
    try:
        output = subprocess.check_output(cmd, universal_newlines=True, stderr=subprocess.STDOUT)
        retval = 0
    except subprocess.CalledProcessError as exc:
        output, retval = exc.output, exc.returncode

    # 255 means a specific file wasn't fully parsed to get the debug file list, which is not a fatal failure
    if retval not in (0, 255):
        msg = "dwarfsrcfiles failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else "")
        if fatal:
            bb.fatal(msg)
        bb.note(msg)

    return list(parse_debugsources_from_dwarfsrcfiles_output(output))
371
def splitdebuginfo(file, dvar, debugdir, debuglibdir, debugappend, debugsrcdir, d):
    # Function to split a single file into two components, one is the stripped
    # target system binary, the other contains any debugging information. The
    # two files are linked to reference each other.
    #
    # return a mapping of files:debugsources

    import stat
    import subprocess

    src = file[len(dvar):]
    dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
    debugfile = dvar + dest
    sources = []

    # Split the file...
    bb.utils.mkdirhier(os.path.dirname(debugfile))
    #bb.note("Split %s -> %s" % (file, debugfile))
    # Only store off the hard link reference if we successfully split!

    # NOTE(review): the 'dvar' parameter is re-read from the datastore here
    # AFTER it was already used to compute src/debugfile above — presumably
    # callers always pass PKGD so both values agree; confirm.
    dvar = d.getVar('PKGD')
    objcopy = d.getVar("OBJCOPY")

    # We ignore kernel modules, we don't generate debug info files.
    if file.find("/lib/modules/") != -1 and file.endswith(".ko"):
        return (file, sources)

    # Temporarily make the file readable/writable so objcopy can work on it;
    # the original mode is restored at the end.
    # NOTE(review): this condition looks like it was meant to be
    # "not W_OK or not R_OK" — as written it also fires for any readable
    # file. Harmless since the mode is restored, but worth confirming.
    newmode = None
    if not os.access(file, os.W_OK) or os.access(file, os.R_OK):
        origmode = os.stat(file)[stat.ST_MODE]
        newmode = origmode | stat.S_IWRITE | stat.S_IREAD
        os.chmod(file, newmode)

    # We need to extract the debug src information here...
    if debugsrcdir:
        sources = source_info(file, d)

    bb.utils.mkdirhier(os.path.dirname(debugfile))

    # Copy only the debug sections into the separate .debug file.
    subprocess.check_output([objcopy, '--only-keep-debug', file, debugfile], stderr=subprocess.STDOUT)

    # Set the debuglink to have the view of the file path on the target
    subprocess.check_output([objcopy, '--add-gnu-debuglink', debugfile, file], stderr=subprocess.STDOUT)

    if newmode:
        os.chmod(file, origmode)

    return (file, sources)
420
def splitstaticdebuginfo(file, dvar, debugstaticdir, debugstaticlibdir, debugstaticappend, debugsrcdir, d):
    # Unlike the function above, there is no way to split a static library
    # two components. So to get similar results we will copy the unmodified
    # static library (containing the debug symbols) into a new directory.
    # We will then strip (preserving symbols) the static library in the
    # typical location.
    #
    # return a mapping of files:debugsources

    import stat
    import shutil

    src = file[len(dvar):]
    dest = debugstaticlibdir + os.path.dirname(src) + debugstaticdir + "/" + os.path.basename(src) + debugstaticappend
    debugfile = dvar + dest
    sources = []

    # Copy the file...
    bb.utils.mkdirhier(os.path.dirname(debugfile))
    #bb.note("Copy %s -> %s" % (file, debugfile))

    # NOTE(review): the 'dvar' parameter is re-read from the datastore here
    # AFTER it was already used to compute src/debugfile above — presumably
    # callers always pass PKGD so both values agree; confirm.
    dvar = d.getVar('PKGD')

    # Temporarily make the file readable/writable; original mode is restored
    # at the end.
    # NOTE(review): same suspicious condition as splitdebuginfo() — looks
    # like "not W_OK or not R_OK" was intended; confirm.
    newmode = None
    if not os.access(file, os.W_OK) or os.access(file, os.R_OK):
        origmode = os.stat(file)[stat.ST_MODE]
        newmode = origmode | stat.S_IWRITE | stat.S_IREAD
        os.chmod(file, newmode)

    # We need to extract the debug src information here...
    if debugsrcdir:
        sources = source_info(file, d)

    bb.utils.mkdirhier(os.path.dirname(debugfile))

    # Copy the unmodified item to the debug directory
    shutil.copy2(file, debugfile)

    if newmode:
        os.chmod(file, origmode)

    return (file, sources)
463
def inject_minidebuginfo(file, dvar, debugdir, debuglibdir, debugappend, debugsrcdir, d):
    # Extract just the symbols from debuginfo into minidebuginfo,
    # compress it with xz and inject it back into the binary in a .gnu_debugdata section.
    # https://sourceware.org/gdb/onlinedocs/gdb/MiniDebugInfo.html

    import subprocess

    readelf = d.getVar('READELF')
    nm = d.getVar('NM')
    objcopy = d.getVar('OBJCOPY')

    minidebuginfodir = d.expand('${WORKDIR}/minidebuginfo')

    # Locate the split .debug file produced earlier for this binary.
    src = file[len(dvar):]
    dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
    debugfile = dvar + dest
    minidebugfile = minidebuginfodir + src + '.minidebug'
    bb.utils.mkdirhier(os.path.dirname(minidebugfile))

    # If we didn't produce debuginfo for any reason, we can't produce minidebuginfo either
    # so skip it.
    if not os.path.exists(debugfile):
        bb.debug(1, 'ELF file {} has no debuginfo, skipping minidebuginfo injection'.format(file))
        return

    # Find non-allocated PROGBITS, NOTE, and NOBITS sections in the debuginfo.
    # We will exclude all of these from minidebuginfo to save space.
    remove_section_names = []
    for line in subprocess.check_output([readelf, '-W', '-S', debugfile], universal_newlines=True).splitlines():
        fields = line.split()
        if len(fields) < 8:
            continue
        name = fields[0]
        type = fields[1]
        flags = fields[7]
        # .debug_ sections will be removed by objcopy -S so no need to explicitly remove them
        if name.startswith('.debug_'):
            continue
        if 'A' not in flags and type in ['PROGBITS', 'NOTE', 'NOBITS']:
            remove_section_names.append(name)

    # List dynamic symbols in the binary. We can exclude these from minidebuginfo
    # because they are always present in the binary.
    dynsyms = set()
    for line in subprocess.check_output([nm, '-D', file, '--format=posix', '--defined-only'], universal_newlines=True).splitlines():
        dynsyms.add(line.split()[0])

    # Find all function symbols from debuginfo which aren't in the dynamic symbols table.
    # These are the ones we want to keep in minidebuginfo.
    keep_symbols_file = minidebugfile + '.symlist'
    found_any_symbols = False
    with open(keep_symbols_file, 'w') as f:
        for line in subprocess.check_output([nm, debugfile, '--format=sysv', '--defined-only'], universal_newlines=True).splitlines():
            fields = line.split('|')
            if len(fields) < 7:
                continue
            name = fields[0].strip()
            type = fields[3].strip()
            if type == 'FUNC' and name not in dynsyms:
                f.write('{}\n'.format(name))
                found_any_symbols = True

    if not found_any_symbols:
        bb.debug(1, 'ELF file {} contains no symbols, skipping minidebuginfo injection'.format(file))
        return

    # Remove stale output from any previous run before regenerating.
    bb.utils.remove(minidebugfile)
    bb.utils.remove(minidebugfile + '.xz')

    # Strip everything except the kept function symbols and drop the
    # excluded sections.
    subprocess.check_call([objcopy, '-S'] +
                          ['--remove-section={}'.format(s) for s in remove_section_names] +
                          ['--keep-symbols={}'.format(keep_symbols_file), debugfile, minidebugfile])

    # Compress with xz, then embed the result as a .gnu_debugdata section.
    subprocess.check_call(['xz', '--keep', minidebugfile])

    subprocess.check_call([objcopy, '--add-section', '.gnu_debugdata={}.xz'.format(minidebugfile), file])
540
def copydebugsources(debugsrcdir, sources, d):
    # The debug src information written out to sourcefile is further processed
    # and copied to the destination here.

    import stat
    import subprocess

    if debugsrcdir and sources:
        sourcefile = d.expand("${WORKDIR}/debugsources.list")
        bb.utils.remove(sourcefile)

        # filenames are null-separated - this is an artefact of the previous use
        # of rpm's debugedit, which was writing them out that way, and the code elsewhere
        # is still assuming that.
        debuglistoutput = '\0'.join(sources) + '\0'
        with open(sourcefile, 'a') as sf:
            sf.write(debuglistoutput)

        dvar = d.getVar('PKGD')
        strip = d.getVar("STRIP")
        objcopy = d.getVar("OBJCOPY")
        workdir = d.getVar("WORKDIR")
        workparentdir = os.path.dirname(os.path.dirname(workdir))
        workbasedir = os.path.basename(os.path.dirname(workdir)) + "/" + os.path.basename(workdir)

        # If build path exists in sourcefile, it means toolchain did not use
        # -fdebug-prefix-map to compile
        if checkbuildpath(sourcefile, d):
            localsrc_prefix = workparentdir + "/"
        else:
            localsrc_prefix = "/usr/src/debug/"

        # Build the destination hierarchy, remembering which levels we had
        # to create so that empty ones can be removed again at the end.
        # NOTE(review): 'cpath' is a module-level CachedPath instance defined
        # elsewhere in this class — confirm it is initialised before this runs.
        nosuchdir = []
        basepath = dvar
        for p in debugsrcdir.split("/"):
            basepath = basepath + "/" + p
            if not cpath.exists(basepath):
                nosuchdir.append(basepath)
            bb.utils.mkdirhier(basepath)
            cpath.updatecache(basepath)

        # Ignore files from the recipe sysroots (target and native)
        processdebugsrc = "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '((<internal>|<built-in>)$|/.*recipe-sysroot.*/)' | "
        # We need to ignore files that are not actually ours
        # we do this by only paying attention to items from this package
        processdebugsrc += "fgrep -zw '%s' | "
        # Remove prefix in the source paths
        processdebugsrc += "sed 's#%s##g' | "
        processdebugsrc += "(cd '%s' ; cpio -pd0mlL --no-preserve-owner '%s%s' 2>/dev/null)"

        cmd = processdebugsrc % (sourcefile, workbasedir, localsrc_prefix, workparentdir, dvar, debugsrcdir)
        try:
            subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError:
            # Can "fail" if internal headers/transient sources are attempted
            pass

        # cpio seems to have a bug with -lL together and symbolic links are just copied, not dereferenced.
        # Work around this by manually finding and copying any symbolic links that made it through.
        cmd = "find %s%s -type l -print0 -delete | sed s#%s%s/##g | (cd '%s' ; cpio -pd0mL --no-preserve-owner '%s%s')" % \
            (dvar, debugsrcdir, dvar, debugsrcdir, workparentdir, dvar, debugsrcdir)
        subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)

        # The copy by cpio may have resulted in some empty directories! Remove these
        cmd = "find %s%s -empty -type d -delete" % (dvar, debugsrcdir)
        subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)

        # Also remove debugsrcdir if its empty
        for p in nosuchdir[::-1]:
            if os.path.exists(p) and not os.listdir(p):
                os.rmdir(p)
612
613#
614# Package data handling routines
615#
616
def get_package_mapping (pkg, basepkg, d, depversions=None):
    """
    Map *pkg* to its renamed package name (PKG_<pkg> in pkgdata), if any.

    Returns the rewritten name, or *pkg* unchanged when no rename applies
    or when applying the rename would be wrong (allarch extra packages,
    or a renamed package that still RPROVIDES its original name).
    """
    import oe.packagedata

    data = oe.packagedata.read_subpkgdata(pkg, d)
    key = "PKG_%s" % pkg

    if key in data:
        # Have to avoid undoing the write_extra_pkgs(global_variants...)
        if bb.data.inherits_class('allarch', d) and not d.getVar('MULTILIB_VARIANTS') \
            and data[key] == basepkg:
            return pkg
        # depversions == [] means an unversioned dependency was given.
        if depversions == []:
            # Avoid returning a mapping if the renamed package rprovides its original name
            rprovkey = "RPROVIDES_%s" % pkg
            if rprovkey in data:
                if pkg in bb.utils.explode_dep_versions2(data[rprovkey]):
                    bb.note("%s rprovides %s, not replacing the latter" % (data[key], pkg))
                    return pkg
        # Do map to rewritten package name
        return data[key]

    return pkg
639
def get_package_additional_metadata (pkg_type, d):
    """
    Return extra package metadata lines for the given packaging backend.

    Checks PACKAGE_ADD_METADATA_<PKGTYPE> first and falls back to plain
    PACKAGE_ADD_METADATA; returns None when neither variable is set.
    """
    base_key = "PACKAGE_ADD_METADATA"
    candidates = ("%s_%s" % (base_key, pkg_type.upper()), base_key)
    for key in candidates:
        if d.getVar(key, False) is None:
            continue
        # Mark the variable as a newline-separated list for typed_value().
        d.setVarFlag(key, "type", "list")
        if d.getVarFlag(key, "separator") is None:
            d.setVarFlag(key, "separator", "\\n")
        fields = [field.strip() for field in oe.data.typed_value(key, d)]
        return "\n".join(fields).strip()
650
def runtime_mapping_rename (varname, pkg, d):
    """
    Rewrite the dependency variable *varname* so any dependency whose
    package has been renamed (PKG_* mapping in pkgdata) is referred to by
    its new name.
    """
    mapped = {}
    deps = bb.utils.explode_dep_versions2(d.getVar(varname) or "")
    for depend, depversions in deps.items():
        renamed = get_package_mapping(depend, pkg, d, depversions)
        if renamed != depend:
            bb.note("package name mapping done: %s -> %s" % (depend, renamed))
        mapped[renamed] = depversions

    d.setVar(varname, bb.utils.join_deps(mapped, commasep=False))
665
666#
667# Used by do_packagedata (and possibly other routines post do_package)
668#
669
package_get_auto_pr[vardepsexclude] = "BB_TASKDEPDATA"
python package_get_auto_pr() {
    # Obtain the automatic PR value (PRAUTO) and the AUTOINC replacement
    # value from the PR service — or from a lockdown export file — for
    # this recipe.
    import oe.prservice

    # Look up the unihash of this recipe's do_package task from the task
    # dependency data; it is used as the checksum key for the PR service.
    def get_do_package_hash(pn):
        if d.getVar("BB_RUNTASK") != "do_package":
            taskdepdata = d.getVar("BB_TASKDEPDATA", False)
            for dep in taskdepdata:
                if taskdepdata[dep][1] == "do_package" and taskdepdata[dep][0] == pn:
                    return taskdepdata[dep][6]
        return None

    # Support per recipe PRSERV_HOST
    pn = d.getVar('PN')
    host = d.getVar("PRSERV_HOST_" + pn)
    if not (host is None):
        d.setVar("PRSERV_HOST", host)

    pkgv = d.getVar("PKGV")

    # PR Server not active, handle AUTOINC
    if not d.getVar('PRSERV_HOST'):
        d.setVar("PRSERV_PV_AUTOINC", "0")
        return

    auto_pr = None
    pv = d.getVar("PV")
    version = d.getVar("PRAUTOINX")
    pkgarch = d.getVar("PACKAGE_ARCH")
    checksum = get_do_package_hash(pn)

    # If do_package isn't in the dependencies, we can't get the checksum...
    if not checksum:
        bb.warn('Task %s requested do_package unihash, but it was not available.' % d.getVar('BB_RUNTASK'))
        #taskdepdata = d.getVar("BB_TASKDEPDATA", False)
        #for dep in taskdepdata:
        #    bb.warn('%s:%s = %s' % (taskdepdata[dep][0], taskdepdata[dep][1], taskdepdata[dep][6]))
        return

    # Lockdown mode: PR values come from previously exported variables
    # rather than from a live PR server.
    if d.getVar('PRSERV_LOCKDOWN'):
        auto_pr = d.getVar('PRAUTO_' + version + '_' + pkgarch) or d.getVar('PRAUTO_' + version) or None
        if auto_pr is None:
            bb.fatal("Can NOT get PRAUTO from lockdown exported file")
        d.setVar('PRAUTO',str(auto_pr))
        return

    try:
        conn = d.getVar("__PRSERV_CONN")
        if conn is None:
            conn = oe.prservice.prserv_make_conn(d)
        if conn is not None:
            if "AUTOINC" in pkgv:
                # Ask the PR server for the AUTOINC value for this srcrev.
                srcpv = bb.fetch2.get_srcrev(d)
                base_ver = "AUTOINC-%s" % version[:version.find(srcpv)]
                value = conn.getPR(base_ver, pkgarch, srcpv)
                d.setVar("PRSERV_PV_AUTOINC", str(value))

            auto_pr = conn.getPR(version, pkgarch, checksum)
    except Exception as e:
        bb.fatal("Can NOT get PRAUTO, exception %s" % str(e))
    if auto_pr is None:
        bb.fatal("Can NOT get PRAUTO from remote PR service")
    d.setVar('PRAUTO',str(auto_pr))
}
734
735#
736# Package functions suitable for inclusion in PACKAGEFUNCS
737#
738
python package_convert_pr_autoinc() {
    # Convert AUTOINC in PKGV to the ${PRSERV_PV_AUTOINC} placeholder, and
    # turn PRSERV_PV_AUTOINC/EXTENDPRAUTO into @...@ markers that are
    # substituted with real values when packages are written.
    pkgv = d.getVar("PKGV")

    # Adjust pkgv as necessary...
    if 'AUTOINC' in pkgv:
        d.setVar("PKGV", pkgv.replace("AUTOINC", "${PRSERV_PV_AUTOINC}"))

    # Change PRSERV_PV_AUTOINC and EXTENDPRAUTO usage to special values
    d.setVar('PRSERV_PV_AUTOINC', '@PRSERV_PV_AUTOINC@')
    d.setVar('EXTENDPRAUTO', '@EXTENDPRAUTO@')
}
750
751LOCALEBASEPN ??= "${PN}"
752
python package_do_split_locales() {
    # Split locale files out of the main package into one
    # ${LOCALEBASEPN}-locale-<ln> package per locale directory found,
    # updating PACKAGES and the per-package FILES/SUMMARY/etc variables.
    if (d.getVar('PACKAGE_NO_LOCALE') == '1'):
        bb.debug(1, "package requested not splitting locales")
        return

    packages = (d.getVar('PACKAGES') or "").split()

    datadir = d.getVar('datadir')
    if not datadir:
        bb.note("datadir not defined")
        return

    dvar = d.getVar('PKGD')
    pn = d.getVar('LOCALEBASEPN')

    # The catch-all locale package is replaced by the per-locale packages.
    if pn + '-locale' in packages:
        packages.remove(pn + '-locale')

    localedir = os.path.join(dvar + datadir, 'locale')

    if not cpath.isdir(localedir):
        bb.debug(1, "No locale files in this package")
        return

    locales = os.listdir(localedir)

    summary = d.getVar('SUMMARY') or pn
    description = d.getVar('DESCRIPTION') or ""
    locale_section = d.getVar('LOCALE_SECTION')
    mlprefix = d.getVar('MLPREFIX') or ""
    # Create one package per locale directory found.
    for l in sorted(locales):
        ln = legitimize_package_name(l)
        pkg = pn + '-locale-' + ln
        packages.append(pkg)
        d.setVar('FILES_' + pkg, os.path.join(datadir, 'locale', l))
        d.setVar('RRECOMMENDS_' + pkg, '%svirtual-locale-%s' % (mlprefix, ln))
        d.setVar('RPROVIDES_' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln))
        d.setVar('SUMMARY_' + pkg, '%s - %s translations' % (summary, l))
        d.setVar('DESCRIPTION_' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l))
        if locale_section:
            d.setVar('SECTION_' + pkg, locale_section)

    d.setVar('PACKAGES', ' '.join(packages))

    # Disabled by RP 18/06/07
    # Wildcards aren't supported in debian
    # They break with ipkg since glibc-locale* will mean that
    # glibc-localedata-translit* won't install as a dependency
    # for some other package which breaks meta-toolchain
    # Probably breaks since virtual-locale- isn't provided anywhere
    #rdep = (d.getVar('RDEPENDS_%s' % pn) or "").split()
    #rdep.append('%s-locale*' % pn)
    #d.setVar('RDEPENDS_%s' % pn, ' '.join(rdep))
}
807
python perform_packagecopy () {
    # Copy ${D} into ${PKGD}, the tree that all subsequent packaging steps
    # operate on.
    import subprocess
    import shutil

    dest = d.getVar('D')
    dvar = d.getVar('PKGD')

    # Remove ${D}/sysroot-only if present
    sysroot_only = os.path.join(dest, 'sysroot-only')
    if cpath.exists(sysroot_only) and cpath.isdir(sysroot_only):
        shutil.rmtree(sysroot_only)

    # Start by package population by taking a copy of the installed
    # files to operate on
    # Preserve sparse files and hard links
    cmd = 'tar -cf - -C %s -p -S . | tar -xf - -C %s' % (dest, dvar)
    subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)

    # replace RPATHs for the nativesdk binaries, to make them relocatable
    if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d):
        rpath_replace (dvar, d)
}
perform_packagecopy[cleandirs] = "${PKGD}"
perform_packagecopy[dirs] = "${PKGD}"
832
833# We generate a master list of directories to process, we start by
834# seeding this list with reasonable defaults, then load from
835# the fs-perms.txt files
python fixup_perms () {
    import pwd, grp

    # Apply the ownership/permission policy from the fs-perms tables to the
    # staged tree in PKGD before it is split into packages: the standard
    # bitbake.conf directories default to 0755 root:root, then entries read
    # from FILESYSTEM_PERMS_TABLES override or extend those defaults.
    # "link" entries relocate a directory and leave a symlink in its place.
    #
    # init using a string with the same format as a line as documented in
    # the fs-perms.txt file
    # <path> <mode> <uid> <gid> <walk> <fmode> <fuid> <fgid>
    # <path> link <link target>
    #
    # __str__ can be used to print out an entry in the input format
    #
    # if fs_perms_entry.path is None:
    #    an error occurred
    # if fs_perms_entry.link, you can retrieve:
    #    fs_perms_entry.path = path
    #    fs_perms_entry.link = target of link
    # if not fs_perms_entry.link, you can retrieve:
    #    fs_perms_entry.path = path
    #    fs_perms_entry.mode = expected dir mode or None
    #    fs_perms_entry.uid = expected uid or -1
    #    fs_perms_entry.gid = expected gid or -1
    #    fs_perms_entry.walk = 'true' or something else
    #    fs_perms_entry.fmode = expected file mode or None
    #    fs_perms_entry.fuid = expected file uid or -1
    #    fs_perms_entry.fgid = expected file gid or -1
    class fs_perms_entry():
        def __init__(self, line):
            lsplit = line.split()
            if len(lsplit) == 3 and lsplit[1].lower() == "link":
                self._setlink(lsplit[0], lsplit[2])
            elif len(lsplit) == 8:
                self._setdir(lsplit[0], lsplit[1], lsplit[2], lsplit[3], lsplit[4], lsplit[5], lsplit[6], lsplit[7])
            else:
                msg = "Fixup Perms: invalid config line %s" % line
                package_qa_handle_error("perm-config", msg, d)
                self.path = None
                self.link = None

        def _setdir(self, path, mode, uid, gid, walk, fmode, fuid, fgid):
            self.path = os.path.normpath(path)
            self.link = None
            self.mode = self._procmode(mode)
            self.uid = self._procuid(uid)
            self.gid = self._procgid(gid)
            self.walk = walk.lower()
            self.fmode = self._procmode(fmode)
            self.fuid = self._procuid(fuid)
            self.fgid = self._procgid(fgid)

        def _setlink(self, path, link):
            self.path = os.path.normpath(path)
            self.link = link

        def _procmode(self, mode):
            # "-" (or empty) means "leave the mode alone"
            if not mode or (mode and mode == "-"):
                return None
            else:
                return int(mode,8)

        # Note uid/gid -1 has special significance in os.lchown
        def _procuid(self, uid):
            # Accept numeric ids or user names; "-" means "don't change"
            if uid is None or uid == "-":
                return -1
            elif uid.isdigit():
                return int(uid)
            else:
                return pwd.getpwnam(uid).pw_uid

        def _procgid(self, gid):
            # Accept numeric ids or group names; "-" means "don't change"
            if gid is None or gid == "-":
                return -1
            elif gid.isdigit():
                return int(gid)
            else:
                return grp.getgrnam(gid).gr_gid

        # Use for debugging the entries
        def __str__(self):
            if self.link:
                return "%s link %s" % (self.path, self.link)
            else:
                mode = "-"
                if self.mode:
                    mode = "0%o" % self.mode
                fmode = "-"
                if self.fmode:
                    fmode = "0%o" % self.fmode
                uid = self._mapugid(self.uid)
                gid = self._mapugid(self.gid)
                fuid = self._mapugid(self.fuid)
                fgid = self._mapugid(self.fgid)
                return "%s %s %s %s %s %s %s %s" % (self.path, mode, uid, gid, self.walk, fmode, fuid, fgid)

        def _mapugid(self, id):
            # Reverse of _procuid/_procgid for __str__: -1 maps back to "-"
            if id is None or id == -1:
                return "-"
            else:
                return "%d" % id

    # Fix the permission, owner and group of path
    def fix_perms(path, mode, uid, gid, dir):
        if mode and not os.path.islink(path):
            #bb.note("Fixup Perms: chmod 0%o %s" % (mode, dir))
            os.chmod(path, mode)
        # -1 is a special value that means don't change the uid/gid
        # if they are BOTH -1, don't bother to lchown
        if not (uid == -1 and gid == -1):
            #bb.note("Fixup Perms: lchown %d:%d %s" % (uid, gid, dir))
            os.lchown(path, uid, gid)

    # Return a list of configuration files based on either the default
    # files/fs-perms.txt or the contents of FILESYSTEM_PERMS_TABLES
    # paths are resolved via BBPATH
    def get_fs_perms_list(d):
        # NOTE(review): 'str' shadows the builtin and bb.utils.which() is
        # called twice per found file; harmless, but could be tidied up.
        str = ""
        bbpath = d.getVar('BBPATH')
        fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES') or ""
        for conf_file in fs_perms_tables.split():
            confpath = bb.utils.which(bbpath, conf_file)
            if confpath:
                str += " %s" % bb.utils.which(bbpath, conf_file)
            else:
                bb.warn("cannot find %s specified in FILESYSTEM_PERMS_TABLES" % conf_file)
        return str



    dvar = d.getVar('PKGD')

    # path -> fs_perms_entry; a path is in exactly one of the two tables
    fs_perms_table = {}
    fs_link_table = {}

    # By default all of the standard directories specified in
    # bitbake.conf will get 0755 root:root.
    target_path_vars = [ 'base_prefix',
                'prefix',
                'exec_prefix',
                'base_bindir',
                'base_sbindir',
                'base_libdir',
                'datadir',
                'sysconfdir',
                'servicedir',
                'sharedstatedir',
                'localstatedir',
                'infodir',
                'mandir',
                'docdir',
                'bindir',
                'sbindir',
                'libexecdir',
                'libdir',
                'includedir',
                'oldincludedir' ]

    for path in target_path_vars:
        dir = d.getVar(path) or ""
        if dir == "":
            continue
        fs_perms_table[dir] = fs_perms_entry(d.expand("%s 0755 root root false - - -" % (dir)))

    # Now we actually load from the configuration files
    # Later entries for the same path replace earlier ones, and a path can
    # switch between being a perms entry and a link entry.
    for conf in get_fs_perms_list(d).split():
        if not os.path.exists(conf):
            continue
        with open(conf) as f:
            for line in f:
                if line.startswith('#'):
                    continue
                lsplit = line.split()
                if len(lsplit) == 0:
                    continue
                if len(lsplit) != 8 and not (len(lsplit) == 3 and lsplit[1].lower() == "link"):
                    msg = "Fixup perms: %s invalid line: %s" % (conf, line)
                    package_qa_handle_error("perm-line", msg, d)
                    continue
                entry = fs_perms_entry(d.expand(line))
                if entry and entry.path:
                    if entry.link:
                        fs_link_table[entry.path] = entry
                        if entry.path in fs_perms_table:
                            fs_perms_table.pop(entry.path)
                    else:
                        fs_perms_table[entry.path] = entry
                        if entry.path in fs_link_table:
                            fs_link_table.pop(entry.path)

    # Debug -- list out in-memory table
    #for dir in fs_perms_table:
    #    bb.note("Fixup Perms: %s: %s" % (dir, str(fs_perms_table[dir])))
    #for link in fs_link_table:
    #    bb.note("Fixup Perms: %s: %s" % (link, str(fs_link_table[link])))

    # We process links first, so we can go back and fixup directory ownership
    # for any newly created directories
    # Process in sorted order so /run gets created before /run/lock, etc.
    for entry in sorted(fs_link_table.values(), key=lambda x: x.link):
        link = entry.link
        dir = entry.path
        origin = dvar + dir
        # Only relocate real, existing directories (not symlinks)
        if not (cpath.exists(origin) and cpath.isdir(origin) and not cpath.islink(origin)):
            continue

        if link[0] == "/":
            target = dvar + link
            ptarget = link
        else:
            target = os.path.join(os.path.dirname(origin), link)
            ptarget = os.path.join(os.path.dirname(dir), link)
        if os.path.exists(target):
            msg = "Fixup Perms: Unable to correct directory link, target already exists: %s -> %s" % (dir, ptarget)
            package_qa_handle_error("perm-link", msg, d)
            continue

        # Create path to move directory to, move it, and then setup the symlink
        bb.utils.mkdirhier(os.path.dirname(target))
        #bb.note("Fixup Perms: Rename %s -> %s" % (dir, ptarget))
        os.rename(origin, target)
        #bb.note("Fixup Perms: Link %s -> %s" % (dir, link))
        os.symlink(link, origin)

    for dir in fs_perms_table:
        origin = dvar + dir
        if not (cpath.exists(origin) and cpath.isdir(origin)):
            continue

        fix_perms(origin, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)

        # Optionally recurse: dirs get the dir mode/uid/gid, files get fmode/fuid/fgid
        if fs_perms_table[dir].walk == 'true':
            for root, dirs, files in os.walk(origin):
                for dr in dirs:
                    each_dir = os.path.join(root, dr)
                    fix_perms(each_dir, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)
                for f in files:
                    each_file = os.path.join(root, f)
                    fix_perms(each_file, fs_perms_table[dir].fmode, fs_perms_table[dir].fuid, fs_perms_table[dir].fgid, dir)
}
1072
python split_and_strip_files () {
    import stat, errno
    import subprocess

    # Split the binaries under PKGD into stripped runtime files plus separate
    # debug information (layout selected by PACKAGE_DEBUG_SPLIT_STYLE), while
    # preserving hardlink groups and recreating matching symlinks for the
    # debug files, then strip the runtime binaries themselves.

    dvar = d.getVar('PKGD')
    pn = d.getVar('PN')
    hostos = d.getVar('HOST_OS')

    oldcwd = os.getcwd()
    os.chdir(dvar)

    # We default to '.debug' style
    if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-file-directory':
        # Single debug-file-directory style debug info
        debugappend = ".debug"
        debugstaticappend = ""
        debugdir = ""
        debugstaticdir = ""
        debuglibdir = "/usr/lib/debug"
        debugstaticlibdir = "/usr/lib/debug-static"
        debugsrcdir = "/usr/src/debug"
    elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-without-src':
        # Original OE-core, a.k.a. ".debug", style debug info, but without sources in /usr/src/debug
        debugappend = ""
        debugstaticappend = ""
        debugdir = "/.debug"
        debugstaticdir = "/.debug-static"
        debuglibdir = ""
        debugstaticlibdir = ""
        debugsrcdir = ""
    elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg':
        # ".debug" style, with sources split into a separate -src package
        debugappend = ""
        debugstaticappend = ""
        debugdir = "/.debug"
        debugstaticdir = "/.debug-static"
        debuglibdir = ""
        debugstaticlibdir = ""
        debugsrcdir = "/usr/src/debug"
    else:
        # Original OE-core, a.k.a. ".debug", style debug info
        debugappend = ""
        debugstaticappend = ""
        debugdir = "/.debug"
        debugstaticdir = "/.debug-static"
        debuglibdir = ""
        debugstaticlibdir = ""
        debugsrcdir = "/usr/src/debug"

    #
    # First lets figure out all of the files we may have to process ... do this only once!
    #
    elffiles = {}
    symlinks = {}
    kernmods = []
    staticlibs = []
    inodes = {}
    libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir"))
    baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir"))
    skipfiles = (d.getVar("INHIBIT_PACKAGE_STRIP_FILES") or "").split()
    if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1' or \
            d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
        checkelf = {}
        checkelflinks = {}
        for root, dirs, files in cpath.walk(dvar):
            for f in files:
                file = os.path.join(root, f)

                # Skip debug files
                if debugappend and file.endswith(debugappend):
                    continue
                if debugdir and debugdir in os.path.dirname(file[len(dvar):]):
                    continue

                if file in skipfiles:
                    continue

                # Kernel modules and static archives are handled separately
                if file.endswith(".ko") and file.find("/lib/modules/") != -1:
                    kernmods.append(file)
                    continue
                if oe.package.is_static_lib(file):
                    staticlibs.append(file)
                    continue

                try:
                    ltarget = cpath.realpath(file, dvar, False)
                    s = cpath.lstat(ltarget)
                except OSError as e:
                    (err, strerror) = e.args
                    if err != errno.ENOENT:
                        raise
                    # Skip broken symlinks
                    continue
                if not s:
                    continue
                # Check its an executable
                if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) or (s[stat.ST_MODE] & stat.S_IXOTH) \
                        or ((file.startswith(libdir) or file.startswith(baselibdir)) and (".so" in f or ".node" in f)):

                    if cpath.islink(file):
                        checkelflinks[file] = ltarget
                        continue
                    # Use a reference of device ID and inode number to identify files
                    file_reference = "%d_%d" % (s.st_dev, s.st_ino)
                    checkelf[file] = (file, file_reference)

        # Classify the symlink targets in parallel
        results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelflinks.values(), d)
        results_map = {}
        for (ltarget, elf_file) in results:
            results_map[ltarget] = elf_file
        for file in checkelflinks:
            ltarget = checkelflinks[file]
            # If it's a symlink, and points to an ELF file, we capture the readlink target
            if results_map[ltarget]:
                target = os.readlink(file)
                #bb.note("Sym: %s (%d)" % (ltarget, results_map[ltarget]))
                symlinks[file] = target

        # Classify the regular files in parallel
        results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelf.keys(), d)

        # Sort results by file path. This ensures that the files are always
        # processed in the same order, which is important to make sure builds
        # are reproducible when dealing with hardlinks
        results.sort(key=lambda x: x[0])

        for (file, elf_file) in results:
            # It's a file (or hardlink), not a link
            # ...but is it ELF, and is it already stripped?
            # (elf_file is a bitmask from oe.package.is_elf: bit 0 => ELF,
            #  bit 1 => already stripped, judging by the usage below)
            if elf_file & 1:
                if elf_file & 2:
                    if 'already-stripped' in (d.getVar('INSANE_SKIP_' + pn) or "").split():
                        bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn))
                    else:
                        msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn)
                        package_qa_handle_error("already-stripped", msg, d)
                    continue

                # At this point we have an unstripped elf file. We need to:
                #  a) Make sure any file we strip is not hardlinked to anything else outside this tree
                #  b) Only strip any hardlinked file once (no races)
                #  c) Track any hardlinks between files so that we can reconstruct matching debug file hardlinks

                # Use a reference of device ID and inode number to identify files
                file_reference = checkelf[file][1]
                if file_reference in inodes:
                    # Re-link later copies to the (broken-out) first copy
                    os.unlink(file)
                    os.link(inodes[file_reference][0], file)
                    inodes[file_reference].append(file)
                else:
                    inodes[file_reference] = [file]
                    # break hardlink
                    bb.utils.break_hardlinks(file)
                elffiles[file] = elf_file
                # Modified the file so clear the cache
                cpath.updatecache(file)

    #
    # First lets process debug splitting
    #
    if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
        results = oe.utils.multiprocess_launch(splitdebuginfo, list(elffiles), d, extraargs=(dvar, debugdir, debuglibdir, debugappend, debugsrcdir, d))

        if debugsrcdir and not hostos.startswith("mingw"):
            if (d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
                results = oe.utils.multiprocess_launch(splitstaticdebuginfo, staticlibs, d, extraargs=(dvar, debugstaticdir, debugstaticlibdir, debugstaticappend, debugsrcdir, d))
            else:
                for file in staticlibs:
                    results.append( (file,source_info(file, d)) )

        # Union of all source files referenced by the split debug info
        sources = set()
        for r in results:
            sources.update(r[1])

        # Hardlink our debug symbols to the other hardlink copies
        for ref in inodes:
            if len(inodes[ref]) == 1:
                continue

            target = inodes[ref][0][len(dvar):]
            for file in inodes[ref][1:]:
                src = file[len(dvar):]
                dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(target) + debugappend
                fpath = dvar + dest
                ftarget = dvar + debuglibdir + os.path.dirname(target) + debugdir + "/" + os.path.basename(target) + debugappend
                bb.utils.mkdirhier(os.path.dirname(fpath))
                # Only one hardlink of separated debug info file in each directory
                if not os.access(fpath, os.R_OK):
                    #bb.note("Link %s -> %s" % (fpath, ftarget))
                    os.link(ftarget, fpath)

        # Create symlinks for all cases we were able to split symbols
        for file in symlinks:
            src = file[len(dvar):]
            dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
            fpath = dvar + dest
            # Skip it if the target doesn't exist
            try:
                s = os.stat(fpath)
            except OSError as e:
                (err, strerror) = e.args
                if err != errno.ENOENT:
                    raise
                continue

            # Mirror the original symlink, pointing into the debug layout
            ltarget = symlinks[file]
            lpath = os.path.dirname(ltarget)
            lbase = os.path.basename(ltarget)
            ftarget = ""
            if lpath and lpath != ".":
                ftarget += lpath + debugdir + "/"
            ftarget += lbase + debugappend
            if lpath.startswith(".."):
                ftarget = os.path.join("..", ftarget)
            bb.utils.mkdirhier(os.path.dirname(fpath))
            #bb.note("Symlink %s -> %s" % (fpath, ftarget))
            os.symlink(ftarget, fpath)

        # Process the debugsrcdir if requested...
        # This copies and places the referenced sources for later debugging...
        copydebugsources(debugsrcdir, sources, d)
    #
    # End of debug splitting
    #

    #
    # Now lets go back over things and strip them
    #
    if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1'):
        strip = d.getVar("STRIP")
        sfiles = []
        for file in elffiles:
            elf_file = int(elffiles[file])
            #bb.note("Strip %s" % file)
            sfiles.append((file, elf_file, strip))
        for f in kernmods:
            # 16 is the elf-type flag passed for modules/archives --
            # TODO confirm its meaning against oe.package.runstrip
            sfiles.append((f, 16, strip))
        if (d.getVar('PACKAGE_STRIP_STATIC') == '1' or d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
            for f in staticlibs:
                sfiles.append((f, 16, strip))

        oe.utils.multiprocess_launch(oe.package.runstrip, sfiles, d)

    # Build "minidebuginfo" and reinject it back into the stripped binaries
    if d.getVar('PACKAGE_MINIDEBUGINFO') == '1':
        oe.utils.multiprocess_launch(inject_minidebuginfo, list(elffiles), d,
                                     extraargs=(dvar, debugdir, debuglibdir, debugappend, debugsrcdir, d))

    #
    # End of strip
    #
    os.chdir(oldcwd)
}
1324
1325python populate_packages () {
1326 import glob, re
1327
1328 workdir = d.getVar('WORKDIR')
1329 outdir = d.getVar('DEPLOY_DIR')
1330 dvar = d.getVar('PKGD')
1331 packages = d.getVar('PACKAGES').split()
1332 pn = d.getVar('PN')
1333
1334 bb.utils.mkdirhier(outdir)
1335 os.chdir(dvar)
1336
1337 autodebug = not (d.getVar("NOAUTOPACKAGEDEBUG") or False)
1338
1339 split_source_package = (d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg')
1340
1341 # If debug-with-srcpkg mode is enabled then add the source package if it
1342 # doesn't exist and add the source file contents to the source package.
1343 if split_source_package:
1344 src_package_name = ('%s-src' % d.getVar('PN'))
1345 if not src_package_name in packages:
1346 packages.append(src_package_name)
1347 d.setVar('FILES_%s' % src_package_name, '/usr/src/debug')
1348
1349 # Sanity check PACKAGES for duplicates
1350 # Sanity should be moved to sanity.bbclass once we have the infrastructure
1351 package_dict = {}
1352
1353 for i, pkg in enumerate(packages):
1354 if pkg in package_dict:
1355 msg = "%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg
1356 package_qa_handle_error("packages-list", msg, d)
1357 # Ensure the source package gets the chance to pick up the source files
1358 # before the debug package by ordering it first in PACKAGES. Whether it
1359 # actually picks up any source files is controlled by
1360 # PACKAGE_DEBUG_SPLIT_STYLE.
1361 elif pkg.endswith("-src"):
1362 package_dict[pkg] = (10, i)
1363 elif autodebug and pkg.endswith("-dbg"):
1364 package_dict[pkg] = (30, i)
1365 else:
1366 package_dict[pkg] = (50, i)
1367 packages = sorted(package_dict.keys(), key=package_dict.get)
1368 d.setVar('PACKAGES', ' '.join(packages))
1369 pkgdest = d.getVar('PKGDEST')
1370
1371 seen = []
1372
1373 # os.mkdir masks the permissions with umask so we have to unset it first
1374 oldumask = os.umask(0)
1375
1376 debug = []
1377 for root, dirs, files in cpath.walk(dvar):
1378 dir = root[len(dvar):]
1379 if not dir:
1380 dir = os.sep
1381 for f in (files + dirs):
1382 path = "." + os.path.join(dir, f)
1383 if "/.debug/" in path or "/.debug-static/" in path or path.endswith("/.debug"):
1384 debug.append(path)
1385
1386 for pkg in packages:
1387 root = os.path.join(pkgdest, pkg)
1388 bb.utils.mkdirhier(root)
1389
1390 filesvar = d.getVar('FILES_%s' % pkg) or ""
1391 if "//" in filesvar:
1392 msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg
1393 package_qa_handle_error("files-invalid", msg, d)
1394 filesvar.replace("//", "/")
1395
1396 origfiles = filesvar.split()
1397 files, symlink_paths = files_from_filevars(origfiles)
1398
1399 if autodebug and pkg.endswith("-dbg"):
1400 files.extend(debug)
1401
1402 for file in files:
1403 if (not cpath.islink(file)) and (not cpath.exists(file)):
1404 continue
1405 if file in seen:
1406 continue
1407 seen.append(file)
1408
1409 def mkdir(src, dest, p):
1410 src = os.path.join(src, p)
1411 dest = os.path.join(dest, p)
1412 fstat = cpath.stat(src)
1413 os.mkdir(dest)
1414 os.chmod(dest, fstat.st_mode)
1415 os.chown(dest, fstat.st_uid, fstat.st_gid)
1416 if p not in seen:
1417 seen.append(p)
1418 cpath.updatecache(dest)
1419
1420 def mkdir_recurse(src, dest, paths):
1421 if cpath.exists(dest + '/' + paths):
1422 return
1423 while paths.startswith("./"):
1424 paths = paths[2:]
1425 p = "."
1426 for c in paths.split("/"):
1427 p = os.path.join(p, c)
1428 if not cpath.exists(os.path.join(dest, p)):
1429 mkdir(src, dest, p)
1430
1431 if cpath.isdir(file) and not cpath.islink(file):
1432 mkdir_recurse(dvar, root, file)
1433 continue
1434
1435 mkdir_recurse(dvar, root, os.path.dirname(file))
1436 fpath = os.path.join(root,file)
1437 if not cpath.islink(file):
1438 os.link(file, fpath)
1439 continue
1440 ret = bb.utils.copyfile(file, fpath)
1441 if ret is False or ret == 0:
1442 bb.fatal("File population failed")
1443
1444 # Check if symlink paths exist
1445 for file in symlink_paths:
1446 if not os.path.exists(os.path.join(root,file)):
1447 bb.fatal("File '%s' cannot be packaged into '%s' because its "
1448 "parent directory structure does not exist. One of "
1449 "its parent directories is a symlink whose target "
1450 "directory is not included in the package." %
1451 (file, pkg))
1452
1453 os.umask(oldumask)
1454 os.chdir(workdir)
1455
1456 # Handle LICENSE_EXCLUSION
1457 package_list = []
1458 for pkg in packages:
1459 licenses = d.getVar('LICENSE_EXCLUSION-' + pkg)
1460 if licenses:
1461 msg = "Excluding %s from packaging as it has incompatible license(s): %s" % (pkg, licenses)
1462 package_qa_handle_error("incompatible-license", msg, d)
1463 else:
1464 package_list.append(pkg)
1465 d.setVar('PACKAGES', ' '.join(package_list))
1466
1467 unshipped = []
1468 for root, dirs, files in cpath.walk(dvar):
1469 dir = root[len(dvar):]
1470 if not dir:
1471 dir = os.sep
1472 for f in (files + dirs):
1473 path = os.path.join(dir, f)
1474 if ('.' + path) not in seen:
1475 unshipped.append(path)
1476
1477 if unshipped != []:
1478 msg = pn + ": Files/directories were installed but not shipped in any package:"
1479 if "installed-vs-shipped" in (d.getVar('INSANE_SKIP_' + pn) or "").split():
1480 bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn)
1481 else:
1482 for f in unshipped:
1483 msg = msg + "\n " + f
1484 msg = msg + "\nPlease set FILES such that these items are packaged. Alternatively if they are unneeded, avoid installing them or delete them within do_install.\n"
1485 msg = msg + "%s: %d installed and not shipped files." % (pn, len(unshipped))
1486 package_qa_handle_error("installed-vs-shipped", msg, d)
1487}
1488populate_packages[dirs] = "${D}"
1489
python package_fixsymlinks () {
    import errno
    # Scan every packaged path; where a symlink's resolved target lives in a
    # different package, add a run-time dependency on that package.  Links
    # whose target exists in no package at all are reported as dangling.
    pkgdest = d.getVar('PKGDEST')
    packages = d.getVar("PACKAGES", False).split()

    # pkg -> list of package-relative paths / unresolvable link targets
    dangling_links = {}
    pkg_files = {}
    for pkg in packages:
        dangling_links[pkg] = []
        pkg_files[pkg] = []
        inst_root = os.path.join(pkgdest, pkg)
        for path in pkgfiles[pkg]:
            rpath = path[len(inst_root):]
            pkg_files[pkg].append(rpath)
            rtarget = cpath.realpath(path, inst_root, True, assume_dir = True)
            if not cpath.lexists(rtarget):
                # Record the resolved-but-missing target, package-relative
                dangling_links[pkg].append(os.path.normpath(rtarget[len(inst_root):]))

    # For each dangling target, look for a package that ships it
    newrdepends = {}
    for pkg in dangling_links:
        for l in dangling_links[pkg]:
            found = False
            bb.debug(1, "%s contains dangling link %s" % (pkg, l))
            for p in packages:
                if l in pkg_files[p]:
                    found = True
                    bb.debug(1, "target found in %s" % p)
                    if p == pkg:
                        # Target is in the same package; nothing to add
                        break
                    if pkg not in newrdepends:
                        newrdepends[pkg] = []
                    newrdepends[pkg].append(p)
                    break
            if found == False:
                bb.note("%s contains dangling symlink to %s" % (pkg, l))

    # Merge the discovered dependencies into each package's RDEPENDS,
    # preserving any existing (possibly versioned) entries.
    for pkg in newrdepends:
        rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg) or "")
        for p in newrdepends[pkg]:
            if p not in rdepends:
                rdepends[p] = []
        d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False))
}
1533
1534
1535python package_package_name_hook() {
1536 """
1537 A package_name_hook function can be used to rewrite the package names by
1538 changing PKG. For an example, see debian.bbclass.
1539 """
1540 pass
1541}
1542
1543EXPORT_FUNCTIONS package_name_hook
1544
1545
1546PKGDESTWORK = "${WORKDIR}/pkgdata"
1547
1548PKGDATA_VARS = "PN PE PV PR PKGE PKGV PKGR LICENSE DESCRIPTION SUMMARY RDEPENDS RPROVIDES RRECOMMENDS RSUGGESTS RREPLACES RCONFLICTS SECTION PKG ALLOW_EMPTY FILES CONFFILES FILES_INFO PACKAGE_ADD_METADATA pkg_postinst pkg_postrm pkg_preinst pkg_prerm"
1549
python emit_pkgdata() {
    from glob import glob
    import json

    # Write the pkgdata files under PKGDESTWORK that later stages (and other
    # recipes) read: a top-level ${PN} file listing PACKAGES, one
    # runtime/<pkg> file per package with the PKGDATA_VARS values, plus
    # reverse/rprovides lookup symlinks.  Also finalizes the scriptlets
    # (deferred on-target postinsts, 'set -e' injection).

    # Fold pkg_postinst_ontarget into pkg_postinst, prefixed by a fragment
    # that defers execution to first boot when installing offline ($D set).
    def process_postinst_on_target(pkg, mlprefix):
        pkgval = d.getVar('PKG_%s' % pkg)
        if pkgval is None:
            pkgval = pkg

        defer_fragment = """
if [ -n "$D" ]; then
    $INTERCEPT_DIR/postinst_intercept delay_to_first_boot %s mlprefix=%s
    exit 0
fi
""" % (pkgval, mlprefix)

        postinst = d.getVar('pkg_postinst_%s' % pkg)
        postinst_ontarget = d.getVar('pkg_postinst_ontarget_%s' % pkg)

        if postinst_ontarget:
            bb.debug(1, 'adding deferred pkg_postinst_ontarget() to pkg_postinst() for %s' % pkg)
            if not postinst:
                postinst = '#!/bin/sh\n'
            postinst += defer_fragment
            postinst += postinst_ontarget
            d.setVar('pkg_postinst_%s' % pkg, postinst)

    # Insert 'set -e' into each scriptlet (after the shebang if present) so
    # scriptlet failures are not silently ignored.
    def add_set_e_to_scriptlets(pkg):
        for scriptlet_name in ('pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm'):
            scriptlet = d.getVar('%s_%s' % (scriptlet_name, pkg))
            if scriptlet:
                scriptlet_split = scriptlet.split('\n')
                if scriptlet_split[0].startswith("#!"):
                    scriptlet = scriptlet_split[0] + "\nset -e\n" + "\n".join(scriptlet_split[1:])
                else:
                    scriptlet = "set -e\n" + "\n".join(scriptlet_split[0:])
            d.setVar('%s_%s' % (scriptlet_name, pkg), scriptlet)

    # Emit 'VAR_pkg: value' (falling back to the unsuffixed VAR) with
    # newlines etc. escaped so each record stays on one line.
    def write_if_exists(f, pkg, var):
        def encode(str):
            import codecs
            c = codecs.getencoder("unicode_escape")
            return c(str)[0].decode("latin1")

        val = d.getVar('%s_%s' % (var, pkg))
        if val:
            f.write('%s_%s: %s\n' % (var, pkg, encode(val)))
            return val
        val = d.getVar('%s' % (var))
        if val:
            f.write('%s: %s\n' % (var, encode(val)))
        return val

    # Per-variant ${PN} files for multilib/kernel variants of this recipe
    def write_extra_pkgs(variants, pn, packages, pkgdatadir):
        for variant in variants:
            with open("%s/%s-%s" % (pkgdatadir, variant, pn), 'w') as fd:
                fd.write("PACKAGES: %s\n" % ' '.join(
                    map(lambda pkg: '%s-%s' % (variant, pkg), packages.split())))

    # Minimal runtime/<variant>-<pkg> stubs mapping back to the base package
    def write_extra_runtime_pkgs(variants, packages, pkgdatadir):
        for variant in variants:
            for pkg in packages.split():
                ml_pkg = "%s-%s" % (variant, pkg)
                subdata_file = "%s/runtime/%s" % (pkgdatadir, ml_pkg)
                with open(subdata_file, 'w') as fd:
                    fd.write("PKG_%s: %s" % (ml_pkg, pkg))

    packages = d.getVar('PACKAGES')
    pkgdest = d.getVar('PKGDEST')
    pkgdatadir = d.getVar('PKGDESTWORK')

    data_file = pkgdatadir + d.expand("/${PN}")
    with open(data_file, 'w') as fd:
        fd.write("PACKAGES: %s\n" % packages)

    pn = d.getVar('PN')
    global_variants = (d.getVar('MULTILIB_GLOBAL_VARIANTS') or "").split()
    variants = (d.getVar('MULTILIB_VARIANTS') or "").split()

    if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
        write_extra_pkgs(variants, pn, packages, pkgdatadir)

    if bb.data.inherits_class('allarch', d) and not variants \
        and not bb.data.inherits_class('packagegroup', d):
        write_extra_pkgs(global_variants, pn, packages, pkgdatadir)

    workdir = d.getVar('WORKDIR')

    for pkg in packages.split():
        pkgval = d.getVar('PKG_%s' % pkg)
        if pkgval is None:
            pkgval = pkg
            d.setVar('PKG_%s' % pkg, pkg)

        # Per-file sizes; hardlinked copies are counted once in total_size
        pkgdestpkg = os.path.join(pkgdest, pkg)
        files = {}
        total_size = 0
        seen = set()
        for f in pkgfiles[pkg]:
            relpth = os.path.relpath(f, pkgdestpkg)
            fstat = os.lstat(f)
            files[os.sep + relpth] = fstat.st_size
            if fstat.st_ino not in seen:
                seen.add(fstat.st_ino)
                total_size += fstat.st_size
        d.setVar('FILES_INFO', json.dumps(files, sort_keys=True))

        process_postinst_on_target(pkg, d.getVar("MLPREFIX"))
        add_set_e_to_scriptlets(pkg)

        subdata_file = pkgdatadir + "/runtime/%s" % pkg
        with open(subdata_file, 'w') as sf:
            for var in (d.getVar('PKGDATA_VARS') or "").split():
                val = write_if_exists(sf, pkg, var)

            write_if_exists(sf, pkg, 'FILERPROVIDESFLIST')
            for dfile in (d.getVar('FILERPROVIDESFLIST_' + pkg) or "").split():
                write_if_exists(sf, pkg, 'FILERPROVIDES_' + dfile)

            write_if_exists(sf, pkg, 'FILERDEPENDSFLIST')
            for dfile in (d.getVar('FILERDEPENDSFLIST_' + pkg) or "").split():
                write_if_exists(sf, pkg, 'FILERDEPENDS_' + dfile)

            sf.write('%s_%s: %d\n' % ('PKGSIZE', pkg, total_size))

        # Symlinks needed for rprovides lookup
        rprov = d.getVar('RPROVIDES_%s' % pkg) or d.getVar('RPROVIDES')
        if rprov:
            for p in bb.utils.explode_deps(rprov):
                subdata_sym = pkgdatadir + "/runtime-rprovides/%s/%s" % (p, pkg)
                bb.utils.mkdirhier(os.path.dirname(subdata_sym))
                oe.path.symlink("../../runtime/%s" % pkg, subdata_sym, True)

        allow_empty = d.getVar('ALLOW_EMPTY_%s' % pkg)
        if not allow_empty:
            allow_empty = d.getVar('ALLOW_EMPTY')
        root = "%s/%s" % (pkgdest, pkg)
        os.chdir(root)
        g = glob('*')
        # Only non-empty packages (or ALLOW_EMPTY ones) are marked packaged
        if g or allow_empty == "1":
            # Symlinks needed for reverse lookups (from the final package name)
            subdata_sym = pkgdatadir + "/runtime-reverse/%s" % pkgval
            oe.path.symlink("../runtime/%s" % pkg, subdata_sym, True)

            packagedfile = pkgdatadir + '/runtime/%s.packaged' % pkg
            open(packagedfile, 'w').close()

    if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
        write_extra_runtime_pkgs(variants, packages, pkgdatadir)

    if bb.data.inherits_class('allarch', d) and not variants \
        and not bb.data.inherits_class('packagegroup', d):
        write_extra_runtime_pkgs(global_variants, packages, pkgdatadir)

}
1705emit_pkgdata[dirs] = "${PKGDESTWORK}/runtime ${PKGDESTWORK}/runtime-reverse ${PKGDESTWORK}/runtime-rprovides"
1706
# Shell fragment appended to the postinst of packages shipping shared
# libraries: refresh the runtime linker cache, but only when $D is empty,
# i.e. when actually running on the target (during offline rootfs/sysroot
# installs $D is set and the target's ldconfig cannot be executed).
ldconfig_postinst_fragment() {
if [ x"$D" = "x" ]; then
	if [ -x /sbin/ldconfig ]; then /sbin/ldconfig ; fi
fi
}
1712
1713RPMDEPS = "${STAGING_LIBDIR_NATIVE}/rpm/rpmdeps --alldeps --define '__font_provides %{nil}'"
1714
# Collect per-file run-time dependency metadata
# Output:
# FILERPROVIDESFLIST_pkg - list of all files with provides
# FILERPROVIDES_filepath_pkg - per-file provides
#
# FILERDEPENDSFLIST_pkg - list of all files with dependencies
# FILERDEPENDS_filepath_pkg - per-file dependencies
1722
1723python package_do_filedeps() {
1724 if d.getVar('SKIP_FILEDEPS') == '1':
1725 return
1726
1727 pkgdest = d.getVar('PKGDEST')
1728 packages = d.getVar('PACKAGES')
1729 rpmdeps = d.getVar('RPMDEPS')
1730
1731 def chunks(files, n):
1732 return [files[i:i+n] for i in range(0, len(files), n)]
1733
1734 pkglist = []
1735 for pkg in packages.split():
1736 if d.getVar('SKIP_FILEDEPS_' + pkg) == '1':
1737 continue
1738 if pkg.endswith('-dbg') or pkg.endswith('-doc') or pkg.find('-locale-') != -1 or pkg.find('-localedata-') != -1 or pkg.find('-gconv-') != -1 or pkg.find('-charmap-') != -1 or pkg.startswith('kernel-module-') or pkg.endswith('-src'):
1739 continue
1740 for files in chunks(pkgfiles[pkg], 100):
1741 pkglist.append((pkg, files, rpmdeps, pkgdest))
1742
1743 processed = oe.utils.multiprocess_launch(oe.package.filedeprunner, pkglist, d)
1744
1745 provides_files = {}
1746 requires_files = {}
1747
1748 for result in processed:
1749 (pkg, provides, requires) = result
1750
1751 if pkg not in provides_files:
1752 provides_files[pkg] = []
1753 if pkg not in requires_files:
1754 requires_files[pkg] = []
1755
1756 for file in sorted(provides):
1757 provides_files[pkg].append(file)
1758 key = "FILERPROVIDES_" + file + "_" + pkg
1759 d.appendVar(key, " " + " ".join(provides[file]))
1760
1761 for file in sorted(requires):
1762 requires_files[pkg].append(file)
1763 key = "FILERDEPENDS_" + file + "_" + pkg
1764 d.appendVar(key, " " + " ".join(requires[file]))
1765
1766 for pkg in requires_files:
1767 d.setVar("FILERDEPENDSFLIST_" + pkg, " ".join(requires_files[pkg]))
1768 for pkg in provides_files:
1769 d.setVar("FILERPROVIDESFLIST_" + pkg, " ".join(provides_files[pkg]))
1770}
1771
1772SHLIBSDIRS = "${WORKDIR_PKGDATA}/${MLPREFIX}shlibs2"
1773SHLIBSWORKDIR = "${PKGDESTWORK}/${MLPREFIX}shlibs2"
1774
# Discover which shared libraries each package provides and needs.
# Provided sonames are recorded in SHLIBSWORKDIR/<pkg>.list; required
# sonames are resolved against the provider map and written to
# PKGDEST/<pkg>.shlibdeps for later merging into RDEPENDS.
python package_do_shlibs() {
    import itertools
    # NOTE(review): "pipes" is deprecated in newer Python (shlex.quote is
    # the replacement) — left as-is here to avoid a behavior change.
    import re, pipes
    import subprocess

    # EXCLUDE_FROM_SHLIBS disables shlib handling for the whole recipe.
    exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', False)
    if exclude_shlibs:
        bb.note("not generating shlibs")
        return

    lib_re = re.compile(r"^.*\.so")
    libdir_re = re.compile(r".*/%s$" % d.getVar('baselib'))

    packages = d.getVar('PACKAGES')

    # EXCLUDE_PACKAGES_FROM_SHLIBS filters individual packages out of the scan.
    shlib_pkgs = []
    exclusion_list = d.getVar("EXCLUDE_PACKAGES_FROM_SHLIBS")
    if exclusion_list:
        for pkg in packages.split():
            if pkg not in exclusion_list.split():
                shlib_pkgs.append(pkg)
            else:
                bb.note("not generating shlibs for %s" % pkg)
    else:
        shlib_pkgs = packages.split()

    hostos = d.getVar('HOST_OS')

    workdir = d.getVar('WORKDIR')

    ver = d.getVar('PKGV')
    if not ver:
        msg = "PKGV not defined"
        package_qa_handle_error("pkgv-undefined", msg, d)
        return

    pkgdest = d.getVar('PKGDEST')

    shlibswork_dir = d.getVar('SHLIBSWORKDIR')

    # Parse one ELF file with objdump -p, returning
    # (needs_ldconfig, needed sonames, provided sonames, pending renames).
    # Runs via multiprocess_launch below, so it must stay self-contained.
    def linux_so(file, pkg, pkgver, d):
        needs_ldconfig = False
        needed = set()
        sonames = set()
        renames = []
        ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
        cmd = d.getVar('OBJDUMP') + " -p " + pipes.quote(file) + " 2>/dev/null"
        fd = os.popen(cmd)
        lines = fd.readlines()
        fd.close()
        # First pass: collect the RPATH entries (with $ORIGIN resolved to the
        # file's package-relative directory) so NEEDED entries can carry them.
        rpath = tuple()
        for l in lines:
            m = re.match(r"\s+RPATH\s+([^\s]*)", l)
            if m:
                rpaths = m.group(1).replace("$ORIGIN", ldir).split(":")
                rpath = tuple(map(os.path.normpath, rpaths))
        # Second pass: collect NEEDED (dependencies) and SONAME (provides).
        for l in lines:
            m = re.match(r"\s+NEEDED\s+([^\s]*)", l)
            if m:
                dep = m.group(1)
                if dep not in needed:
                    needed.add((dep, file, rpath))
            m = re.match(r"\s+SONAME\s+([^\s]*)", l)
            if m:
                this_soname = m.group(1)
                prov = (this_soname, ldir, pkgver)
                if not prov in sonames:
                    # if library is private (only used by package) then do not build shlib for it
                    import fnmatch
                    if not private_libs or len([i for i in private_libs if fnmatch.fnmatch(this_soname, i)]) == 0:
                        sonames.add(prov)
                if libdir_re.match(os.path.dirname(file)):
                    needs_ldconfig = True
                # PACKAGE_SNAP_LIB_SYMLINKS: rename the file to its soname.
                if snap_symlinks and (os.path.basename(file) != this_soname):
                    renames.append((file, os.path.join(os.path.dirname(file), this_soname)))
        return (needs_ldconfig, needed, sonames, renames)

    # Mach-O (darwin) equivalent: derive provides from the file name and
    # requires from otool -L output; results accumulate into the caller's
    # needed/sonames sets.
    def darwin_so(file, needed, sonames, renames, pkgver):
        if not os.path.exists(file):
            return
        ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')

        def get_combinations(base):
            #
            # Given a base library name, find all combinations of this split by "." and "-"
            #
            combos = []
            options = base.split(".")
            for i in range(1, len(options) + 1):
                combos.append(".".join(options[0:i]))
            options = base.split("-")
            for i in range(1, len(options) + 1):
                combos.append("-".join(options[0:i]))
            return combos

        if (file.endswith('.dylib') or file.endswith('.so')) and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.endswith('-src'):
            # Drop suffix
            name = os.path.basename(file).rsplit(".",1)[0]
            # Find all combinations
            combos = get_combinations(name)
            for combo in combos:
                if not combo in sonames:
                    prov = (combo, ldir, pkgver)
                    sonames.add(prov)
        if file.endswith('.dylib') or file.endswith('.so'):
            rpath = []
            p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-l', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            out, err = p.communicate()
            # If returned successfully, process stdout for results
            if p.returncode == 0:
                for l in out.split("\n"):
                    l = l.strip()
                    if l.startswith('path '):
                        rpath.append(l.split()[1])

        p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-L', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out, err = p.communicate()
        # If returned successfully, process stdout for results
        if p.returncode == 0:
            for l in out.split("\n"):
                l = l.strip()
                if not l or l.endswith(":"):
                    continue
                if "is not an object file" in l:
                    continue
                name = os.path.basename(l.split()[0]).rsplit(".", 1)[0]
                if name and name not in needed[pkg]:
                    needed[pkg].add((name, file, tuple()))

    # PE (mingw) equivalent: every .dll is a provide; objdump -p "DLL Name:"
    # entries in .dll/.exe files are requires.
    def mingw_dll(file, needed, sonames, renames, pkgver):
        if not os.path.exists(file):
            return

        if file.endswith(".dll"):
            # assume all dlls are shared objects provided by the package
            sonames.add((os.path.basename(file), os.path.dirname(file).replace(pkgdest + "/" + pkg, ''), pkgver))

        if (file.endswith(".dll") or file.endswith(".exe")):
            # use objdump to search for "DLL Name: .*\.dll"
            p = subprocess.Popen([d.expand("${HOST_PREFIX}objdump"), "-p", file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            out, err = p.communicate()
            # process the output, grabbing all .dll names
            if p.returncode == 0:
                for m in re.finditer(r"DLL Name: (.*?\.dll)$", out.decode(), re.MULTILINE | re.IGNORECASE):
                    dllname = m.group(1)
                    if dllname:
                        needed[pkg].add((dllname, file, tuple()))

    if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS') == "1":
        snap_symlinks = True
    else:
        snap_symlinks = False

    needed = {}

    # Start from the providers already registered by other recipes.
    shlib_provider = oe.package.read_shlib_providers(d)

    # Pass 1: work out what each package provides.
    for pkg in shlib_pkgs:
        private_libs = d.getVar('PRIVATE_LIBS_' + pkg) or d.getVar('PRIVATE_LIBS') or ""
        private_libs = private_libs.split()
        needs_ldconfig = False
        bb.debug(2, "calculating shlib provides for %s" % pkg)

        # Per-package version override, falling back to the recipe PKGV.
        pkgver = d.getVar('PKGV_' + pkg)
        if not pkgver:
            pkgver = d.getVar('PV_' + pkg)
        if not pkgver:
            pkgver = ver

        needed[pkg] = set()
        sonames = set()
        renames = []
        linuxlist = []
        for file in pkgfiles[pkg]:
            soname = None
            if cpath.islink(file):
                continue
            if hostos == "darwin" or hostos == "darwin8":
                darwin_so(file, needed, sonames, renames, pkgver)
            elif hostos.startswith("mingw"):
                mingw_dll(file, needed, sonames, renames, pkgver)
            elif os.access(file, os.X_OK) or lib_re.match(file):
                linuxlist.append(file)

        # ELF scanning is the common case and is parallelised.
        if linuxlist:
            results = oe.utils.multiprocess_launch(linux_so, linuxlist, d, extraargs=(pkg, pkgver, d))
            for r in results:
                ldconfig = r[0]
                needed[pkg] |= r[1]
                sonames |= r[2]
                renames.extend(r[3])
                needs_ldconfig = needs_ldconfig or ldconfig

        for (old, new) in renames:
            bb.note("Renaming %s to %s" % (old, new))
            os.rename(old, new)
            pkgfiles[pkg].remove(old)

        # Publish this package's provides for other recipes to consume.
        shlibs_file = os.path.join(shlibswork_dir, pkg + ".list")
        if len(sonames):
            with open(shlibs_file, 'w') as fd:
                for s in sorted(sonames):
                    if s[0] in shlib_provider and s[1] in shlib_provider[s[0]]:
                        (old_pkg, old_pkgver) = shlib_provider[s[0]][s[1]]
                        if old_pkg != pkg:
                            bb.warn('%s-%s was registered as shlib provider for %s, changing it to %s-%s because it was built later' % (old_pkg, old_pkgver, s[0], pkg, pkgver))
                    bb.debug(1, 'registering %s-%s as shlib provider for %s' % (pkg, pkgver, s[0]))
                    fd.write(s[0] + ':' + s[1] + ':' + s[2] + '\n')
                    if s[0] not in shlib_provider:
                        shlib_provider[s[0]] = {}
                    shlib_provider[s[0]][s[1]] = (pkg, pkgver)
        # Packages installing libs into the base libdir get an ldconfig
        # call appended to their postinst.
        if needs_ldconfig:
            bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
            postinst = d.getVar('pkg_postinst_%s' % pkg)
            if not postinst:
                postinst = '#!/bin/sh\n'
            postinst += d.getVar('ldconfig_postinst_fragment')
            d.setVar('pkg_postinst_%s' % pkg, postinst)
        bb.debug(1, 'LIBNAMES: pkg %s sonames %s' % (pkg, sonames))

    # ASSUME_SHLIBS ("lib:dep_pkg[_version]") injects providers for
    # libraries that are not built by any scanned recipe.
    assumed_libs = d.getVar('ASSUME_SHLIBS')
    if assumed_libs:
        libdir = d.getVar("libdir")
        for e in assumed_libs.split():
            l, dep_pkg = e.split(":")
            lib_ver = None
            dep_pkg = dep_pkg.rsplit("_", 1)
            if len(dep_pkg) == 2:
                lib_ver = dep_pkg[1]
            dep_pkg = dep_pkg[0]
            if l not in shlib_provider:
                shlib_provider[l] = {}
            shlib_provider[l][libdir] = (dep_pkg, lib_ver)

    libsearchpath = [d.getVar('libdir'), d.getVar('base_libdir')]

    # Pass 2: resolve each package's NEEDED entries against the provider
    # map and emit PKGDEST/<pkg>.shlibdeps.
    for pkg in shlib_pkgs:
        bb.debug(2, "calculating shlib requirements for %s" % pkg)

        private_libs = d.getVar('PRIVATE_LIBS_' + pkg) or d.getVar('PRIVATE_LIBS') or ""
        private_libs = private_libs.split()

        deps = list()
        for n in needed[pkg]:
            # if n is in private libraries, don't try to search provider for it
            # this could cause problem in case some abc.bb provides private
            # /opt/abc/lib/libfoo.so.1 and contains /usr/bin/abc depending on system library libfoo.so.1
            # but skipping it is still better alternative than providing own
            # version and then adding runtime dependency for the same system library
            import fnmatch
            if private_libs and len([i for i in private_libs if fnmatch.fnmatch(n[0], i)]) > 0:
                bb.debug(2, '%s: Dependency %s covered by PRIVATE_LIBS' % (pkg, n[0]))
                continue
            if n[0] in shlib_provider.keys():
                shlib_provider_map = shlib_provider[n[0]]
                matches = set()
                # Candidate directories, in priority order: the binary's
                # RPATH entries, then all registered provider dirs, then the
                # standard library search path.
                for p in itertools.chain(list(n[2]), sorted(shlib_provider_map.keys()), libsearchpath):
                    if p in shlib_provider_map:
                        matches.add(p)
                if len(matches) > 1:
                    matchpkgs = ', '.join([shlib_provider_map[match][0] for match in matches])
                    bb.error("%s: Multiple shlib providers for %s: %s (used by files: %s)" % (pkg, n[0], matchpkgs, n[1]))
                elif len(matches) == 1:
                    (dep_pkg, ver_needed) = shlib_provider_map[matches.pop()]

                    bb.debug(2, '%s: Dependency %s requires package %s (used by files: %s)' % (pkg, n[0], dep_pkg, n[1]))

                    # Don't depend on ourselves.
                    if dep_pkg == pkg:
                        continue

                    if ver_needed:
                        dep = "%s (>= %s)" % (dep_pkg, ver_needed)
                    else:
                        dep = dep_pkg
                    if not dep in deps:
                        deps.append(dep)
                    continue
            bb.note("Couldn't find shared library provider for %s, used by files: %s" % (n[0], n[1]))

        deps_file = os.path.join(pkgdest, pkg + ".shlibdeps")
        if os.path.exists(deps_file):
            os.remove(deps_file)
        if deps:
            with open(deps_file, 'w') as fd:
                for dep in sorted(deps):
                    fd.write(dep + '\n')
}
2062
# Track which pkg-config (.pc) modules each package provides and requires,
# publish the provides as <pkg>.pclist files, and resolve the requires into
# package-level dependencies written to PKGDEST/<pkg>.pcdeps.
python package_do_pkgconfig () {
    import re

    packages = d.getVar('PACKAGES')
    workdir = d.getVar('WORKDIR')
    pkgdest = d.getVar('PKGDEST')

    shlibs_dirs = d.getVar('SHLIBSDIRS').split()
    shlibswork_dir = d.getVar('SHLIBSWORKDIR')

    pc_pattern = re.compile(r'(.*)\.pc$')
    assignment_pattern = re.compile(r'(.*)=(.*)')
    keyword_pattern = re.compile(r'(.*): (.*)')

    provided_modules = {}
    needed_modules = {}
    for pkg in packages.split():
        provided_modules[pkg] = []
        needed_modules[pkg] = []
        for file in pkgfiles[pkg]:
            pc_match = pc_pattern.match(file)
            if not pc_match:
                continue
            # A fresh datastore accumulates the variables defined in this
            # .pc file so its Requires: line can be expanded with them.
            pcdata = bb.data.init()
            provided_modules[pkg].append(pc_match.group(1))
            if not os.access(file, os.R_OK):
                continue
            with open(file, 'r') as f:
                for line in f.readlines():
                    assign = assignment_pattern.match(line)
                    if assign:
                        pcdata.setVar(assign.group(1), pcdata.expand(assign.group(2)))
                        continue
                    keyword = keyword_pattern.match(line)
                    if keyword and keyword.group(1) == 'Requires':
                        # Requires lists modules separated by commas/spaces.
                        needed_modules[pkg] += pcdata.expand(keyword.group(2)).replace(',', ' ').split()

    # Publish each package's provided modules for other recipes to consume.
    for pkg in packages.split():
        pclist_file = os.path.join(shlibswork_dir, pkg + ".pclist")
        if provided_modules[pkg] != []:
            with open(pclist_file, 'w') as f:
                for module in provided_modules[pkg]:
                    f.write('%s\n' % module)

    # Go from least to most specific since the last one found wins
    for dir in reversed(shlibs_dirs):
        if not os.path.exists(dir):
            continue
        for file in sorted(os.listdir(dir)):
            pclist_match = re.match(r'^(.*)\.pclist$', file)
            if not pclist_match:
                continue
            with open(os.path.join(dir, file)) as fd:
                provided_modules[pclist_match.group(1)] = [l.rstrip() for l in fd.readlines()]

    # Map each required module back to the package that provides it.
    for pkg in packages.split():
        deps = []
        for module in needed_modules[pkg]:
            found = False
            for provider in provided_modules.keys():
                if module in provided_modules[provider]:
                    if provider != pkg and provider not in deps:
                        deps.append(provider)
                    found = True
            if found == False:
                bb.note("couldn't find pkgconfig module '%s' in any package" % module)
        deps_file = os.path.join(pkgdest, pkg + ".pcdeps")
        if len(deps):
            with open(deps_file, 'w') as fd:
                for dep in deps:
                    fd.write(dep + '\n')
}
2144
def read_libdep_files(d):
    """
    Collect the per-package library dependencies recorded by earlier steps
    (<pkg>.shlibdeps, <pkg>.pcdeps and <pkg>.clilibdeps under PKGDEST).

    d -- the recipe datastore

    Returns a dict mapping package name -> {dependency: [version constraints]}.
    Every package listed in PACKAGES gets an entry, even if empty.
    """
    pkglibdeps = {}
    packages = d.getVar('PACKAGES').split()
    for pkg in packages:
        pkglibdeps[pkg] = {}
        for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
            depsfile = d.expand("${PKGDEST}/" + pkg + extension)
            if os.access(depsfile, os.R_OK):
                with open(depsfile) as fd:
                    lines = fd.readlines()
                for l in lines:
                    # str.rstrip() returns a new string; previously the
                    # result was discarded, so the trailing newline was
                    # passed through to explode_dep_versions2.
                    l = l.rstrip()
                    deps = bb.utils.explode_dep_versions2(l)
                    for dep in deps:
                        # First file wins for a given dependency's constraints.
                        if not dep in pkglibdeps[pkg]:
                            pkglibdeps[pkg][dep] = deps[dep]
    return pkglibdeps
2162
# Fold the library dependencies gathered by the shlibs/pkgconfig steps into
# each package's RDEPENDS, preserving any version constraints that are not
# already present.
python read_shlibdeps () {
    libdeps = read_libdep_files(d)

    for pkg in d.getVar('PACKAGES').split():
        runtime_deps = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg) or "")
        for dep in sorted(libdeps[pkg]):
            # Add the dependency if missing, then merge its version
            # constraints without duplicating existing ones.
            if dep not in runtime_deps:
                runtime_deps[dep] = []
            for constraint in libdeps[pkg][dep]:
                if constraint not in runtime_deps[dep]:
                    runtime_deps[dep].append(constraint)
        d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(runtime_deps, commasep=False))
}
2178
python package_depchains() {
    """
    For a given set of prefix and postfix modifiers, make those packages
    RRECOMMENDS on the corresponding packages for its RDEPENDS.

    Example: If package A depends upon package B, and A's .bb emits an
    A-dev package, this would make A-dev Recommends: B-dev.

    If only one of a given suffix is specified, it will take the RRECOMMENDS
    based on the RDEPENDS of *all* other packages. If more than one of a given
    suffix is specified, it will only use the RDEPENDS of the single parent
    package.
    """

    packages = d.getVar('PACKAGES')
    postfixes = (d.getVar('DEPCHAIN_POST') or '').split()
    prefixes = (d.getVar('DEPCHAIN_PRE') or '').split()

    # Add suffix/prefix variants of the given build-time depends to pkg's
    # RRECOMMENDS (used for -dev packages, driven by DEPENDS).
    def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d):

        #bb.note('depends for %s is %s' % (base, depends))
        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg) or "")

        for depend in sorted(depends):
            if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'):
                #bb.note("Skipping %s" % depend)
                continue
            # Strip -dev/-dbg so the modifier is applied to the base name.
            if depend.endswith('-dev'):
                depend = depend[:-4]
            if depend.endswith('-dbg'):
                depend = depend[:-4]
            pkgname = getname(depend, suffix)
            #bb.note("Adding %s for %s" % (pkgname, depend))
            if pkgname not in rreclist and pkgname != pkg:
                rreclist[pkgname] = []

        #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist)))
        d.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))

    # Same as pkg_adddeprrecs but driven by run-time depends (RDEPENDS).
    def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):

        #bb.note('rdepends for %s is %s' % (base, rdepends))
        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg) or "")

        for depend in sorted(rdepends):
            if depend.find('virtual-locale-') != -1:
                #bb.note("Skipping %s" % depend)
                continue
            # Strip -dev/-dbg so the modifier is applied to the base name.
            if depend.endswith('-dev'):
                depend = depend[:-4]
            if depend.endswith('-dbg'):
                depend = depend[:-4]
            pkgname = getname(depend, suffix)
            #bb.note("Adding %s for %s" % (pkgname, depend))
            if pkgname not in rreclist and pkgname != pkg:
                rreclist[pkgname] = []

        #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist)))
        d.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))

    def add_dep(list, dep):
        if dep not in list:
            list.append(dep)

    # Build-time dependencies of the recipe, deduplicated.
    depends = []
    for dep in bb.utils.explode_deps(d.getVar('DEPENDS') or ""):
        add_dep(depends, dep)

    # Union of run-time dependencies across all packages, deduplicated.
    rdepends = []
    for pkg in packages.split():
        for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + pkg) or ""):
            add_dep(rdepends, dep)

    #bb.note('rdepends is %s' % rdepends)

    def post_getname(name, suffix):
        return '%s%s' % (name, suffix)
    def pre_getname(name, suffix):
        return '%s%s' % (suffix, name)

    # Map each modifier to the packages carrying it:
    # pkgs[modifier][pkg] = (base package name, name-building function).
    pkgs = {}
    for pkg in packages.split():
        for postfix in postfixes:
            if pkg.endswith(postfix):
                if not postfix in pkgs:
                    pkgs[postfix] = {}
                pkgs[postfix][pkg] = (pkg[:-len(postfix)], post_getname)

        for prefix in prefixes:
            if pkg.startswith(prefix):
                if not prefix in pkgs:
                    pkgs[prefix] = {}
                pkgs[prefix][pkg] = (pkg[:-len(prefix)], pre_getname)

    # -dbg packages recommend the -dbg variants of the library dependencies
    # recorded in the libdep files, unless defaulted off.
    if "-dbg" in pkgs:
        pkglibdeps = read_libdep_files(d)
        pkglibdeplist = []
        for pkg in pkglibdeps:
            for k in pkglibdeps[pkg]:
                add_dep(pkglibdeplist, k)
        dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS') == '1') or (bb.data.inherits_class('packagegroup', d)))

    for suffix in pkgs:
        for pkg in pkgs[suffix]:
            # Per-package opt-out via the nodeprrecs varflag.
            if d.getVarFlag('RRECOMMENDS_' + pkg, 'nodeprrecs'):
                continue
            (base, func) = pkgs[suffix][pkg]
            if suffix == "-dev":
                pkg_adddeprrecs(pkg, base, suffix, func, depends, d)
            elif suffix == "-dbg":
                if not dbgdefaultdeps:
                    pkg_addrrecs(pkg, base, suffix, func, pkglibdeplist, d)
                    continue
            if len(pkgs[suffix]) == 1:
                pkg_addrrecs(pkg, base, suffix, func, rdepends, d)
            else:
                rdeps = []
                for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + base) or ""):
                    add_dep(rdeps, dep)
                pkg_addrrecs(pkg, base, suffix, func, rdeps, d)
}
2300
2301# Since bitbake can't determine which variables are accessed during package
2302# iteration, we need to list them here:
2303PACKAGEVARS = "FILES RDEPENDS RRECOMMENDS SUMMARY DESCRIPTION RSUGGESTS RPROVIDES RCONFLICTS PKG ALLOW_EMPTY pkg_postinst pkg_postrm pkg_postinst_ontarget INITSCRIPT_NAME INITSCRIPT_PARAMS DEBIAN_NOAUTONAME ALTERNATIVE PKGE PKGV PKGR USERADD_PARAM GROUPADD_PARAM CONFFILES SYSTEMD_SERVICE LICENSE SECTION pkg_preinst pkg_prerm RREPLACES GROUPMEMS_PARAM SYSTEMD_AUTO_ENABLE SKIP_FILEDEPS PRIVATE_LIBS PACKAGE_ADD_METADATA"
2304
def gen_packagevar(d, pkgvars="PACKAGEVARS"):
    """
    Return a space-separated list of variable names whose values affect
    packaging: each name in the pkgvars list, plus its per-package
    (VAR_pkg) variant for every package in PACKAGES.

    d       -- the recipe datastore
    pkgvars -- name of the variable listing the packaging variables

    Used to populate do_package[vardeps] so changes re-run the task.
    """
    ret = []
    pkgs = (d.getVar("PACKAGES") or "").split()
    vars = (d.getVar(pkgvars) or "").split()
    for v in vars:
        ret.append(v)
    for p in pkgs:
        for v in vars:
            ret.append(v + "_" + p)

        # Ensure that changes to INCOMPATIBLE_LICENSE re-run do_package for
        # affected recipes. This must be inside the package loop: previously
        # it sat outside, so only the last package's LICENSE_EXCLUSION was
        # tracked and an empty PACKAGES raised NameError on 'p'.
        ret.append('LICENSE_EXCLUSION-%s' % p)
    return " ".join(ret)
2319
2320PACKAGE_PREPROCESS_FUNCS ?= ""
2321# Functions for setting up PKGD
2322PACKAGEBUILDPKGD ?= " \
2323 package_prepare_pkgdata \
2324 perform_packagecopy \
2325 ${PACKAGE_PREPROCESS_FUNCS} \
2326 split_and_strip_files \
2327 fixup_perms \
2328 "
2329# Functions which split PKGD up into separate packages
2330PACKAGESPLITFUNCS ?= " \
2331 package_do_split_locales \
2332 populate_packages"
2333# Functions which process metadata based on split packages
2334PACKAGEFUNCS += " \
2335 package_fixsymlinks \
2336 package_name_hook \
2337 package_do_filedeps \
2338 package_do_shlibs \
2339 package_do_pkgconfig \
2340 read_shlibdeps \
2341 package_depchains \
2342 emit_pkgdata"
2343
# Main packaging task: validates the setup, builds PKGD from D, splits it
# into per-package trees under PKGDEST, then runs the metadata-processing
# functions listed in PACKAGEFUNCS.
python do_package () {
    # Change the following version to cause sstate to invalidate the package
    # cache. This is useful if an item this class depends on changes in a
    # way that the output of this class changes. rpmdeps is a good example
    # as any change to rpmdeps requires this to be rerun.
    # PACKAGE_BBCLASS_VERSION = "4"

    # Init cachedpath
    global cpath
    cpath = oe.cachedpath.CachedPath()

    ###########################################################################
    # Sanity test the setup
    ###########################################################################

    packages = (d.getVar('PACKAGES') or "").split()
    if len(packages) < 1:
        bb.debug(1, "No packages to build, skipping do_package")
        return

    workdir = d.getVar('WORKDIR')
    outdir = d.getVar('DEPLOY_DIR')
    dest = d.getVar('D')
    dvar = d.getVar('PKGD')
    pn = d.getVar('PN')

    if not workdir or not outdir or not dest or not dvar or not pn:
        msg = "WORKDIR, DEPLOY_DIR, D, PN and PKGD all must be defined, unable to package"
        package_qa_handle_error("var-undefined", msg, d)
        return

    bb.build.exec_func("package_convert_pr_autoinc", d)

    ###########################################################################
    # Optimisations
    ###########################################################################

    # Continually expanding complex expressions is inefficient, particularly
    # when we write to the datastore and invalidate the expansion cache. This
    # code pre-expands some frequently used variables

    def expandVar(x, d):
        d.setVar(x, d.getVar(x))

    for x in 'PN', 'PV', 'BPN', 'TARGET_SYS', 'EXTENDPRAUTO':
        expandVar(x, d)

    ###########################################################################
    # Setup PKGD (from D)
    ###########################################################################

    for f in (d.getVar('PACKAGEBUILDPKGD') or '').split():
        bb.build.exec_func(f, d)

    ###########################################################################
    # Split up PKGD into PKGDEST
    ###########################################################################

    # Fresh path cache, discarding anything cached while PKGD was being built.
    cpath = oe.cachedpath.CachedPath()

    for f in (d.getVar('PACKAGESPLITFUNCS') or '').split():
        bb.build.exec_func(f, d)

    ###########################################################################
    # Process PKGDEST
    ###########################################################################

    # Build global list of files in each split package
    global pkgfiles
    pkgfiles = {}
    packages = d.getVar('PACKAGES').split()
    pkgdest = d.getVar('PKGDEST')
    for pkg in packages:
        pkgfiles[pkg] = []
        for walkroot, dirs, files in cpath.walk(pkgdest + "/" + pkg):
            for file in files:
                pkgfiles[pkg].append(walkroot + os.sep + file)

    for f in (d.getVar('PACKAGEFUNCS') or '').split():
        bb.build.exec_func(f, d)

    # QA_SANE is cleared by fatal QA errors raised during the package funcs.
    qa_sane = d.getVar("QA_SANE")
    if not qa_sane:
        bb.fatal("Fatal QA errors found, failing task.")
}
2429
2430do_package[dirs] = "${SHLIBSWORKDIR} ${PKGDESTWORK} ${D}"
2431do_package[vardeps] += "${PACKAGEBUILDPKGD} ${PACKAGESPLITFUNCS} ${PACKAGEFUNCS} ${@gen_packagevar(d)}"
2432addtask package after do_install
2433
2434SSTATETASKS += "do_package"
2435do_package[cleandirs] = "${PKGDEST} ${PKGDESTWORK}"
2436do_package[sstate-plaindirs] = "${PKGD} ${PKGDEST} ${PKGDESTWORK}"
2437do_package_setscene[dirs] = "${STAGING_DIR}"
2438
# Restore do_package output from shared state instead of re-running the task.
python do_package_setscene () {
    sstate_setscene(d)
}
2442addtask do_package_setscene
2443
# Copy from PKGDESTWORK to a temporary directory, because the shared directory
# can be cleaned by both do_package_setscene and do_packagedata_setscene,
# leading to races if they consume it directly.
python do_packagedata () {
    # Resolve the PR service values before the pkgdata is published.
    bb.build.exec_func("package_get_auto_pr", d)

    # Hardlink PKGDESTWORK into a task-private input directory (see the
    # race note above) rather than consuming it in place.
    src = d.expand("${PKGDESTWORK}")
    dest = d.expand("${WORKDIR}/pkgdata-pdata-input")
    oe.path.copyhardlinktree(src, dest)

    bb.build.exec_func("packagedata_translate_pr_autoinc", d)
}
2455do_packagedata[cleandirs] += "${WORKDIR}/pkgdata-pdata-input"
2456
2457# Translate the EXTENDPRAUTO and AUTOINC to the final values
packagedata_translate_pr_autoinc() {
    # Rewrite every pkgdata input file in place, replacing the
    # @PRSERV_PV_AUTOINC@ and @EXTENDPRAUTO@ placeholders with their
    # final expanded values.
    find ${WORKDIR}/pkgdata-pdata-input -type f | xargs --no-run-if-empty \
        sed -e 's,@PRSERV_PV_AUTOINC@,${PRSERV_PV_AUTOINC},g' \
            -e 's,@EXTENDPRAUTO@,${EXTENDPRAUTO},g' -i
}
2463
2464addtask packagedata before do_build after do_package
2465
2466SSTATETASKS += "do_packagedata"
2467do_packagedata[sstate-inputdirs] = "${WORKDIR}/pkgdata-pdata-input"
2468do_packagedata[sstate-outputdirs] = "${PKGDATA_DIR}"
2469do_packagedata[stamp-extra-info] = "${MACHINE_ARCH}"
2470
# Restore do_packagedata output from shared state instead of re-running the task.
python do_packagedata_setscene () {
    sstate_setscene(d)
}
2474addtask do_packagedata_setscene
2475
2476#
2477# Helper functions for the package writing classes
2478#
2479
def mapping_rename_hook(d):
    """
    Rewrite the runtime-dependency variables of the current package to
    account for package renaming performed by e.g. debian.bbclass or a
    manual PKG variable change.
    """
    pkg = d.getVar("PKG")
    # Apply the same rename mapping to every runtime relationship variable.
    for varname in ("RDEPENDS", "RRECOMMENDS", "RSUGGESTS"):
        runtime_mapping_rename(varname, pkg, d)