summary | refs | log | tree | commit | diff | stats
path: root/meta/classes-global
diff options
context:
space:
mode:
authorRichard Purdie <richard.purdie@linuxfoundation.org>2023-01-04 14:14:24 +0000
committerRichard Purdie <richard.purdie@linuxfoundation.org>2023-01-05 11:52:50 +0000
commite4f17fd72d0bc9c1b2f245abf9dd6a64b2544796 (patch)
treee42a6adff6ddc8adb8bd36b67ae4a69e9940f17a /meta/classes-global
parentb3573d38ef2653a6da58a73fa8aa647009086bee (diff)
downloadpoky-e4f17fd72d0bc9c1b2f245abf9dd6a64b2544796.tar.gz
package: Move package functions to function library
Move the bulk of the remaining package "processing" functions over to the
package function library for parsing efficiency.

(From OE-Core rev: f8785117074908330faca0b99afa7f60ed6ad952)

Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Diffstat (limited to 'meta/classes-global')
-rw-r--r--meta/classes-global/package.bbclass1407
1 file changed, 11 insertions, 1396 deletions
diff --git a/meta/classes-global/package.bbclass b/meta/classes-global/package.bbclass
index 389c857804..21a50bbb45 100644
--- a/meta/classes-global/package.bbclass
+++ b/meta/classes-global/package.bbclass
@@ -69,21 +69,7 @@ PACKAGE_DEPENDS += "rpm-native dwarfsrcfiles-native"
69PACKAGE_WRITE_DEPS ??= "" 69PACKAGE_WRITE_DEPS ??= ""
70 70
71def legitimize_package_name(s): 71def legitimize_package_name(s):
72 """ 72 return oe.package.legitimize_package_name(s)
73 Make sure package names are legitimate strings
74 """
75 import re
76
77 def fixutf(m):
78 cp = m.group(1)
79 if cp:
80 return ('\\u%s' % cp).encode('latin-1').decode('unicode_escape')
81
82 # Handle unicode codepoints encoded as <U0123>, as in glibc locale files.
83 s = re.sub(r'<U([0-9A-Fa-f]{1,4})>', fixutf, s)
84
85 # Remaining package name validity fixes
86 return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-')
87 73
88def do_split_packages(d, root, file_regex, output_pattern, description, postinst=None, recursive=False, hook=None, extra_depends=None, aux_files_pattern=None, postrm=None, allow_dirs=False, prepend=False, match_path=False, aux_files_pattern_verbatim=None, allow_links=False, summary=None): 74def do_split_packages(d, root, file_regex, output_pattern, description, postinst=None, recursive=False, hook=None, extra_depends=None, aux_files_pattern=None, postrm=None, allow_dirs=False, prepend=False, match_path=False, aux_files_pattern_verbatim=None, allow_links=False, summary=None):
89 """ 75 """
@@ -195,7 +181,7 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
195 mode = os.lstat(f).st_mode 181 mode = os.lstat(f).st_mode
196 if not (stat.S_ISREG(mode) or (allow_links and stat.S_ISLNK(mode)) or (allow_dirs and stat.S_ISDIR(mode))): 182 if not (stat.S_ISREG(mode) or (allow_links and stat.S_ISLNK(mode)) or (allow_dirs and stat.S_ISDIR(mode))):
197 continue 183 continue
198 on = legitimize_package_name(m.group(1)) 184 on = oe.package.legitimize_package_name(m.group(1))
199 pkg = output_pattern % on 185 pkg = output_pattern % on
200 split_packages.add(pkg) 186 split_packages.add(pkg)
201 if not pkg in packages: 187 if not pkg in packages:
@@ -266,306 +252,6 @@ def checkbuildpath(file, d):
266 252
267 return False 253 return False
268 254
269def parse_debugsources_from_dwarfsrcfiles_output(dwarfsrcfiles_output):
270 debugfiles = {}
271
272 for line in dwarfsrcfiles_output.splitlines():
273 if line.startswith("\t"):
274 debugfiles[os.path.normpath(line.split()[0])] = ""
275
276 return debugfiles.keys()
277
278def source_info(file, d, fatal=True):
279 import subprocess
280
281 cmd = ["dwarfsrcfiles", file]
282 try:
283 output = subprocess.check_output(cmd, universal_newlines=True, stderr=subprocess.STDOUT)
284 retval = 0
285 except subprocess.CalledProcessError as exc:
286 output = exc.output
287 retval = exc.returncode
288
289 # 255 means a specific file wasn't fully parsed to get the debug file list, which is not a fatal failure
290 if retval != 0 and retval != 255:
291 msg = "dwarfsrcfiles failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else "")
292 if fatal:
293 bb.fatal(msg)
294 bb.note(msg)
295
296 debugsources = parse_debugsources_from_dwarfsrcfiles_output(output)
297
298 return list(debugsources)
299
300def splitdebuginfo(file, dvar, dv, d):
301 # Function to split a single file into two components, one is the stripped
302 # target system binary, the other contains any debugging information. The
303 # two files are linked to reference each other.
304 #
305 # return a mapping of files:debugsources
306
307 import stat
308 import subprocess
309
310 src = file[len(dvar):]
311 dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
312 debugfile = dvar + dest
313 sources = []
314
315 if file.endswith(".ko") and file.find("/lib/modules/") != -1:
316 if oe.package.is_kernel_module_signed(file):
317 bb.debug(1, "Skip strip on signed module %s" % file)
318 return (file, sources)
319
320 # Split the file...
321 bb.utils.mkdirhier(os.path.dirname(debugfile))
322 #bb.note("Split %s -> %s" % (file, debugfile))
323 # Only store off the hard link reference if we successfully split!
324
325 dvar = d.getVar('PKGD')
326 objcopy = d.getVar("OBJCOPY")
327
328 newmode = None
329 if not os.access(file, os.W_OK) or os.access(file, os.R_OK):
330 origmode = os.stat(file)[stat.ST_MODE]
331 newmode = origmode | stat.S_IWRITE | stat.S_IREAD
332 os.chmod(file, newmode)
333
334 # We need to extract the debug src information here...
335 if dv["srcdir"]:
336 sources = source_info(file, d)
337
338 bb.utils.mkdirhier(os.path.dirname(debugfile))
339
340 subprocess.check_output([objcopy, '--only-keep-debug', file, debugfile], stderr=subprocess.STDOUT)
341
342 # Set the debuglink to have the view of the file path on the target
343 subprocess.check_output([objcopy, '--add-gnu-debuglink', debugfile, file], stderr=subprocess.STDOUT)
344
345 if newmode:
346 os.chmod(file, origmode)
347
348 return (file, sources)
349
350def splitstaticdebuginfo(file, dvar, dv, d):
351 # Unlike the function above, there is no way to split a static library
352 # two components. So to get similar results we will copy the unmodified
353 # static library (containing the debug symbols) into a new directory.
354 # We will then strip (preserving symbols) the static library in the
355 # typical location.
356 #
357 # return a mapping of files:debugsources
358
359 import stat
360
361 src = file[len(dvar):]
362 dest = dv["staticlibdir"] + os.path.dirname(src) + dv["staticdir"] + "/" + os.path.basename(src) + dv["staticappend"]
363 debugfile = dvar + dest
364 sources = []
365
366 # Copy the file...
367 bb.utils.mkdirhier(os.path.dirname(debugfile))
368 #bb.note("Copy %s -> %s" % (file, debugfile))
369
370 dvar = d.getVar('PKGD')
371
372 newmode = None
373 if not os.access(file, os.W_OK) or os.access(file, os.R_OK):
374 origmode = os.stat(file)[stat.ST_MODE]
375 newmode = origmode | stat.S_IWRITE | stat.S_IREAD
376 os.chmod(file, newmode)
377
378 # We need to extract the debug src information here...
379 if dv["srcdir"]:
380 sources = source_info(file, d)
381
382 bb.utils.mkdirhier(os.path.dirname(debugfile))
383
384 # Copy the unmodified item to the debug directory
385 shutil.copy2(file, debugfile)
386
387 if newmode:
388 os.chmod(file, origmode)
389
390 return (file, sources)
391
392def inject_minidebuginfo(file, dvar, dv, d):
393 # Extract just the symbols from debuginfo into minidebuginfo,
394 # compress it with xz and inject it back into the binary in a .gnu_debugdata section.
395 # https://sourceware.org/gdb/onlinedocs/gdb/MiniDebugInfo.html
396
397 import subprocess
398
399 readelf = d.getVar('READELF')
400 nm = d.getVar('NM')
401 objcopy = d.getVar('OBJCOPY')
402
403 minidebuginfodir = d.expand('${WORKDIR}/minidebuginfo')
404
405 src = file[len(dvar):]
406 dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
407 debugfile = dvar + dest
408 minidebugfile = minidebuginfodir + src + '.minidebug'
409 bb.utils.mkdirhier(os.path.dirname(minidebugfile))
410
411 # If we didn't produce debuginfo for any reason, we can't produce minidebuginfo either
412 # so skip it.
413 if not os.path.exists(debugfile):
414 bb.debug(1, 'ELF file {} has no debuginfo, skipping minidebuginfo injection'.format(file))
415 return
416
417 # minidebuginfo does not make sense to apply to ELF objects other than
418 # executables and shared libraries, skip applying the minidebuginfo
419 # generation for objects like kernel modules.
420 for line in subprocess.check_output([readelf, '-h', debugfile], universal_newlines=True).splitlines():
421 if not line.strip().startswith("Type:"):
422 continue
423 elftype = line.split(":")[1].strip()
424 if not any(elftype.startswith(i) for i in ["EXEC", "DYN"]):
425 bb.debug(1, 'ELF file {} is not executable/shared, skipping minidebuginfo injection'.format(file))
426 return
427 break
428
429 # Find non-allocated PROGBITS, NOTE, and NOBITS sections in the debuginfo.
430 # We will exclude all of these from minidebuginfo to save space.
431 remove_section_names = []
432 for line in subprocess.check_output([readelf, '-W', '-S', debugfile], universal_newlines=True).splitlines():
433 # strip the leading " [ 1]" section index to allow splitting on space
434 if ']' not in line:
435 continue
436 fields = line[line.index(']') + 1:].split()
437 if len(fields) < 7:
438 continue
439 name = fields[0]
440 type = fields[1]
441 flags = fields[6]
442 # .debug_ sections will be removed by objcopy -S so no need to explicitly remove them
443 if name.startswith('.debug_'):
444 continue
445 if 'A' not in flags and type in ['PROGBITS', 'NOTE', 'NOBITS']:
446 remove_section_names.append(name)
447
448 # List dynamic symbols in the binary. We can exclude these from minidebuginfo
449 # because they are always present in the binary.
450 dynsyms = set()
451 for line in subprocess.check_output([nm, '-D', file, '--format=posix', '--defined-only'], universal_newlines=True).splitlines():
452 dynsyms.add(line.split()[0])
453
454 # Find all function symbols from debuginfo which aren't in the dynamic symbols table.
455 # These are the ones we want to keep in minidebuginfo.
456 keep_symbols_file = minidebugfile + '.symlist'
457 found_any_symbols = False
458 with open(keep_symbols_file, 'w') as f:
459 for line in subprocess.check_output([nm, debugfile, '--format=sysv', '--defined-only'], universal_newlines=True).splitlines():
460 fields = line.split('|')
461 if len(fields) < 7:
462 continue
463 name = fields[0].strip()
464 type = fields[3].strip()
465 if type == 'FUNC' and name not in dynsyms:
466 f.write('{}\n'.format(name))
467 found_any_symbols = True
468
469 if not found_any_symbols:
470 bb.debug(1, 'ELF file {} contains no symbols, skipping minidebuginfo injection'.format(file))
471 return
472
473 bb.utils.remove(minidebugfile)
474 bb.utils.remove(minidebugfile + '.xz')
475
476 subprocess.check_call([objcopy, '-S'] +
477 ['--remove-section={}'.format(s) for s in remove_section_names] +
478 ['--keep-symbols={}'.format(keep_symbols_file), debugfile, minidebugfile])
479
480 subprocess.check_call(['xz', '--keep', minidebugfile])
481
482 subprocess.check_call([objcopy, '--add-section', '.gnu_debugdata={}.xz'.format(minidebugfile), file])
483
484def copydebugsources(debugsrcdir, sources, d):
485 # The debug src information written out to sourcefile is further processed
486 # and copied to the destination here.
487
488 import stat
489 import subprocess
490
491 if debugsrcdir and sources:
492 sourcefile = d.expand("${WORKDIR}/debugsources.list")
493 bb.utils.remove(sourcefile)
494
495 # filenames are null-separated - this is an artefact of the previous use
496 # of rpm's debugedit, which was writing them out that way, and the code elsewhere
497 # is still assuming that.
498 debuglistoutput = '\0'.join(sources) + '\0'
499 with open(sourcefile, 'a') as sf:
500 sf.write(debuglistoutput)
501
502 dvar = d.getVar('PKGD')
503 strip = d.getVar("STRIP")
504 objcopy = d.getVar("OBJCOPY")
505 workdir = d.getVar("WORKDIR")
506 sdir = d.getVar("S")
507 cflags = d.expand("${CFLAGS}")
508
509 prefixmap = {}
510 for flag in cflags.split():
511 if not flag.startswith("-fdebug-prefix-map"):
512 continue
513 if "recipe-sysroot" in flag:
514 continue
515 flag = flag.split("=")
516 prefixmap[flag[1]] = flag[2]
517
518 nosuchdir = []
519 basepath = dvar
520 for p in debugsrcdir.split("/"):
521 basepath = basepath + "/" + p
522 if not cpath.exists(basepath):
523 nosuchdir.append(basepath)
524 bb.utils.mkdirhier(basepath)
525 cpath.updatecache(basepath)
526
527 for pmap in prefixmap:
528 # Ignore files from the recipe sysroots (target and native)
529 cmd = "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '((<internal>|<built-in>)$|/.*recipe-sysroot.*/)' | " % sourcefile
530 # We need to ignore files that are not actually ours
531 # we do this by only paying attention to items from this package
532 cmd += "fgrep -zw '%s' | " % prefixmap[pmap]
533 # Remove prefix in the source paths
534 cmd += "sed 's#%s/##g' | " % (prefixmap[pmap])
535 cmd += "(cd '%s' ; cpio -pd0mlL --no-preserve-owner '%s%s' 2>/dev/null)" % (pmap, dvar, prefixmap[pmap])
536
537 try:
538 subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
539 except subprocess.CalledProcessError:
540 # Can "fail" if internal headers/transient sources are attempted
541 pass
542 # cpio seems to have a bug with -lL together and symbolic links are just copied, not dereferenced.
543 # Work around this by manually finding and copying any symbolic links that made it through.
544 cmd = "find %s%s -type l -print0 -delete | sed s#%s%s/##g | (cd '%s' ; cpio -pd0mL --no-preserve-owner '%s%s')" % \
545 (dvar, prefixmap[pmap], dvar, prefixmap[pmap], pmap, dvar, prefixmap[pmap])
546 subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
547
548 # debugsources.list may be polluted from the host if we used externalsrc,
549 # cpio uses copy-pass and may have just created a directory structure
550 # matching the one from the host, if thats the case move those files to
551 # debugsrcdir to avoid host contamination.
552 # Empty dir structure will be deleted in the next step.
553
554 # Same check as above for externalsrc
555 if workdir not in sdir:
556 if os.path.exists(dvar + debugsrcdir + sdir):
557 cmd = "mv %s%s%s/* %s%s" % (dvar, debugsrcdir, sdir, dvar,debugsrcdir)
558 subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
559
560 # The copy by cpio may have resulted in some empty directories! Remove these
561 cmd = "find %s%s -empty -type d -delete" % (dvar, debugsrcdir)
562 subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
563
564 # Also remove debugsrcdir if its empty
565 for p in nosuchdir[::-1]:
566 if os.path.exists(p) and not os.listdir(p):
567 os.rmdir(p)
568
569# 255#
570# Used by do_packagedata (and possibly other routines post do_package) 256# Used by do_packagedata (and possibly other routines post do_package)
571# 257#
@@ -656,58 +342,7 @@ python package_convert_pr_autoinc() {
656LOCALEBASEPN ??= "${PN}" 342LOCALEBASEPN ??= "${PN}"
657 343
658python package_do_split_locales() { 344python package_do_split_locales() {
659 if (d.getVar('PACKAGE_NO_LOCALE') == '1'): 345 oe.package.split_locales(d)
660 bb.debug(1, "package requested not splitting locales")
661 return
662
663 packages = (d.getVar('PACKAGES') or "").split()
664
665 datadir = d.getVar('datadir')
666 if not datadir:
667 bb.note("datadir not defined")
668 return
669
670 dvar = d.getVar('PKGD')
671 pn = d.getVar('LOCALEBASEPN')
672
673 if pn + '-locale' in packages:
674 packages.remove(pn + '-locale')
675
676 localedir = os.path.join(dvar + datadir, 'locale')
677
678 if not cpath.isdir(localedir):
679 bb.debug(1, "No locale files in this package")
680 return
681
682 locales = os.listdir(localedir)
683
684 summary = d.getVar('SUMMARY') or pn
685 description = d.getVar('DESCRIPTION') or ""
686 locale_section = d.getVar('LOCALE_SECTION')
687 mlprefix = d.getVar('MLPREFIX') or ""
688 for l in sorted(locales):
689 ln = legitimize_package_name(l)
690 pkg = pn + '-locale-' + ln
691 packages.append(pkg)
692 d.setVar('FILES:' + pkg, os.path.join(datadir, 'locale', l))
693 d.setVar('RRECOMMENDS:' + pkg, '%svirtual-locale-%s' % (mlprefix, ln))
694 d.setVar('RPROVIDES:' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln))
695 d.setVar('SUMMARY:' + pkg, '%s - %s translations' % (summary, l))
696 d.setVar('DESCRIPTION:' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l))
697 if locale_section:
698 d.setVar('SECTION:' + pkg, locale_section)
699
700 d.setVar('PACKAGES', ' '.join(packages))
701
702 # Disabled by RP 18/06/07
703 # Wildcards aren't supported in debian
704 # They break with ipkg since glibc-locale* will mean that
705 # glibc-localedata-translit* won't install as a dependency
706 # for some other package which breaks meta-toolchain
707 # Probably breaks since virtual-locale- isn't provided anywhere
708 #rdep = (d.getVar('RDEPENDS:%s' % pn) or "").split()
709 #rdep.append('%s-locale*' % pn)
710 #d.setVar('RDEPENDS:%s' % pn, ' '.join(rdep))
711} 346}
712 347
713python perform_packagecopy () { 348python perform_packagecopy () {
@@ -734,488 +369,19 @@ python fixup_perms () {
734 oe.package.fixup_perms(d) 369 oe.package.fixup_perms(d)
735} 370}
736 371
737def package_debug_vars(d):
738 # We default to '.debug' style
739 if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-file-directory':
740 # Single debug-file-directory style debug info
741 debug_vars = {
742 "append": ".debug",
743 "staticappend": "",
744 "dir": "",
745 "staticdir": "",
746 "libdir": "/usr/lib/debug",
747 "staticlibdir": "/usr/lib/debug-static",
748 "srcdir": "/usr/src/debug",
749 }
750 elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-without-src':
751 # Original OE-core, a.k.a. ".debug", style debug info, but without sources in /usr/src/debug
752 debug_vars = {
753 "append": "",
754 "staticappend": "",
755 "dir": "/.debug",
756 "staticdir": "/.debug-static",
757 "libdir": "",
758 "staticlibdir": "",
759 "srcdir": "",
760 }
761 elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg':
762 debug_vars = {
763 "append": "",
764 "staticappend": "",
765 "dir": "/.debug",
766 "staticdir": "/.debug-static",
767 "libdir": "",
768 "staticlibdir": "",
769 "srcdir": "/usr/src/debug",
770 }
771 else:
772 # Original OE-core, a.k.a. ".debug", style debug info
773 debug_vars = {
774 "append": "",
775 "staticappend": "",
776 "dir": "/.debug",
777 "staticdir": "/.debug-static",
778 "libdir": "",
779 "staticlibdir": "",
780 "srcdir": "/usr/src/debug",
781 }
782
783 return debug_vars
784
785python split_and_strip_files () { 372python split_and_strip_files () {
786 import stat, errno 373 oe.package.process_split_and_strip_files(d)
787 import subprocess
788
789 dvar = d.getVar('PKGD')
790 pn = d.getVar('PN')
791 hostos = d.getVar('HOST_OS')
792
793 oldcwd = os.getcwd()
794 os.chdir(dvar)
795
796 dv = package_debug_vars(d)
797
798 #
799 # First lets figure out all of the files we may have to process ... do this only once!
800 #
801 elffiles = {}
802 symlinks = {}
803 staticlibs = []
804 inodes = {}
805 libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir"))
806 baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir"))
807 skipfiles = (d.getVar("INHIBIT_PACKAGE_STRIP_FILES") or "").split()
808 if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1' or \
809 d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
810 checkelf = {}
811 checkelflinks = {}
812 for root, dirs, files in cpath.walk(dvar):
813 for f in files:
814 file = os.path.join(root, f)
815
816 # Skip debug files
817 if dv["append"] and file.endswith(dv["append"]):
818 continue
819 if dv["dir"] and dv["dir"] in os.path.dirname(file[len(dvar):]):
820 continue
821
822 if file in skipfiles:
823 continue
824
825 if oe.package.is_static_lib(file):
826 staticlibs.append(file)
827 continue
828
829 try:
830 ltarget = cpath.realpath(file, dvar, False)
831 s = cpath.lstat(ltarget)
832 except OSError as e:
833 (err, strerror) = e.args
834 if err != errno.ENOENT:
835 raise
836 # Skip broken symlinks
837 continue
838 if not s:
839 continue
840 # Check its an executable
841 if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) \
842 or (s[stat.ST_MODE] & stat.S_IXOTH) \
843 or ((file.startswith(libdir) or file.startswith(baselibdir)) \
844 and (".so" in f or ".node" in f)) \
845 or (f.startswith('vmlinux') or ".ko" in f):
846
847 if cpath.islink(file):
848 checkelflinks[file] = ltarget
849 continue
850 # Use a reference of device ID and inode number to identify files
851 file_reference = "%d_%d" % (s.st_dev, s.st_ino)
852 checkelf[file] = (file, file_reference)
853
854 results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelflinks.values(), d)
855 results_map = {}
856 for (ltarget, elf_file) in results:
857 results_map[ltarget] = elf_file
858 for file in checkelflinks:
859 ltarget = checkelflinks[file]
860 # If it's a symlink, and points to an ELF file, we capture the readlink target
861 if results_map[ltarget]:
862 target = os.readlink(file)
863 #bb.note("Sym: %s (%d)" % (ltarget, results_map[ltarget]))
864 symlinks[file] = target
865
866 results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelf.keys(), d)
867
868 # Sort results by file path. This ensures that the files are always
869 # processed in the same order, which is important to make sure builds
870 # are reproducible when dealing with hardlinks
871 results.sort(key=lambda x: x[0])
872
873 for (file, elf_file) in results:
874 # It's a file (or hardlink), not a link
875 # ...but is it ELF, and is it already stripped?
876 if elf_file & 1:
877 if elf_file & 2:
878 if 'already-stripped' in (d.getVar('INSANE_SKIP:' + pn) or "").split():
879 bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn))
880 else:
881 msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn)
882 oe.qa.handle_error("already-stripped", msg, d)
883 continue
884
885 # At this point we have an unstripped elf file. We need to:
886 # a) Make sure any file we strip is not hardlinked to anything else outside this tree
887 # b) Only strip any hardlinked file once (no races)
888 # c) Track any hardlinks between files so that we can reconstruct matching debug file hardlinks
889
890 # Use a reference of device ID and inode number to identify files
891 file_reference = checkelf[file][1]
892 if file_reference in inodes:
893 os.unlink(file)
894 os.link(inodes[file_reference][0], file)
895 inodes[file_reference].append(file)
896 else:
897 inodes[file_reference] = [file]
898 # break hardlink
899 bb.utils.break_hardlinks(file)
900 elffiles[file] = elf_file
901 # Modified the file so clear the cache
902 cpath.updatecache(file)
903
904 def strip_pkgd_prefix(f):
905 nonlocal dvar
906
907 if f.startswith(dvar):
908 return f[len(dvar):]
909
910 return f
911
912 #
913 # First lets process debug splitting
914 #
915 if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
916 results = oe.utils.multiprocess_launch(splitdebuginfo, list(elffiles), d, extraargs=(dvar, dv, d))
917
918 if dv["srcdir"] and not hostos.startswith("mingw"):
919 if (d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
920 results = oe.utils.multiprocess_launch(splitstaticdebuginfo, staticlibs, d, extraargs=(dvar, dv, d))
921 else:
922 for file in staticlibs:
923 results.append( (file,source_info(file, d)) )
924
925 d.setVar("PKGDEBUGSOURCES", {strip_pkgd_prefix(f): sorted(s) for f, s in results})
926
927 sources = set()
928 for r in results:
929 sources.update(r[1])
930
931 # Hardlink our debug symbols to the other hardlink copies
932 for ref in inodes:
933 if len(inodes[ref]) == 1:
934 continue
935
936 target = inodes[ref][0][len(dvar):]
937 for file in inodes[ref][1:]:
938 src = file[len(dvar):]
939 dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(target) + dv["append"]
940 fpath = dvar + dest
941 ftarget = dvar + dv["libdir"] + os.path.dirname(target) + dv["dir"] + "/" + os.path.basename(target) + dv["append"]
942 bb.utils.mkdirhier(os.path.dirname(fpath))
943 # Only one hardlink of separated debug info file in each directory
944 if not os.access(fpath, os.R_OK):
945 #bb.note("Link %s -> %s" % (fpath, ftarget))
946 os.link(ftarget, fpath)
947
948 # Create symlinks for all cases we were able to split symbols
949 for file in symlinks:
950 src = file[len(dvar):]
951 dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
952 fpath = dvar + dest
953 # Skip it if the target doesn't exist
954 try:
955 s = os.stat(fpath)
956 except OSError as e:
957 (err, strerror) = e.args
958 if err != errno.ENOENT:
959 raise
960 continue
961
962 ltarget = symlinks[file]
963 lpath = os.path.dirname(ltarget)
964 lbase = os.path.basename(ltarget)
965 ftarget = ""
966 if lpath and lpath != ".":
967 ftarget += lpath + dv["dir"] + "/"
968 ftarget += lbase + dv["append"]
969 if lpath.startswith(".."):
970 ftarget = os.path.join("..", ftarget)
971 bb.utils.mkdirhier(os.path.dirname(fpath))
972 #bb.note("Symlink %s -> %s" % (fpath, ftarget))
973 os.symlink(ftarget, fpath)
974
975 # Process the dv["srcdir"] if requested...
976 # This copies and places the referenced sources for later debugging...
977 copydebugsources(dv["srcdir"], sources, d)
978 #
979 # End of debug splitting
980 #
981
982 #
983 # Now lets go back over things and strip them
984 #
985 if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1'):
986 strip = d.getVar("STRIP")
987 sfiles = []
988 for file in elffiles:
989 elf_file = int(elffiles[file])
990 #bb.note("Strip %s" % file)
991 sfiles.append((file, elf_file, strip))
992 if (d.getVar('PACKAGE_STRIP_STATIC') == '1' or d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
993 for f in staticlibs:
994 sfiles.append((f, 16, strip))
995
996 oe.utils.multiprocess_launch(oe.package.runstrip, sfiles, d)
997
998 # Build "minidebuginfo" and reinject it back into the stripped binaries
999 if d.getVar('PACKAGE_MINIDEBUGINFO') == '1':
1000 oe.utils.multiprocess_launch(inject_minidebuginfo, list(elffiles), d,
1001 extraargs=(dvar, dv, d))
1002
1003 #
1004 # End of strip
1005 #
1006 os.chdir(oldcwd)
1007} 374}
1008 375
1009python populate_packages () { 376python populate_packages () {
1010 import glob, re 377 oe.package.populate_packages(d)
1011
1012 workdir = d.getVar('WORKDIR')
1013 outdir = d.getVar('DEPLOY_DIR')
1014 dvar = d.getVar('PKGD')
1015 packages = d.getVar('PACKAGES').split()
1016 pn = d.getVar('PN')
1017
1018 bb.utils.mkdirhier(outdir)
1019 os.chdir(dvar)
1020
1021 autodebug = not (d.getVar("NOAUTOPACKAGEDEBUG") or False)
1022
1023 split_source_package = (d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg')
1024
1025 # If debug-with-srcpkg mode is enabled then add the source package if it
1026 # doesn't exist and add the source file contents to the source package.
1027 if split_source_package:
1028 src_package_name = ('%s-src' % d.getVar('PN'))
1029 if not src_package_name in packages:
1030 packages.append(src_package_name)
1031 d.setVar('FILES:%s' % src_package_name, '/usr/src/debug')
1032
1033 # Sanity check PACKAGES for duplicates
1034 # Sanity should be moved to sanity.bbclass once we have the infrastructure
1035 package_dict = {}
1036
1037 for i, pkg in enumerate(packages):
1038 if pkg in package_dict:
1039 msg = "%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg
1040 oe.qa.handle_error("packages-list", msg, d)
1041 # Ensure the source package gets the chance to pick up the source files
1042 # before the debug package by ordering it first in PACKAGES. Whether it
1043 # actually picks up any source files is controlled by
1044 # PACKAGE_DEBUG_SPLIT_STYLE.
1045 elif pkg.endswith("-src"):
1046 package_dict[pkg] = (10, i)
1047 elif autodebug and pkg.endswith("-dbg"):
1048 package_dict[pkg] = (30, i)
1049 else:
1050 package_dict[pkg] = (50, i)
1051 packages = sorted(package_dict.keys(), key=package_dict.get)
1052 d.setVar('PACKAGES', ' '.join(packages))
1053 pkgdest = d.getVar('PKGDEST')
1054
1055 seen = []
1056
1057 # os.mkdir masks the permissions with umask so we have to unset it first
1058 oldumask = os.umask(0)
1059
1060 debug = []
1061 for root, dirs, files in cpath.walk(dvar):
1062 dir = root[len(dvar):]
1063 if not dir:
1064 dir = os.sep
1065 for f in (files + dirs):
1066 path = "." + os.path.join(dir, f)
1067 if "/.debug/" in path or "/.debug-static/" in path or path.endswith("/.debug"):
1068 debug.append(path)
1069
1070 for pkg in packages:
1071 root = os.path.join(pkgdest, pkg)
1072 bb.utils.mkdirhier(root)
1073
1074 filesvar = d.getVar('FILES:%s' % pkg) or ""
1075 if "//" in filesvar:
1076 msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg
1077 oe.qa.handle_error("files-invalid", msg, d)
1078 filesvar.replace("//", "/")
1079
1080 origfiles = filesvar.split()
1081 files, symlink_paths = oe.package.files_from_filevars(origfiles)
1082
1083 if autodebug and pkg.endswith("-dbg"):
1084 files.extend(debug)
1085
1086 for file in files:
1087 if (not cpath.islink(file)) and (not cpath.exists(file)):
1088 continue
1089 if file in seen:
1090 continue
1091 seen.append(file)
1092
1093 def mkdir(src, dest, p):
1094 src = os.path.join(src, p)
1095 dest = os.path.join(dest, p)
1096 fstat = cpath.stat(src)
1097 os.mkdir(dest)
1098 os.chmod(dest, fstat.st_mode)
1099 os.chown(dest, fstat.st_uid, fstat.st_gid)
1100 if p not in seen:
1101 seen.append(p)
1102 cpath.updatecache(dest)
1103
1104 def mkdir_recurse(src, dest, paths):
1105 if cpath.exists(dest + '/' + paths):
1106 return
1107 while paths.startswith("./"):
1108 paths = paths[2:]
1109 p = "."
1110 for c in paths.split("/"):
1111 p = os.path.join(p, c)
1112 if not cpath.exists(os.path.join(dest, p)):
1113 mkdir(src, dest, p)
1114
1115 if cpath.isdir(file) and not cpath.islink(file):
1116 mkdir_recurse(dvar, root, file)
1117 continue
1118
1119 mkdir_recurse(dvar, root, os.path.dirname(file))
1120 fpath = os.path.join(root,file)
1121 if not cpath.islink(file):
1122 os.link(file, fpath)
1123 continue
1124 ret = bb.utils.copyfile(file, fpath)
1125 if ret is False or ret == 0:
1126 bb.fatal("File population failed")
1127
1128 # Check if symlink paths exist
1129 for file in symlink_paths:
1130 if not os.path.exists(os.path.join(root,file)):
1131 bb.fatal("File '%s' cannot be packaged into '%s' because its "
1132 "parent directory structure does not exist. One of "
1133 "its parent directories is a symlink whose target "
1134 "directory is not included in the package." %
1135 (file, pkg))
1136
1137 os.umask(oldumask)
1138 os.chdir(workdir)
1139
1140 # Handle excluding packages with incompatible licenses
1141 package_list = []
1142 for pkg in packages:
1143 licenses = d.getVar('_exclude_incompatible-' + pkg)
1144 if licenses:
1145 msg = "Excluding %s from packaging as it has incompatible license(s): %s" % (pkg, licenses)
1146 oe.qa.handle_error("incompatible-license", msg, d)
1147 else:
1148 package_list.append(pkg)
1149 d.setVar('PACKAGES', ' '.join(package_list))
1150
1151 unshipped = []
1152 for root, dirs, files in cpath.walk(dvar):
1153 dir = root[len(dvar):]
1154 if not dir:
1155 dir = os.sep
1156 for f in (files + dirs):
1157 path = os.path.join(dir, f)
1158 if ('.' + path) not in seen:
1159 unshipped.append(path)
1160
1161 if unshipped != []:
1162 msg = pn + ": Files/directories were installed but not shipped in any package:"
1163 if "installed-vs-shipped" in (d.getVar('INSANE_SKIP:' + pn) or "").split():
1164 bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn)
1165 else:
1166 for f in unshipped:
1167 msg = msg + "\n " + f
1168 msg = msg + "\nPlease set FILES such that these items are packaged. Alternatively if they are unneeded, avoid installing them or delete them within do_install.\n"
1169 msg = msg + "%s: %d installed and not shipped files." % (pn, len(unshipped))
1170 oe.qa.handle_error("installed-vs-shipped", msg, d)
1171} 378}
1172populate_packages[dirs] = "${D}" 379populate_packages[dirs] = "${D}"
1173 380
1174python package_fixsymlinks () { 381python package_fixsymlinks () {
1175 import errno 382 oe.package.process_fixsymlinks(pkgfiles, d)
1176 pkgdest = d.getVar('PKGDEST')
1177 packages = d.getVar("PACKAGES", False).split()
1178
1179 dangling_links = {}
1180 pkg_files = {}
1181 for pkg in packages:
1182 dangling_links[pkg] = []
1183 pkg_files[pkg] = []
1184 inst_root = os.path.join(pkgdest, pkg)
1185 for path in pkgfiles[pkg]:
1186 rpath = path[len(inst_root):]
1187 pkg_files[pkg].append(rpath)
1188 rtarget = cpath.realpath(path, inst_root, True, assume_dir = True)
1189 if not cpath.lexists(rtarget):
1190 dangling_links[pkg].append(os.path.normpath(rtarget[len(inst_root):]))
1191
1192 newrdepends = {}
1193 for pkg in dangling_links:
1194 for l in dangling_links[pkg]:
1195 found = False
1196 bb.debug(1, "%s contains dangling link %s" % (pkg, l))
1197 for p in packages:
1198 if l in pkg_files[p]:
1199 found = True
1200 bb.debug(1, "target found in %s" % p)
1201 if p == pkg:
1202 break
1203 if pkg not in newrdepends:
1204 newrdepends[pkg] = []
1205 newrdepends[pkg].append(p)
1206 break
1207 if found == False:
1208 bb.note("%s contains dangling symlink to %s" % (pkg, l))
1209
1210 for pkg in newrdepends:
1211 rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS:' + pkg) or "")
1212 for p in newrdepends[pkg]:
1213 if p not in rdepends:
1214 rdepends[p] = []
1215 d.setVar('RDEPENDS:' + pkg, bb.utils.join_deps(rdepends, commasep=False))
1216} 383}
1217 384
1218
1219python package_package_name_hook() { 385python package_package_name_hook() {
1220 """ 386 """
1221 A package_name_hook function can be used to rewrite the package names by 387 A package_name_hook function can be used to rewrite the package names by
@@ -1245,456 +411,23 @@ fi
1245 411
1246RPMDEPS = "${STAGING_LIBDIR_NATIVE}/rpm/rpmdeps --alldeps --define '__font_provides %{nil}'" 412RPMDEPS = "${STAGING_LIBDIR_NATIVE}/rpm/rpmdeps --alldeps --define '__font_provides %{nil}'"
1247 413
1248# Collect perfile run-time dependency metadata
1249# Output:
1250# FILERPROVIDESFLIST:pkg - list of all files w/ deps
1251# FILERPROVIDES:filepath:pkg - per file dep
1252#
1253# FILERDEPENDSFLIST:pkg - list of all files w/ deps
1254# FILERDEPENDS:filepath:pkg - per file dep
1255
1256python package_do_filedeps() { 414python package_do_filedeps() {
1257 if d.getVar('SKIP_FILEDEPS') == '1': 415 oe.package.process_filedeps(pkgfiles, d)
1258 return
1259
1260 pkgdest = d.getVar('PKGDEST')
1261 packages = d.getVar('PACKAGES')
1262 rpmdeps = d.getVar('RPMDEPS')
1263
1264 def chunks(files, n):
1265 return [files[i:i+n] for i in range(0, len(files), n)]
1266
1267 pkglist = []
1268 for pkg in packages.split():
1269 if d.getVar('SKIP_FILEDEPS:' + pkg) == '1':
1270 continue
1271 if pkg.endswith('-dbg') or pkg.endswith('-doc') or pkg.find('-locale-') != -1 or pkg.find('-localedata-') != -1 or pkg.find('-gconv-') != -1 or pkg.find('-charmap-') != -1 or pkg.startswith('kernel-module-') or pkg.endswith('-src'):
1272 continue
1273 for files in chunks(pkgfiles[pkg], 100):
1274 pkglist.append((pkg, files, rpmdeps, pkgdest))
1275
1276 processed = oe.utils.multiprocess_launch(oe.package.filedeprunner, pkglist, d)
1277
1278 provides_files = {}
1279 requires_files = {}
1280
1281 for result in processed:
1282 (pkg, provides, requires) = result
1283
1284 if pkg not in provides_files:
1285 provides_files[pkg] = []
1286 if pkg not in requires_files:
1287 requires_files[pkg] = []
1288
1289 for file in sorted(provides):
1290 provides_files[pkg].append(file)
1291 key = "FILERPROVIDES:" + file + ":" + pkg
1292 d.appendVar(key, " " + " ".join(provides[file]))
1293
1294 for file in sorted(requires):
1295 requires_files[pkg].append(file)
1296 key = "FILERDEPENDS:" + file + ":" + pkg
1297 d.appendVar(key, " " + " ".join(requires[file]))
1298
1299 for pkg in requires_files:
1300 d.setVar("FILERDEPENDSFLIST:" + pkg, " ".join(sorted(requires_files[pkg])))
1301 for pkg in provides_files:
1302 d.setVar("FILERPROVIDESFLIST:" + pkg, " ".join(sorted(provides_files[pkg])))
1303} 416}
1304 417
1305SHLIBSDIRS = "${WORKDIR_PKGDATA}/${MLPREFIX}shlibs2" 418SHLIBSDIRS = "${WORKDIR_PKGDATA}/${MLPREFIX}shlibs2"
1306SHLIBSWORKDIR = "${PKGDESTWORK}/${MLPREFIX}shlibs2" 419SHLIBSWORKDIR = "${PKGDESTWORK}/${MLPREFIX}shlibs2"
1307 420
1308python package_do_shlibs() { 421python package_do_shlibs() {
1309 import itertools 422 oe.package.process_shlibs(pkgfiles, d)
1310 import re, pipes
1311 import subprocess
1312
1313 exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', False)
1314 if exclude_shlibs:
1315 bb.note("not generating shlibs")
1316 return
1317
1318 lib_re = re.compile(r"^.*\.so")
1319 libdir_re = re.compile(r".*/%s$" % d.getVar('baselib'))
1320
1321 packages = d.getVar('PACKAGES')
1322
1323 shlib_pkgs = []
1324 exclusion_list = d.getVar("EXCLUDE_PACKAGES_FROM_SHLIBS")
1325 if exclusion_list:
1326 for pkg in packages.split():
1327 if pkg not in exclusion_list.split():
1328 shlib_pkgs.append(pkg)
1329 else:
1330 bb.note("not generating shlibs for %s" % pkg)
1331 else:
1332 shlib_pkgs = packages.split()
1333
1334 hostos = d.getVar('HOST_OS')
1335
1336 workdir = d.getVar('WORKDIR')
1337
1338 ver = d.getVar('PKGV')
1339 if not ver:
1340 msg = "PKGV not defined"
1341 oe.qa.handle_error("pkgv-undefined", msg, d)
1342 return
1343
1344 pkgdest = d.getVar('PKGDEST')
1345
1346 shlibswork_dir = d.getVar('SHLIBSWORKDIR')
1347
1348 def linux_so(file, pkg, pkgver, d):
1349 needs_ldconfig = False
1350 needed = set()
1351 sonames = set()
1352 renames = []
1353 ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
1354 cmd = d.getVar('OBJDUMP') + " -p " + pipes.quote(file) + " 2>/dev/null"
1355 fd = os.popen(cmd)
1356 lines = fd.readlines()
1357 fd.close()
1358 rpath = tuple()
1359 for l in lines:
1360 m = re.match(r"\s+RPATH\s+([^\s]*)", l)
1361 if m:
1362 rpaths = m.group(1).replace("$ORIGIN", ldir).split(":")
1363 rpath = tuple(map(os.path.normpath, rpaths))
1364 for l in lines:
1365 m = re.match(r"\s+NEEDED\s+([^\s]*)", l)
1366 if m:
1367 dep = m.group(1)
1368 if dep not in needed:
1369 needed.add((dep, file, rpath))
1370 m = re.match(r"\s+SONAME\s+([^\s]*)", l)
1371 if m:
1372 this_soname = m.group(1)
1373 prov = (this_soname, ldir, pkgver)
1374 if not prov in sonames:
1375 # if library is private (only used by package) then do not build shlib for it
1376 import fnmatch
1377 if not private_libs or len([i for i in private_libs if fnmatch.fnmatch(this_soname, i)]) == 0:
1378 sonames.add(prov)
1379 if libdir_re.match(os.path.dirname(file)):
1380 needs_ldconfig = True
1381 if needs_ldconfig and snap_symlinks and (os.path.basename(file) != this_soname):
1382 renames.append((file, os.path.join(os.path.dirname(file), this_soname)))
1383 return (needs_ldconfig, needed, sonames, renames)
1384
1385 def darwin_so(file, needed, sonames, renames, pkgver):
1386 if not os.path.exists(file):
1387 return
1388 ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
1389
1390 def get_combinations(base):
1391 #
1392 # Given a base library name, find all combinations of this split by "." and "-"
1393 #
1394 combos = []
1395 options = base.split(".")
1396 for i in range(1, len(options) + 1):
1397 combos.append(".".join(options[0:i]))
1398 options = base.split("-")
1399 for i in range(1, len(options) + 1):
1400 combos.append("-".join(options[0:i]))
1401 return combos
1402
1403 if (file.endswith('.dylib') or file.endswith('.so')) and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.endswith('-src'):
1404 # Drop suffix
1405 name = os.path.basename(file).rsplit(".",1)[0]
1406 # Find all combinations
1407 combos = get_combinations(name)
1408 for combo in combos:
1409 if not combo in sonames:
1410 prov = (combo, ldir, pkgver)
1411 sonames.add(prov)
1412 if file.endswith('.dylib') or file.endswith('.so'):
1413 rpath = []
1414 p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-l', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
1415 out, err = p.communicate()
1416 # If returned successfully, process stdout for results
1417 if p.returncode == 0:
1418 for l in out.split("\n"):
1419 l = l.strip()
1420 if l.startswith('path '):
1421 rpath.append(l.split()[1])
1422
1423 p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-L', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
1424 out, err = p.communicate()
1425 # If returned successfully, process stdout for results
1426 if p.returncode == 0:
1427 for l in out.split("\n"):
1428 l = l.strip()
1429 if not l or l.endswith(":"):
1430 continue
1431 if "is not an object file" in l:
1432 continue
1433 name = os.path.basename(l.split()[0]).rsplit(".", 1)[0]
1434 if name and name not in needed[pkg]:
1435 needed[pkg].add((name, file, tuple()))
1436
1437 def mingw_dll(file, needed, sonames, renames, pkgver):
1438 if not os.path.exists(file):
1439 return
1440
1441 if file.endswith(".dll"):
1442 # assume all dlls are shared objects provided by the package
1443 sonames.add((os.path.basename(file), os.path.dirname(file).replace(pkgdest + "/" + pkg, ''), pkgver))
1444
1445 if (file.endswith(".dll") or file.endswith(".exe")):
1446 # use objdump to search for "DLL Name: .*\.dll"
1447 p = subprocess.Popen([d.expand("${HOST_PREFIX}objdump"), "-p", file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
1448 out, err = p.communicate()
1449 # process the output, grabbing all .dll names
1450 if p.returncode == 0:
1451 for m in re.finditer(r"DLL Name: (.*?\.dll)$", out.decode(), re.MULTILINE | re.IGNORECASE):
1452 dllname = m.group(1)
1453 if dllname:
1454 needed[pkg].add((dllname, file, tuple()))
1455
1456 if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS') == "1":
1457 snap_symlinks = True
1458 else:
1459 snap_symlinks = False
1460
1461 needed = {}
1462
1463 shlib_provider = oe.package.read_shlib_providers(d)
1464
1465 for pkg in shlib_pkgs:
1466 private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or ""
1467 private_libs = private_libs.split()
1468 needs_ldconfig = False
1469 bb.debug(2, "calculating shlib provides for %s" % pkg)
1470
1471 pkgver = d.getVar('PKGV:' + pkg)
1472 if not pkgver:
1473 pkgver = d.getVar('PV_' + pkg)
1474 if not pkgver:
1475 pkgver = ver
1476
1477 needed[pkg] = set()
1478 sonames = set()
1479 renames = []
1480 linuxlist = []
1481 for file in pkgfiles[pkg]:
1482 soname = None
1483 if cpath.islink(file):
1484 continue
1485 if hostos == "darwin" or hostos == "darwin8":
1486 darwin_so(file, needed, sonames, renames, pkgver)
1487 elif hostos.startswith("mingw"):
1488 mingw_dll(file, needed, sonames, renames, pkgver)
1489 elif os.access(file, os.X_OK) or lib_re.match(file):
1490 linuxlist.append(file)
1491
1492 if linuxlist:
1493 results = oe.utils.multiprocess_launch(linux_so, linuxlist, d, extraargs=(pkg, pkgver, d))
1494 for r in results:
1495 ldconfig = r[0]
1496 needed[pkg] |= r[1]
1497 sonames |= r[2]
1498 renames.extend(r[3])
1499 needs_ldconfig = needs_ldconfig or ldconfig
1500
1501 for (old, new) in renames:
1502 bb.note("Renaming %s to %s" % (old, new))
1503 bb.utils.rename(old, new)
1504 pkgfiles[pkg].remove(old)
1505
1506 shlibs_file = os.path.join(shlibswork_dir, pkg + ".list")
1507 if len(sonames):
1508 with open(shlibs_file, 'w') as fd:
1509 for s in sorted(sonames):
1510 if s[0] in shlib_provider and s[1] in shlib_provider[s[0]]:
1511 (old_pkg, old_pkgver) = shlib_provider[s[0]][s[1]]
1512 if old_pkg != pkg:
1513 bb.warn('%s-%s was registered as shlib provider for %s, changing it to %s-%s because it was built later' % (old_pkg, old_pkgver, s[0], pkg, pkgver))
1514 bb.debug(1, 'registering %s-%s as shlib provider for %s' % (pkg, pkgver, s[0]))
1515 fd.write(s[0] + ':' + s[1] + ':' + s[2] + '\n')
1516 if s[0] not in shlib_provider:
1517 shlib_provider[s[0]] = {}
1518 shlib_provider[s[0]][s[1]] = (pkg, pkgver)
1519 if needs_ldconfig:
1520 bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
1521 postinst = d.getVar('pkg_postinst:%s' % pkg)
1522 if not postinst:
1523 postinst = '#!/bin/sh\n'
1524 postinst += d.getVar('ldconfig_postinst_fragment')
1525 d.setVar('pkg_postinst:%s' % pkg, postinst)
1526 bb.debug(1, 'LIBNAMES: pkg %s sonames %s' % (pkg, sonames))
1527
1528 assumed_libs = d.getVar('ASSUME_SHLIBS')
1529 if assumed_libs:
1530 libdir = d.getVar("libdir")
1531 for e in assumed_libs.split():
1532 l, dep_pkg = e.split(":")
1533 lib_ver = None
1534 dep_pkg = dep_pkg.rsplit("_", 1)
1535 if len(dep_pkg) == 2:
1536 lib_ver = dep_pkg[1]
1537 dep_pkg = dep_pkg[0]
1538 if l not in shlib_provider:
1539 shlib_provider[l] = {}
1540 shlib_provider[l][libdir] = (dep_pkg, lib_ver)
1541
1542 libsearchpath = [d.getVar('libdir'), d.getVar('base_libdir')]
1543
1544 for pkg in shlib_pkgs:
1545 bb.debug(2, "calculating shlib requirements for %s" % pkg)
1546
1547 private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or ""
1548 private_libs = private_libs.split()
1549
1550 deps = list()
1551 for n in needed[pkg]:
1552 # if n is in private libraries, don't try to search provider for it
1553 # this could cause problem in case some abc.bb provides private
1554 # /opt/abc/lib/libfoo.so.1 and contains /usr/bin/abc depending on system library libfoo.so.1
1555 # but skipping it is still better alternative than providing own
1556 # version and then adding runtime dependency for the same system library
1557 import fnmatch
1558 if private_libs and len([i for i in private_libs if fnmatch.fnmatch(n[0], i)]) > 0:
1559 bb.debug(2, '%s: Dependency %s covered by PRIVATE_LIBS' % (pkg, n[0]))
1560 continue
1561 if n[0] in shlib_provider.keys():
1562 shlib_provider_map = shlib_provider[n[0]]
1563 matches = set()
1564 for p in itertools.chain(list(n[2]), sorted(shlib_provider_map.keys()), libsearchpath):
1565 if p in shlib_provider_map:
1566 matches.add(p)
1567 if len(matches) > 1:
1568 matchpkgs = ', '.join([shlib_provider_map[match][0] for match in matches])
1569 bb.error("%s: Multiple shlib providers for %s: %s (used by files: %s)" % (pkg, n[0], matchpkgs, n[1]))
1570 elif len(matches) == 1:
1571 (dep_pkg, ver_needed) = shlib_provider_map[matches.pop()]
1572
1573 bb.debug(2, '%s: Dependency %s requires package %s (used by files: %s)' % (pkg, n[0], dep_pkg, n[1]))
1574
1575 if dep_pkg == pkg:
1576 continue
1577
1578 if ver_needed:
1579 dep = "%s (>= %s)" % (dep_pkg, ver_needed)
1580 else:
1581 dep = dep_pkg
1582 if not dep in deps:
1583 deps.append(dep)
1584 continue
1585 bb.note("Couldn't find shared library provider for %s, used by files: %s" % (n[0], n[1]))
1586
1587 deps_file = os.path.join(pkgdest, pkg + ".shlibdeps")
1588 if os.path.exists(deps_file):
1589 os.remove(deps_file)
1590 if deps:
1591 with open(deps_file, 'w') as fd:
1592 for dep in sorted(deps):
1593 fd.write(dep + '\n')
1594} 423}
1595 424
1596python package_do_pkgconfig () { 425python package_do_pkgconfig () {
1597 import re 426 oe.package.process_pkgconfig(pkgfiles, d)
1598
1599 packages = d.getVar('PACKAGES')
1600 workdir = d.getVar('WORKDIR')
1601 pkgdest = d.getVar('PKGDEST')
1602
1603 shlibs_dirs = d.getVar('SHLIBSDIRS').split()
1604 shlibswork_dir = d.getVar('SHLIBSWORKDIR')
1605
1606 pc_re = re.compile(r'(.*)\.pc$')
1607 var_re = re.compile(r'(.*)=(.*)')
1608 field_re = re.compile(r'(.*): (.*)')
1609
1610 pkgconfig_provided = {}
1611 pkgconfig_needed = {}
1612 for pkg in packages.split():
1613 pkgconfig_provided[pkg] = []
1614 pkgconfig_needed[pkg] = []
1615 for file in sorted(pkgfiles[pkg]):
1616 m = pc_re.match(file)
1617 if m:
1618 pd = bb.data.init()
1619 name = m.group(1)
1620 pkgconfig_provided[pkg].append(os.path.basename(name))
1621 if not os.access(file, os.R_OK):
1622 continue
1623 with open(file, 'r') as f:
1624 lines = f.readlines()
1625 for l in lines:
1626 m = var_re.match(l)
1627 if m:
1628 name = m.group(1)
1629 val = m.group(2)
1630 pd.setVar(name, pd.expand(val))
1631 continue
1632 m = field_re.match(l)
1633 if m:
1634 hdr = m.group(1)
1635 exp = pd.expand(m.group(2))
1636 if hdr == 'Requires':
1637 pkgconfig_needed[pkg] += exp.replace(',', ' ').split()
1638
1639 for pkg in packages.split():
1640 pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist")
1641 if pkgconfig_provided[pkg] != []:
1642 with open(pkgs_file, 'w') as f:
1643 for p in sorted(pkgconfig_provided[pkg]):
1644 f.write('%s\n' % p)
1645
1646 # Go from least to most specific since the last one found wins
1647 for dir in reversed(shlibs_dirs):
1648 if not os.path.exists(dir):
1649 continue
1650 for file in sorted(os.listdir(dir)):
1651 m = re.match(r'^(.*)\.pclist$', file)
1652 if m:
1653 pkg = m.group(1)
1654 with open(os.path.join(dir, file)) as fd:
1655 lines = fd.readlines()
1656 pkgconfig_provided[pkg] = []
1657 for l in lines:
1658 pkgconfig_provided[pkg].append(l.rstrip())
1659
1660 for pkg in packages.split():
1661 deps = []
1662 for n in pkgconfig_needed[pkg]:
1663 found = False
1664 for k in pkgconfig_provided.keys():
1665 if n in pkgconfig_provided[k]:
1666 if k != pkg and not (k in deps):
1667 deps.append(k)
1668 found = True
1669 if found == False:
1670 bb.note("couldn't find pkgconfig module '%s' in any package" % n)
1671 deps_file = os.path.join(pkgdest, pkg + ".pcdeps")
1672 if len(deps):
1673 with open(deps_file, 'w') as fd:
1674 for dep in deps:
1675 fd.write(dep + '\n')
1676} 427}
1677 428
1678def read_libdep_files(d):
1679 pkglibdeps = {}
1680 packages = d.getVar('PACKAGES').split()
1681 for pkg in packages:
1682 pkglibdeps[pkg] = {}
1683 for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
1684 depsfile = d.expand("${PKGDEST}/" + pkg + extension)
1685 if os.access(depsfile, os.R_OK):
1686 with open(depsfile) as fd:
1687 lines = fd.readlines()
1688 for l in lines:
1689 l.rstrip()
1690 deps = bb.utils.explode_dep_versions2(l)
1691 for dep in deps:
1692 if not dep in pkglibdeps[pkg]:
1693 pkglibdeps[pkg][dep] = deps[dep]
1694 return pkglibdeps
1695
1696python read_shlibdeps () { 429python read_shlibdeps () {
1697 pkglibdeps = read_libdep_files(d) 430 pkglibdeps = oe.package.read_libdep_files(d)
1698 431
1699 packages = d.getVar('PACKAGES').split() 432 packages = d.getVar('PACKAGES').split()
1700 for pkg in packages: 433 for pkg in packages:
@@ -1710,125 +443,7 @@ python read_shlibdeps () {
1710} 443}
1711 444
1712python package_depchains() { 445python package_depchains() {
1713 """ 446 oe.package.process_depchains(pkgfiles, d)
1714 For a given set of prefix and postfix modifiers, make those packages
1715 RRECOMMENDS on the corresponding packages for its RDEPENDS.
1716
1717 Example: If package A depends upon package B, and A's .bb emits an
1718 A-dev package, this would make A-dev Recommends: B-dev.
1719
1720 If only one of a given suffix is specified, it will take the RRECOMMENDS
1721 based on the RDEPENDS of *all* other packages. If more than one of a given
1722 suffix is specified, its will only use the RDEPENDS of the single parent
1723 package.
1724 """
1725
1726 packages = d.getVar('PACKAGES')
1727 postfixes = (d.getVar('DEPCHAIN_POST') or '').split()
1728 prefixes = (d.getVar('DEPCHAIN_PRE') or '').split()
1729
1730 def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d):
1731
1732 #bb.note('depends for %s is %s' % (base, depends))
1733 rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "")
1734
1735 for depend in sorted(depends):
1736 if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'):
1737 #bb.note("Skipping %s" % depend)
1738 continue
1739 if depend.endswith('-dev'):
1740 depend = depend[:-4]
1741 if depend.endswith('-dbg'):
1742 depend = depend[:-4]
1743 pkgname = getname(depend, suffix)
1744 #bb.note("Adding %s for %s" % (pkgname, depend))
1745 if pkgname not in rreclist and pkgname != pkg:
1746 rreclist[pkgname] = []
1747
1748 #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist)))
1749 d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))
1750
1751 def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):
1752
1753 #bb.note('rdepends for %s is %s' % (base, rdepends))
1754 rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "")
1755
1756 for depend in sorted(rdepends):
1757 if depend.find('virtual-locale-') != -1:
1758 #bb.note("Skipping %s" % depend)
1759 continue
1760 if depend.endswith('-dev'):
1761 depend = depend[:-4]
1762 if depend.endswith('-dbg'):
1763 depend = depend[:-4]
1764 pkgname = getname(depend, suffix)
1765 #bb.note("Adding %s for %s" % (pkgname, depend))
1766 if pkgname not in rreclist and pkgname != pkg:
1767 rreclist[pkgname] = []
1768
1769 #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist)))
1770 d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))
1771
1772 def add_dep(list, dep):
1773 if dep not in list:
1774 list.append(dep)
1775
1776 depends = []
1777 for dep in bb.utils.explode_deps(d.getVar('DEPENDS') or ""):
1778 add_dep(depends, dep)
1779
1780 rdepends = []
1781 for pkg in packages.split():
1782 for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + pkg) or ""):
1783 add_dep(rdepends, dep)
1784
1785 #bb.note('rdepends is %s' % rdepends)
1786
1787 def post_getname(name, suffix):
1788 return '%s%s' % (name, suffix)
1789 def pre_getname(name, suffix):
1790 return '%s%s' % (suffix, name)
1791
1792 pkgs = {}
1793 for pkg in packages.split():
1794 for postfix in postfixes:
1795 if pkg.endswith(postfix):
1796 if not postfix in pkgs:
1797 pkgs[postfix] = {}
1798 pkgs[postfix][pkg] = (pkg[:-len(postfix)], post_getname)
1799
1800 for prefix in prefixes:
1801 if pkg.startswith(prefix):
1802 if not prefix in pkgs:
1803 pkgs[prefix] = {}
1804 pkgs[prefix][pkg] = (pkg[:-len(prefix)], pre_getname)
1805
1806 if "-dbg" in pkgs:
1807 pkglibdeps = read_libdep_files(d)
1808 pkglibdeplist = []
1809 for pkg in pkglibdeps:
1810 for k in pkglibdeps[pkg]:
1811 add_dep(pkglibdeplist, k)
1812 dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS') == '1') or (bb.data.inherits_class('packagegroup', d)))
1813
1814 for suffix in pkgs:
1815 for pkg in pkgs[suffix]:
1816 if d.getVarFlag('RRECOMMENDS:' + pkg, 'nodeprrecs'):
1817 continue
1818 (base, func) = pkgs[suffix][pkg]
1819 if suffix == "-dev":
1820 pkg_adddeprrecs(pkg, base, suffix, func, depends, d)
1821 elif suffix == "-dbg":
1822 if not dbgdefaultdeps:
1823 pkg_addrrecs(pkg, base, suffix, func, pkglibdeplist, d)
1824 continue
1825 if len(pkgs[suffix]) == 1:
1826 pkg_addrrecs(pkg, base, suffix, func, rdepends, d)
1827 else:
1828 rdeps = []
1829 for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + base) or ""):
1830 add_dep(rdeps, dep)
1831 pkg_addrrecs(pkg, base, suffix, func, rdeps, d)
1832} 447}
1833 448
1834# Since bitbake can't determine which variables are accessed during package 449# Since bitbake can't determine which variables are accessed during package