diff options
| -rw-r--r-- | meta/classes-global/package.bbclass | 1407 | ||||
| -rw-r--r-- | meta/lib/oe/package.py | 1408 |
2 files changed, 1416 insertions, 1399 deletions
diff --git a/meta/classes-global/package.bbclass b/meta/classes-global/package.bbclass index 389c857804..21a50bbb45 100644 --- a/meta/classes-global/package.bbclass +++ b/meta/classes-global/package.bbclass | |||
| @@ -69,21 +69,7 @@ PACKAGE_DEPENDS += "rpm-native dwarfsrcfiles-native" | |||
| 69 | PACKAGE_WRITE_DEPS ??= "" | 69 | PACKAGE_WRITE_DEPS ??= "" |
| 70 | 70 | ||
| 71 | def legitimize_package_name(s): | 71 | def legitimize_package_name(s): |
| 72 | """ | 72 | return oe.package.legitimize_package_name(s) |
| 73 | Make sure package names are legitimate strings | ||
| 74 | """ | ||
| 75 | import re | ||
| 76 | |||
| 77 | def fixutf(m): | ||
| 78 | cp = m.group(1) | ||
| 79 | if cp: | ||
| 80 | return ('\\u%s' % cp).encode('latin-1').decode('unicode_escape') | ||
| 81 | |||
| 82 | # Handle unicode codepoints encoded as <U0123>, as in glibc locale files. | ||
| 83 | s = re.sub(r'<U([0-9A-Fa-f]{1,4})>', fixutf, s) | ||
| 84 | |||
| 85 | # Remaining package name validity fixes | ||
| 86 | return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-') | ||
| 87 | 73 | ||
| 88 | def do_split_packages(d, root, file_regex, output_pattern, description, postinst=None, recursive=False, hook=None, extra_depends=None, aux_files_pattern=None, postrm=None, allow_dirs=False, prepend=False, match_path=False, aux_files_pattern_verbatim=None, allow_links=False, summary=None): | 74 | def do_split_packages(d, root, file_regex, output_pattern, description, postinst=None, recursive=False, hook=None, extra_depends=None, aux_files_pattern=None, postrm=None, allow_dirs=False, prepend=False, match_path=False, aux_files_pattern_verbatim=None, allow_links=False, summary=None): |
| 89 | """ | 75 | """ |
| @@ -195,7 +181,7 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst | |||
| 195 | mode = os.lstat(f).st_mode | 181 | mode = os.lstat(f).st_mode |
| 196 | if not (stat.S_ISREG(mode) or (allow_links and stat.S_ISLNK(mode)) or (allow_dirs and stat.S_ISDIR(mode))): | 182 | if not (stat.S_ISREG(mode) or (allow_links and stat.S_ISLNK(mode)) or (allow_dirs and stat.S_ISDIR(mode))): |
| 197 | continue | 183 | continue |
| 198 | on = legitimize_package_name(m.group(1)) | 184 | on = oe.package.legitimize_package_name(m.group(1)) |
| 199 | pkg = output_pattern % on | 185 | pkg = output_pattern % on |
| 200 | split_packages.add(pkg) | 186 | split_packages.add(pkg) |
| 201 | if not pkg in packages: | 187 | if not pkg in packages: |
| @@ -266,306 +252,6 @@ def checkbuildpath(file, d): | |||
| 266 | 252 | ||
| 267 | return False | 253 | return False |
| 268 | 254 | ||
| 269 | def parse_debugsources_from_dwarfsrcfiles_output(dwarfsrcfiles_output): | ||
| 270 | debugfiles = {} | ||
| 271 | |||
| 272 | for line in dwarfsrcfiles_output.splitlines(): | ||
| 273 | if line.startswith("\t"): | ||
| 274 | debugfiles[os.path.normpath(line.split()[0])] = "" | ||
| 275 | |||
| 276 | return debugfiles.keys() | ||
| 277 | |||
| 278 | def source_info(file, d, fatal=True): | ||
| 279 | import subprocess | ||
| 280 | |||
| 281 | cmd = ["dwarfsrcfiles", file] | ||
| 282 | try: | ||
| 283 | output = subprocess.check_output(cmd, universal_newlines=True, stderr=subprocess.STDOUT) | ||
| 284 | retval = 0 | ||
| 285 | except subprocess.CalledProcessError as exc: | ||
| 286 | output = exc.output | ||
| 287 | retval = exc.returncode | ||
| 288 | |||
| 289 | # 255 means a specific file wasn't fully parsed to get the debug file list, which is not a fatal failure | ||
| 290 | if retval != 0 and retval != 255: | ||
| 291 | msg = "dwarfsrcfiles failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else "") | ||
| 292 | if fatal: | ||
| 293 | bb.fatal(msg) | ||
| 294 | bb.note(msg) | ||
| 295 | |||
| 296 | debugsources = parse_debugsources_from_dwarfsrcfiles_output(output) | ||
| 297 | |||
| 298 | return list(debugsources) | ||
| 299 | |||
| 300 | def splitdebuginfo(file, dvar, dv, d): | ||
| 301 | # Function to split a single file into two components, one is the stripped | ||
| 302 | # target system binary, the other contains any debugging information. The | ||
| 303 | # two files are linked to reference each other. | ||
| 304 | # | ||
| 305 | # return a mapping of files:debugsources | ||
| 306 | |||
| 307 | import stat | ||
| 308 | import subprocess | ||
| 309 | |||
| 310 | src = file[len(dvar):] | ||
| 311 | dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"] | ||
| 312 | debugfile = dvar + dest | ||
| 313 | sources = [] | ||
| 314 | |||
| 315 | if file.endswith(".ko") and file.find("/lib/modules/") != -1: | ||
| 316 | if oe.package.is_kernel_module_signed(file): | ||
| 317 | bb.debug(1, "Skip strip on signed module %s" % file) | ||
| 318 | return (file, sources) | ||
| 319 | |||
| 320 | # Split the file... | ||
| 321 | bb.utils.mkdirhier(os.path.dirname(debugfile)) | ||
| 322 | #bb.note("Split %s -> %s" % (file, debugfile)) | ||
| 323 | # Only store off the hard link reference if we successfully split! | ||
| 324 | |||
| 325 | dvar = d.getVar('PKGD') | ||
| 326 | objcopy = d.getVar("OBJCOPY") | ||
| 327 | |||
| 328 | newmode = None | ||
| 329 | if not os.access(file, os.W_OK) or os.access(file, os.R_OK): | ||
| 330 | origmode = os.stat(file)[stat.ST_MODE] | ||
| 331 | newmode = origmode | stat.S_IWRITE | stat.S_IREAD | ||
| 332 | os.chmod(file, newmode) | ||
| 333 | |||
| 334 | # We need to extract the debug src information here... | ||
| 335 | if dv["srcdir"]: | ||
| 336 | sources = source_info(file, d) | ||
| 337 | |||
| 338 | bb.utils.mkdirhier(os.path.dirname(debugfile)) | ||
| 339 | |||
| 340 | subprocess.check_output([objcopy, '--only-keep-debug', file, debugfile], stderr=subprocess.STDOUT) | ||
| 341 | |||
| 342 | # Set the debuglink to have the view of the file path on the target | ||
| 343 | subprocess.check_output([objcopy, '--add-gnu-debuglink', debugfile, file], stderr=subprocess.STDOUT) | ||
| 344 | |||
| 345 | if newmode: | ||
| 346 | os.chmod(file, origmode) | ||
| 347 | |||
| 348 | return (file, sources) | ||
| 349 | |||
| 350 | def splitstaticdebuginfo(file, dvar, dv, d): | ||
| 351 | # Unlike the function above, there is no way to split a static library | ||
| 352 | # two components. So to get similar results we will copy the unmodified | ||
| 353 | # static library (containing the debug symbols) into a new directory. | ||
| 354 | # We will then strip (preserving symbols) the static library in the | ||
| 355 | # typical location. | ||
| 356 | # | ||
| 357 | # return a mapping of files:debugsources | ||
| 358 | |||
| 359 | import stat | ||
| 360 | |||
| 361 | src = file[len(dvar):] | ||
| 362 | dest = dv["staticlibdir"] + os.path.dirname(src) + dv["staticdir"] + "/" + os.path.basename(src) + dv["staticappend"] | ||
| 363 | debugfile = dvar + dest | ||
| 364 | sources = [] | ||
| 365 | |||
| 366 | # Copy the file... | ||
| 367 | bb.utils.mkdirhier(os.path.dirname(debugfile)) | ||
| 368 | #bb.note("Copy %s -> %s" % (file, debugfile)) | ||
| 369 | |||
| 370 | dvar = d.getVar('PKGD') | ||
| 371 | |||
| 372 | newmode = None | ||
| 373 | if not os.access(file, os.W_OK) or os.access(file, os.R_OK): | ||
| 374 | origmode = os.stat(file)[stat.ST_MODE] | ||
| 375 | newmode = origmode | stat.S_IWRITE | stat.S_IREAD | ||
| 376 | os.chmod(file, newmode) | ||
| 377 | |||
| 378 | # We need to extract the debug src information here... | ||
| 379 | if dv["srcdir"]: | ||
| 380 | sources = source_info(file, d) | ||
| 381 | |||
| 382 | bb.utils.mkdirhier(os.path.dirname(debugfile)) | ||
| 383 | |||
| 384 | # Copy the unmodified item to the debug directory | ||
| 385 | shutil.copy2(file, debugfile) | ||
| 386 | |||
| 387 | if newmode: | ||
| 388 | os.chmod(file, origmode) | ||
| 389 | |||
| 390 | return (file, sources) | ||
| 391 | |||
| 392 | def inject_minidebuginfo(file, dvar, dv, d): | ||
| 393 | # Extract just the symbols from debuginfo into minidebuginfo, | ||
| 394 | # compress it with xz and inject it back into the binary in a .gnu_debugdata section. | ||
| 395 | # https://sourceware.org/gdb/onlinedocs/gdb/MiniDebugInfo.html | ||
| 396 | |||
| 397 | import subprocess | ||
| 398 | |||
| 399 | readelf = d.getVar('READELF') | ||
| 400 | nm = d.getVar('NM') | ||
| 401 | objcopy = d.getVar('OBJCOPY') | ||
| 402 | |||
| 403 | minidebuginfodir = d.expand('${WORKDIR}/minidebuginfo') | ||
| 404 | |||
| 405 | src = file[len(dvar):] | ||
| 406 | dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"] | ||
| 407 | debugfile = dvar + dest | ||
| 408 | minidebugfile = minidebuginfodir + src + '.minidebug' | ||
| 409 | bb.utils.mkdirhier(os.path.dirname(minidebugfile)) | ||
| 410 | |||
| 411 | # If we didn't produce debuginfo for any reason, we can't produce minidebuginfo either | ||
| 412 | # so skip it. | ||
| 413 | if not os.path.exists(debugfile): | ||
| 414 | bb.debug(1, 'ELF file {} has no debuginfo, skipping minidebuginfo injection'.format(file)) | ||
| 415 | return | ||
| 416 | |||
| 417 | # minidebuginfo does not make sense to apply to ELF objects other than | ||
| 418 | # executables and shared libraries, skip applying the minidebuginfo | ||
| 419 | # generation for objects like kernel modules. | ||
| 420 | for line in subprocess.check_output([readelf, '-h', debugfile], universal_newlines=True).splitlines(): | ||
| 421 | if not line.strip().startswith("Type:"): | ||
| 422 | continue | ||
| 423 | elftype = line.split(":")[1].strip() | ||
| 424 | if not any(elftype.startswith(i) for i in ["EXEC", "DYN"]): | ||
| 425 | bb.debug(1, 'ELF file {} is not executable/shared, skipping minidebuginfo injection'.format(file)) | ||
| 426 | return | ||
| 427 | break | ||
| 428 | |||
| 429 | # Find non-allocated PROGBITS, NOTE, and NOBITS sections in the debuginfo. | ||
| 430 | # We will exclude all of these from minidebuginfo to save space. | ||
| 431 | remove_section_names = [] | ||
| 432 | for line in subprocess.check_output([readelf, '-W', '-S', debugfile], universal_newlines=True).splitlines(): | ||
| 433 | # strip the leading " [ 1]" section index to allow splitting on space | ||
| 434 | if ']' not in line: | ||
| 435 | continue | ||
| 436 | fields = line[line.index(']') + 1:].split() | ||
| 437 | if len(fields) < 7: | ||
| 438 | continue | ||
| 439 | name = fields[0] | ||
| 440 | type = fields[1] | ||
| 441 | flags = fields[6] | ||
| 442 | # .debug_ sections will be removed by objcopy -S so no need to explicitly remove them | ||
| 443 | if name.startswith('.debug_'): | ||
| 444 | continue | ||
| 445 | if 'A' not in flags and type in ['PROGBITS', 'NOTE', 'NOBITS']: | ||
| 446 | remove_section_names.append(name) | ||
| 447 | |||
| 448 | # List dynamic symbols in the binary. We can exclude these from minidebuginfo | ||
| 449 | # because they are always present in the binary. | ||
| 450 | dynsyms = set() | ||
| 451 | for line in subprocess.check_output([nm, '-D', file, '--format=posix', '--defined-only'], universal_newlines=True).splitlines(): | ||
| 452 | dynsyms.add(line.split()[0]) | ||
| 453 | |||
| 454 | # Find all function symbols from debuginfo which aren't in the dynamic symbols table. | ||
| 455 | # These are the ones we want to keep in minidebuginfo. | ||
| 456 | keep_symbols_file = minidebugfile + '.symlist' | ||
| 457 | found_any_symbols = False | ||
| 458 | with open(keep_symbols_file, 'w') as f: | ||
| 459 | for line in subprocess.check_output([nm, debugfile, '--format=sysv', '--defined-only'], universal_newlines=True).splitlines(): | ||
| 460 | fields = line.split('|') | ||
| 461 | if len(fields) < 7: | ||
| 462 | continue | ||
| 463 | name = fields[0].strip() | ||
| 464 | type = fields[3].strip() | ||
| 465 | if type == 'FUNC' and name not in dynsyms: | ||
| 466 | f.write('{}\n'.format(name)) | ||
| 467 | found_any_symbols = True | ||
| 468 | |||
| 469 | if not found_any_symbols: | ||
| 470 | bb.debug(1, 'ELF file {} contains no symbols, skipping minidebuginfo injection'.format(file)) | ||
| 471 | return | ||
| 472 | |||
| 473 | bb.utils.remove(minidebugfile) | ||
| 474 | bb.utils.remove(minidebugfile + '.xz') | ||
| 475 | |||
| 476 | subprocess.check_call([objcopy, '-S'] + | ||
| 477 | ['--remove-section={}'.format(s) for s in remove_section_names] + | ||
| 478 | ['--keep-symbols={}'.format(keep_symbols_file), debugfile, minidebugfile]) | ||
| 479 | |||
| 480 | subprocess.check_call(['xz', '--keep', minidebugfile]) | ||
| 481 | |||
| 482 | subprocess.check_call([objcopy, '--add-section', '.gnu_debugdata={}.xz'.format(minidebugfile), file]) | ||
| 483 | |||
| 484 | def copydebugsources(debugsrcdir, sources, d): | ||
| 485 | # The debug src information written out to sourcefile is further processed | ||
| 486 | # and copied to the destination here. | ||
| 487 | |||
| 488 | import stat | ||
| 489 | import subprocess | ||
| 490 | |||
| 491 | if debugsrcdir and sources: | ||
| 492 | sourcefile = d.expand("${WORKDIR}/debugsources.list") | ||
| 493 | bb.utils.remove(sourcefile) | ||
| 494 | |||
| 495 | # filenames are null-separated - this is an artefact of the previous use | ||
| 496 | # of rpm's debugedit, which was writing them out that way, and the code elsewhere | ||
| 497 | # is still assuming that. | ||
| 498 | debuglistoutput = '\0'.join(sources) + '\0' | ||
| 499 | with open(sourcefile, 'a') as sf: | ||
| 500 | sf.write(debuglistoutput) | ||
| 501 | |||
| 502 | dvar = d.getVar('PKGD') | ||
| 503 | strip = d.getVar("STRIP") | ||
| 504 | objcopy = d.getVar("OBJCOPY") | ||
| 505 | workdir = d.getVar("WORKDIR") | ||
| 506 | sdir = d.getVar("S") | ||
| 507 | cflags = d.expand("${CFLAGS}") | ||
| 508 | |||
| 509 | prefixmap = {} | ||
| 510 | for flag in cflags.split(): | ||
| 511 | if not flag.startswith("-fdebug-prefix-map"): | ||
| 512 | continue | ||
| 513 | if "recipe-sysroot" in flag: | ||
| 514 | continue | ||
| 515 | flag = flag.split("=") | ||
| 516 | prefixmap[flag[1]] = flag[2] | ||
| 517 | |||
| 518 | nosuchdir = [] | ||
| 519 | basepath = dvar | ||
| 520 | for p in debugsrcdir.split("/"): | ||
| 521 | basepath = basepath + "/" + p | ||
| 522 | if not cpath.exists(basepath): | ||
| 523 | nosuchdir.append(basepath) | ||
| 524 | bb.utils.mkdirhier(basepath) | ||
| 525 | cpath.updatecache(basepath) | ||
| 526 | |||
| 527 | for pmap in prefixmap: | ||
| 528 | # Ignore files from the recipe sysroots (target and native) | ||
| 529 | cmd = "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '((<internal>|<built-in>)$|/.*recipe-sysroot.*/)' | " % sourcefile | ||
| 530 | # We need to ignore files that are not actually ours | ||
| 531 | # we do this by only paying attention to items from this package | ||
| 532 | cmd += "fgrep -zw '%s' | " % prefixmap[pmap] | ||
| 533 | # Remove prefix in the source paths | ||
| 534 | cmd += "sed 's#%s/##g' | " % (prefixmap[pmap]) | ||
| 535 | cmd += "(cd '%s' ; cpio -pd0mlL --no-preserve-owner '%s%s' 2>/dev/null)" % (pmap, dvar, prefixmap[pmap]) | ||
| 536 | |||
| 537 | try: | ||
| 538 | subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT) | ||
| 539 | except subprocess.CalledProcessError: | ||
| 540 | # Can "fail" if internal headers/transient sources are attempted | ||
| 541 | pass | ||
| 542 | # cpio seems to have a bug with -lL together and symbolic links are just copied, not dereferenced. | ||
| 543 | # Work around this by manually finding and copying any symbolic links that made it through. | ||
| 544 | cmd = "find %s%s -type l -print0 -delete | sed s#%s%s/##g | (cd '%s' ; cpio -pd0mL --no-preserve-owner '%s%s')" % \ | ||
| 545 | (dvar, prefixmap[pmap], dvar, prefixmap[pmap], pmap, dvar, prefixmap[pmap]) | ||
| 546 | subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT) | ||
| 547 | |||
| 548 | # debugsources.list may be polluted from the host if we used externalsrc, | ||
| 549 | # cpio uses copy-pass and may have just created a directory structure | ||
| 550 | # matching the one from the host, if thats the case move those files to | ||
| 551 | # debugsrcdir to avoid host contamination. | ||
| 552 | # Empty dir structure will be deleted in the next step. | ||
| 553 | |||
| 554 | # Same check as above for externalsrc | ||
| 555 | if workdir not in sdir: | ||
| 556 | if os.path.exists(dvar + debugsrcdir + sdir): | ||
| 557 | cmd = "mv %s%s%s/* %s%s" % (dvar, debugsrcdir, sdir, dvar,debugsrcdir) | ||
| 558 | subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT) | ||
| 559 | |||
| 560 | # The copy by cpio may have resulted in some empty directories! Remove these | ||
| 561 | cmd = "find %s%s -empty -type d -delete" % (dvar, debugsrcdir) | ||
| 562 | subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT) | ||
| 563 | |||
| 564 | # Also remove debugsrcdir if its empty | ||
| 565 | for p in nosuchdir[::-1]: | ||
| 566 | if os.path.exists(p) and not os.listdir(p): | ||
| 567 | os.rmdir(p) | ||
| 568 | |||
| 569 | # | 255 | # |
| 570 | # Used by do_packagedata (and possibly other routines post do_package) | 256 | # Used by do_packagedata (and possibly other routines post do_package) |
| 571 | # | 257 | # |
| @@ -656,58 +342,7 @@ python package_convert_pr_autoinc() { | |||
| 656 | LOCALEBASEPN ??= "${PN}" | 342 | LOCALEBASEPN ??= "${PN}" |
| 657 | 343 | ||
| 658 | python package_do_split_locales() { | 344 | python package_do_split_locales() { |
| 659 | if (d.getVar('PACKAGE_NO_LOCALE') == '1'): | 345 | oe.package.split_locales(d) |
| 660 | bb.debug(1, "package requested not splitting locales") | ||
| 661 | return | ||
| 662 | |||
| 663 | packages = (d.getVar('PACKAGES') or "").split() | ||
| 664 | |||
| 665 | datadir = d.getVar('datadir') | ||
| 666 | if not datadir: | ||
| 667 | bb.note("datadir not defined") | ||
| 668 | return | ||
| 669 | |||
| 670 | dvar = d.getVar('PKGD') | ||
| 671 | pn = d.getVar('LOCALEBASEPN') | ||
| 672 | |||
| 673 | if pn + '-locale' in packages: | ||
| 674 | packages.remove(pn + '-locale') | ||
| 675 | |||
| 676 | localedir = os.path.join(dvar + datadir, 'locale') | ||
| 677 | |||
| 678 | if not cpath.isdir(localedir): | ||
| 679 | bb.debug(1, "No locale files in this package") | ||
| 680 | return | ||
| 681 | |||
| 682 | locales = os.listdir(localedir) | ||
| 683 | |||
| 684 | summary = d.getVar('SUMMARY') or pn | ||
| 685 | description = d.getVar('DESCRIPTION') or "" | ||
| 686 | locale_section = d.getVar('LOCALE_SECTION') | ||
| 687 | mlprefix = d.getVar('MLPREFIX') or "" | ||
| 688 | for l in sorted(locales): | ||
| 689 | ln = legitimize_package_name(l) | ||
| 690 | pkg = pn + '-locale-' + ln | ||
| 691 | packages.append(pkg) | ||
| 692 | d.setVar('FILES:' + pkg, os.path.join(datadir, 'locale', l)) | ||
| 693 | d.setVar('RRECOMMENDS:' + pkg, '%svirtual-locale-%s' % (mlprefix, ln)) | ||
| 694 | d.setVar('RPROVIDES:' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln)) | ||
| 695 | d.setVar('SUMMARY:' + pkg, '%s - %s translations' % (summary, l)) | ||
| 696 | d.setVar('DESCRIPTION:' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l)) | ||
| 697 | if locale_section: | ||
| 698 | d.setVar('SECTION:' + pkg, locale_section) | ||
| 699 | |||
| 700 | d.setVar('PACKAGES', ' '.join(packages)) | ||
| 701 | |||
| 702 | # Disabled by RP 18/06/07 | ||
| 703 | # Wildcards aren't supported in debian | ||
| 704 | # They break with ipkg since glibc-locale* will mean that | ||
| 705 | # glibc-localedata-translit* won't install as a dependency | ||
| 706 | # for some other package which breaks meta-toolchain | ||
| 707 | # Probably breaks since virtual-locale- isn't provided anywhere | ||
| 708 | #rdep = (d.getVar('RDEPENDS:%s' % pn) or "").split() | ||
| 709 | #rdep.append('%s-locale*' % pn) | ||
| 710 | #d.setVar('RDEPENDS:%s' % pn, ' '.join(rdep)) | ||
| 711 | } | 346 | } |
| 712 | 347 | ||
| 713 | python perform_packagecopy () { | 348 | python perform_packagecopy () { |
| @@ -734,488 +369,19 @@ python fixup_perms () { | |||
| 734 | oe.package.fixup_perms(d) | 369 | oe.package.fixup_perms(d) |
| 735 | } | 370 | } |
| 736 | 371 | ||
| 737 | def package_debug_vars(d): | ||
| 738 | # We default to '.debug' style | ||
| 739 | if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-file-directory': | ||
| 740 | # Single debug-file-directory style debug info | ||
| 741 | debug_vars = { | ||
| 742 | "append": ".debug", | ||
| 743 | "staticappend": "", | ||
| 744 | "dir": "", | ||
| 745 | "staticdir": "", | ||
| 746 | "libdir": "/usr/lib/debug", | ||
| 747 | "staticlibdir": "/usr/lib/debug-static", | ||
| 748 | "srcdir": "/usr/src/debug", | ||
| 749 | } | ||
| 750 | elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-without-src': | ||
| 751 | # Original OE-core, a.k.a. ".debug", style debug info, but without sources in /usr/src/debug | ||
| 752 | debug_vars = { | ||
| 753 | "append": "", | ||
| 754 | "staticappend": "", | ||
| 755 | "dir": "/.debug", | ||
| 756 | "staticdir": "/.debug-static", | ||
| 757 | "libdir": "", | ||
| 758 | "staticlibdir": "", | ||
| 759 | "srcdir": "", | ||
| 760 | } | ||
| 761 | elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg': | ||
| 762 | debug_vars = { | ||
| 763 | "append": "", | ||
| 764 | "staticappend": "", | ||
| 765 | "dir": "/.debug", | ||
| 766 | "staticdir": "/.debug-static", | ||
| 767 | "libdir": "", | ||
| 768 | "staticlibdir": "", | ||
| 769 | "srcdir": "/usr/src/debug", | ||
| 770 | } | ||
| 771 | else: | ||
| 772 | # Original OE-core, a.k.a. ".debug", style debug info | ||
| 773 | debug_vars = { | ||
| 774 | "append": "", | ||
| 775 | "staticappend": "", | ||
| 776 | "dir": "/.debug", | ||
| 777 | "staticdir": "/.debug-static", | ||
| 778 | "libdir": "", | ||
| 779 | "staticlibdir": "", | ||
| 780 | "srcdir": "/usr/src/debug", | ||
| 781 | } | ||
| 782 | |||
| 783 | return debug_vars | ||
| 784 | |||
| 785 | python split_and_strip_files () { | 372 | python split_and_strip_files () { |
| 786 | import stat, errno | 373 | oe.package.process_split_and_strip_files(d) |
| 787 | import subprocess | ||
| 788 | |||
| 789 | dvar = d.getVar('PKGD') | ||
| 790 | pn = d.getVar('PN') | ||
| 791 | hostos = d.getVar('HOST_OS') | ||
| 792 | |||
| 793 | oldcwd = os.getcwd() | ||
| 794 | os.chdir(dvar) | ||
| 795 | |||
| 796 | dv = package_debug_vars(d) | ||
| 797 | |||
| 798 | # | ||
| 799 | # First lets figure out all of the files we may have to process ... do this only once! | ||
| 800 | # | ||
| 801 | elffiles = {} | ||
| 802 | symlinks = {} | ||
| 803 | staticlibs = [] | ||
| 804 | inodes = {} | ||
| 805 | libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir")) | ||
| 806 | baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir")) | ||
| 807 | skipfiles = (d.getVar("INHIBIT_PACKAGE_STRIP_FILES") or "").split() | ||
| 808 | if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1' or \ | ||
| 809 | d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'): | ||
| 810 | checkelf = {} | ||
| 811 | checkelflinks = {} | ||
| 812 | for root, dirs, files in cpath.walk(dvar): | ||
| 813 | for f in files: | ||
| 814 | file = os.path.join(root, f) | ||
| 815 | |||
| 816 | # Skip debug files | ||
| 817 | if dv["append"] and file.endswith(dv["append"]): | ||
| 818 | continue | ||
| 819 | if dv["dir"] and dv["dir"] in os.path.dirname(file[len(dvar):]): | ||
| 820 | continue | ||
| 821 | |||
| 822 | if file in skipfiles: | ||
| 823 | continue | ||
| 824 | |||
| 825 | if oe.package.is_static_lib(file): | ||
| 826 | staticlibs.append(file) | ||
| 827 | continue | ||
| 828 | |||
| 829 | try: | ||
| 830 | ltarget = cpath.realpath(file, dvar, False) | ||
| 831 | s = cpath.lstat(ltarget) | ||
| 832 | except OSError as e: | ||
| 833 | (err, strerror) = e.args | ||
| 834 | if err != errno.ENOENT: | ||
| 835 | raise | ||
| 836 | # Skip broken symlinks | ||
| 837 | continue | ||
| 838 | if not s: | ||
| 839 | continue | ||
| 840 | # Check its an executable | ||
| 841 | if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) \ | ||
| 842 | or (s[stat.ST_MODE] & stat.S_IXOTH) \ | ||
| 843 | or ((file.startswith(libdir) or file.startswith(baselibdir)) \ | ||
| 844 | and (".so" in f or ".node" in f)) \ | ||
| 845 | or (f.startswith('vmlinux') or ".ko" in f): | ||
| 846 | |||
| 847 | if cpath.islink(file): | ||
| 848 | checkelflinks[file] = ltarget | ||
| 849 | continue | ||
| 850 | # Use a reference of device ID and inode number to identify files | ||
| 851 | file_reference = "%d_%d" % (s.st_dev, s.st_ino) | ||
| 852 | checkelf[file] = (file, file_reference) | ||
| 853 | |||
| 854 | results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelflinks.values(), d) | ||
| 855 | results_map = {} | ||
| 856 | for (ltarget, elf_file) in results: | ||
| 857 | results_map[ltarget] = elf_file | ||
| 858 | for file in checkelflinks: | ||
| 859 | ltarget = checkelflinks[file] | ||
| 860 | # If it's a symlink, and points to an ELF file, we capture the readlink target | ||
| 861 | if results_map[ltarget]: | ||
| 862 | target = os.readlink(file) | ||
| 863 | #bb.note("Sym: %s (%d)" % (ltarget, results_map[ltarget])) | ||
| 864 | symlinks[file] = target | ||
| 865 | |||
| 866 | results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelf.keys(), d) | ||
| 867 | |||
| 868 | # Sort results by file path. This ensures that the files are always | ||
| 869 | # processed in the same order, which is important to make sure builds | ||
| 870 | # are reproducible when dealing with hardlinks | ||
| 871 | results.sort(key=lambda x: x[0]) | ||
| 872 | |||
| 873 | for (file, elf_file) in results: | ||
| 874 | # It's a file (or hardlink), not a link | ||
| 875 | # ...but is it ELF, and is it already stripped? | ||
| 876 | if elf_file & 1: | ||
| 877 | if elf_file & 2: | ||
| 878 | if 'already-stripped' in (d.getVar('INSANE_SKIP:' + pn) or "").split(): | ||
| 879 | bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn)) | ||
| 880 | else: | ||
| 881 | msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn) | ||
| 882 | oe.qa.handle_error("already-stripped", msg, d) | ||
| 883 | continue | ||
| 884 | |||
| 885 | # At this point we have an unstripped elf file. We need to: | ||
| 886 | # a) Make sure any file we strip is not hardlinked to anything else outside this tree | ||
| 887 | # b) Only strip any hardlinked file once (no races) | ||
| 888 | # c) Track any hardlinks between files so that we can reconstruct matching debug file hardlinks | ||
| 889 | |||
| 890 | # Use a reference of device ID and inode number to identify files | ||
| 891 | file_reference = checkelf[file][1] | ||
| 892 | if file_reference in inodes: | ||
| 893 | os.unlink(file) | ||
| 894 | os.link(inodes[file_reference][0], file) | ||
| 895 | inodes[file_reference].append(file) | ||
| 896 | else: | ||
| 897 | inodes[file_reference] = [file] | ||
| 898 | # break hardlink | ||
| 899 | bb.utils.break_hardlinks(file) | ||
| 900 | elffiles[file] = elf_file | ||
| 901 | # Modified the file so clear the cache | ||
| 902 | cpath.updatecache(file) | ||
| 903 | |||
| 904 | def strip_pkgd_prefix(f): | ||
| 905 | nonlocal dvar | ||
| 906 | |||
| 907 | if f.startswith(dvar): | ||
| 908 | return f[len(dvar):] | ||
| 909 | |||
| 910 | return f | ||
| 911 | |||
| 912 | # | ||
| 913 | # First lets process debug splitting | ||
| 914 | # | ||
| 915 | if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'): | ||
| 916 | results = oe.utils.multiprocess_launch(splitdebuginfo, list(elffiles), d, extraargs=(dvar, dv, d)) | ||
| 917 | |||
| 918 | if dv["srcdir"] and not hostos.startswith("mingw"): | ||
| 919 | if (d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'): | ||
| 920 | results = oe.utils.multiprocess_launch(splitstaticdebuginfo, staticlibs, d, extraargs=(dvar, dv, d)) | ||
| 921 | else: | ||
| 922 | for file in staticlibs: | ||
| 923 | results.append( (file,source_info(file, d)) ) | ||
| 924 | |||
| 925 | d.setVar("PKGDEBUGSOURCES", {strip_pkgd_prefix(f): sorted(s) for f, s in results}) | ||
| 926 | |||
| 927 | sources = set() | ||
| 928 | for r in results: | ||
| 929 | sources.update(r[1]) | ||
| 930 | |||
| 931 | # Hardlink our debug symbols to the other hardlink copies | ||
| 932 | for ref in inodes: | ||
| 933 | if len(inodes[ref]) == 1: | ||
| 934 | continue | ||
| 935 | |||
| 936 | target = inodes[ref][0][len(dvar):] | ||
| 937 | for file in inodes[ref][1:]: | ||
| 938 | src = file[len(dvar):] | ||
| 939 | dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(target) + dv["append"] | ||
| 940 | fpath = dvar + dest | ||
| 941 | ftarget = dvar + dv["libdir"] + os.path.dirname(target) + dv["dir"] + "/" + os.path.basename(target) + dv["append"] | ||
| 942 | bb.utils.mkdirhier(os.path.dirname(fpath)) | ||
| 943 | # Only one hardlink of separated debug info file in each directory | ||
| 944 | if not os.access(fpath, os.R_OK): | ||
| 945 | #bb.note("Link %s -> %s" % (fpath, ftarget)) | ||
| 946 | os.link(ftarget, fpath) | ||
| 947 | |||
| 948 | # Create symlinks for all cases we were able to split symbols | ||
| 949 | for file in symlinks: | ||
| 950 | src = file[len(dvar):] | ||
| 951 | dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"] | ||
| 952 | fpath = dvar + dest | ||
| 953 | # Skip it if the target doesn't exist | ||
| 954 | try: | ||
| 955 | s = os.stat(fpath) | ||
| 956 | except OSError as e: | ||
| 957 | (err, strerror) = e.args | ||
| 958 | if err != errno.ENOENT: | ||
| 959 | raise | ||
| 960 | continue | ||
| 961 | |||
| 962 | ltarget = symlinks[file] | ||
| 963 | lpath = os.path.dirname(ltarget) | ||
| 964 | lbase = os.path.basename(ltarget) | ||
| 965 | ftarget = "" | ||
| 966 | if lpath and lpath != ".": | ||
| 967 | ftarget += lpath + dv["dir"] + "/" | ||
| 968 | ftarget += lbase + dv["append"] | ||
| 969 | if lpath.startswith(".."): | ||
| 970 | ftarget = os.path.join("..", ftarget) | ||
| 971 | bb.utils.mkdirhier(os.path.dirname(fpath)) | ||
| 972 | #bb.note("Symlink %s -> %s" % (fpath, ftarget)) | ||
| 973 | os.symlink(ftarget, fpath) | ||
| 974 | |||
| 975 | # Process the dv["srcdir"] if requested... | ||
| 976 | # This copies and places the referenced sources for later debugging... | ||
| 977 | copydebugsources(dv["srcdir"], sources, d) | ||
| 978 | # | ||
| 979 | # End of debug splitting | ||
| 980 | # | ||
| 981 | |||
| 982 | # | ||
| 983 | # Now lets go back over things and strip them | ||
| 984 | # | ||
| 985 | if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1'): | ||
| 986 | strip = d.getVar("STRIP") | ||
| 987 | sfiles = [] | ||
| 988 | for file in elffiles: | ||
| 989 | elf_file = int(elffiles[file]) | ||
| 990 | #bb.note("Strip %s" % file) | ||
| 991 | sfiles.append((file, elf_file, strip)) | ||
| 992 | if (d.getVar('PACKAGE_STRIP_STATIC') == '1' or d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'): | ||
| 993 | for f in staticlibs: | ||
| 994 | sfiles.append((f, 16, strip)) | ||
| 995 | |||
| 996 | oe.utils.multiprocess_launch(oe.package.runstrip, sfiles, d) | ||
| 997 | |||
| 998 | # Build "minidebuginfo" and reinject it back into the stripped binaries | ||
| 999 | if d.getVar('PACKAGE_MINIDEBUGINFO') == '1': | ||
| 1000 | oe.utils.multiprocess_launch(inject_minidebuginfo, list(elffiles), d, | ||
| 1001 | extraargs=(dvar, dv, d)) | ||
| 1002 | |||
| 1003 | # | ||
| 1004 | # End of strip | ||
| 1005 | # | ||
| 1006 | os.chdir(oldcwd) | ||
| 1007 | } | 374 | } |
| 1008 | 375 | ||
| 1009 | python populate_packages () { | 376 | python populate_packages () { |
| 1010 | import glob, re | 377 | oe.package.populate_packages(d) |
| 1011 | |||
| 1012 | workdir = d.getVar('WORKDIR') | ||
| 1013 | outdir = d.getVar('DEPLOY_DIR') | ||
| 1014 | dvar = d.getVar('PKGD') | ||
| 1015 | packages = d.getVar('PACKAGES').split() | ||
| 1016 | pn = d.getVar('PN') | ||
| 1017 | |||
| 1018 | bb.utils.mkdirhier(outdir) | ||
| 1019 | os.chdir(dvar) | ||
| 1020 | |||
| 1021 | autodebug = not (d.getVar("NOAUTOPACKAGEDEBUG") or False) | ||
| 1022 | |||
| 1023 | split_source_package = (d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg') | ||
| 1024 | |||
| 1025 | # If debug-with-srcpkg mode is enabled then add the source package if it | ||
| 1026 | # doesn't exist and add the source file contents to the source package. | ||
| 1027 | if split_source_package: | ||
| 1028 | src_package_name = ('%s-src' % d.getVar('PN')) | ||
| 1029 | if not src_package_name in packages: | ||
| 1030 | packages.append(src_package_name) | ||
| 1031 | d.setVar('FILES:%s' % src_package_name, '/usr/src/debug') | ||
| 1032 | |||
| 1033 | # Sanity check PACKAGES for duplicates | ||
| 1034 | # Sanity should be moved to sanity.bbclass once we have the infrastructure | ||
| 1035 | package_dict = {} | ||
| 1036 | |||
| 1037 | for i, pkg in enumerate(packages): | ||
| 1038 | if pkg in package_dict: | ||
| 1039 | msg = "%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg | ||
| 1040 | oe.qa.handle_error("packages-list", msg, d) | ||
| 1041 | # Ensure the source package gets the chance to pick up the source files | ||
| 1042 | # before the debug package by ordering it first in PACKAGES. Whether it | ||
| 1043 | # actually picks up any source files is controlled by | ||
| 1044 | # PACKAGE_DEBUG_SPLIT_STYLE. | ||
| 1045 | elif pkg.endswith("-src"): | ||
| 1046 | package_dict[pkg] = (10, i) | ||
| 1047 | elif autodebug and pkg.endswith("-dbg"): | ||
| 1048 | package_dict[pkg] = (30, i) | ||
| 1049 | else: | ||
| 1050 | package_dict[pkg] = (50, i) | ||
| 1051 | packages = sorted(package_dict.keys(), key=package_dict.get) | ||
| 1052 | d.setVar('PACKAGES', ' '.join(packages)) | ||
| 1053 | pkgdest = d.getVar('PKGDEST') | ||
| 1054 | |||
| 1055 | seen = [] | ||
| 1056 | |||
| 1057 | # os.mkdir masks the permissions with umask so we have to unset it first | ||
| 1058 | oldumask = os.umask(0) | ||
| 1059 | |||
| 1060 | debug = [] | ||
| 1061 | for root, dirs, files in cpath.walk(dvar): | ||
| 1062 | dir = root[len(dvar):] | ||
| 1063 | if not dir: | ||
| 1064 | dir = os.sep | ||
| 1065 | for f in (files + dirs): | ||
| 1066 | path = "." + os.path.join(dir, f) | ||
| 1067 | if "/.debug/" in path or "/.debug-static/" in path or path.endswith("/.debug"): | ||
| 1068 | debug.append(path) | ||
| 1069 | |||
| 1070 | for pkg in packages: | ||
| 1071 | root = os.path.join(pkgdest, pkg) | ||
| 1072 | bb.utils.mkdirhier(root) | ||
| 1073 | |||
| 1074 | filesvar = d.getVar('FILES:%s' % pkg) or "" | ||
| 1075 | if "//" in filesvar: | ||
| 1076 | msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg | ||
| 1077 | oe.qa.handle_error("files-invalid", msg, d) | ||
| 1078 | filesvar.replace("//", "/") | ||
| 1079 | |||
| 1080 | origfiles = filesvar.split() | ||
| 1081 | files, symlink_paths = oe.package.files_from_filevars(origfiles) | ||
| 1082 | |||
| 1083 | if autodebug and pkg.endswith("-dbg"): | ||
| 1084 | files.extend(debug) | ||
| 1085 | |||
| 1086 | for file in files: | ||
| 1087 | if (not cpath.islink(file)) and (not cpath.exists(file)): | ||
| 1088 | continue | ||
| 1089 | if file in seen: | ||
| 1090 | continue | ||
| 1091 | seen.append(file) | ||
| 1092 | |||
| 1093 | def mkdir(src, dest, p): | ||
| 1094 | src = os.path.join(src, p) | ||
| 1095 | dest = os.path.join(dest, p) | ||
| 1096 | fstat = cpath.stat(src) | ||
| 1097 | os.mkdir(dest) | ||
| 1098 | os.chmod(dest, fstat.st_mode) | ||
| 1099 | os.chown(dest, fstat.st_uid, fstat.st_gid) | ||
| 1100 | if p not in seen: | ||
| 1101 | seen.append(p) | ||
| 1102 | cpath.updatecache(dest) | ||
| 1103 | |||
| 1104 | def mkdir_recurse(src, dest, paths): | ||
| 1105 | if cpath.exists(dest + '/' + paths): | ||
| 1106 | return | ||
| 1107 | while paths.startswith("./"): | ||
| 1108 | paths = paths[2:] | ||
| 1109 | p = "." | ||
| 1110 | for c in paths.split("/"): | ||
| 1111 | p = os.path.join(p, c) | ||
| 1112 | if not cpath.exists(os.path.join(dest, p)): | ||
| 1113 | mkdir(src, dest, p) | ||
| 1114 | |||
| 1115 | if cpath.isdir(file) and not cpath.islink(file): | ||
| 1116 | mkdir_recurse(dvar, root, file) | ||
| 1117 | continue | ||
| 1118 | |||
| 1119 | mkdir_recurse(dvar, root, os.path.dirname(file)) | ||
| 1120 | fpath = os.path.join(root,file) | ||
| 1121 | if not cpath.islink(file): | ||
| 1122 | os.link(file, fpath) | ||
| 1123 | continue | ||
| 1124 | ret = bb.utils.copyfile(file, fpath) | ||
| 1125 | if ret is False or ret == 0: | ||
| 1126 | bb.fatal("File population failed") | ||
| 1127 | |||
| 1128 | # Check if symlink paths exist | ||
| 1129 | for file in symlink_paths: | ||
| 1130 | if not os.path.exists(os.path.join(root,file)): | ||
| 1131 | bb.fatal("File '%s' cannot be packaged into '%s' because its " | ||
| 1132 | "parent directory structure does not exist. One of " | ||
| 1133 | "its parent directories is a symlink whose target " | ||
| 1134 | "directory is not included in the package." % | ||
| 1135 | (file, pkg)) | ||
| 1136 | |||
| 1137 | os.umask(oldumask) | ||
| 1138 | os.chdir(workdir) | ||
| 1139 | |||
| 1140 | # Handle excluding packages with incompatible licenses | ||
| 1141 | package_list = [] | ||
| 1142 | for pkg in packages: | ||
| 1143 | licenses = d.getVar('_exclude_incompatible-' + pkg) | ||
| 1144 | if licenses: | ||
| 1145 | msg = "Excluding %s from packaging as it has incompatible license(s): %s" % (pkg, licenses) | ||
| 1146 | oe.qa.handle_error("incompatible-license", msg, d) | ||
| 1147 | else: | ||
| 1148 | package_list.append(pkg) | ||
| 1149 | d.setVar('PACKAGES', ' '.join(package_list)) | ||
| 1150 | |||
| 1151 | unshipped = [] | ||
| 1152 | for root, dirs, files in cpath.walk(dvar): | ||
| 1153 | dir = root[len(dvar):] | ||
| 1154 | if not dir: | ||
| 1155 | dir = os.sep | ||
| 1156 | for f in (files + dirs): | ||
| 1157 | path = os.path.join(dir, f) | ||
| 1158 | if ('.' + path) not in seen: | ||
| 1159 | unshipped.append(path) | ||
| 1160 | |||
| 1161 | if unshipped != []: | ||
| 1162 | msg = pn + ": Files/directories were installed but not shipped in any package:" | ||
| 1163 | if "installed-vs-shipped" in (d.getVar('INSANE_SKIP:' + pn) or "").split(): | ||
| 1164 | bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn) | ||
| 1165 | else: | ||
| 1166 | for f in unshipped: | ||
| 1167 | msg = msg + "\n " + f | ||
| 1168 | msg = msg + "\nPlease set FILES such that these items are packaged. Alternatively if they are unneeded, avoid installing them or delete them within do_install.\n" | ||
| 1169 | msg = msg + "%s: %d installed and not shipped files." % (pn, len(unshipped)) | ||
| 1170 | oe.qa.handle_error("installed-vs-shipped", msg, d) | ||
| 1171 | } | 378 | } |
| 1172 | populate_packages[dirs] = "${D}" | 379 | populate_packages[dirs] = "${D}" |
| 1173 | 380 | ||
| 1174 | python package_fixsymlinks () { | 381 | python package_fixsymlinks () { |
| 1175 | import errno | 382 | oe.package.process_fixsymlinks(pkgfiles, d) |
| 1176 | pkgdest = d.getVar('PKGDEST') | ||
| 1177 | packages = d.getVar("PACKAGES", False).split() | ||
| 1178 | |||
| 1179 | dangling_links = {} | ||
| 1180 | pkg_files = {} | ||
| 1181 | for pkg in packages: | ||
| 1182 | dangling_links[pkg] = [] | ||
| 1183 | pkg_files[pkg] = [] | ||
| 1184 | inst_root = os.path.join(pkgdest, pkg) | ||
| 1185 | for path in pkgfiles[pkg]: | ||
| 1186 | rpath = path[len(inst_root):] | ||
| 1187 | pkg_files[pkg].append(rpath) | ||
| 1188 | rtarget = cpath.realpath(path, inst_root, True, assume_dir = True) | ||
| 1189 | if not cpath.lexists(rtarget): | ||
| 1190 | dangling_links[pkg].append(os.path.normpath(rtarget[len(inst_root):])) | ||
| 1191 | |||
| 1192 | newrdepends = {} | ||
| 1193 | for pkg in dangling_links: | ||
| 1194 | for l in dangling_links[pkg]: | ||
| 1195 | found = False | ||
| 1196 | bb.debug(1, "%s contains dangling link %s" % (pkg, l)) | ||
| 1197 | for p in packages: | ||
| 1198 | if l in pkg_files[p]: | ||
| 1199 | found = True | ||
| 1200 | bb.debug(1, "target found in %s" % p) | ||
| 1201 | if p == pkg: | ||
| 1202 | break | ||
| 1203 | if pkg not in newrdepends: | ||
| 1204 | newrdepends[pkg] = [] | ||
| 1205 | newrdepends[pkg].append(p) | ||
| 1206 | break | ||
| 1207 | if found == False: | ||
| 1208 | bb.note("%s contains dangling symlink to %s" % (pkg, l)) | ||
| 1209 | |||
| 1210 | for pkg in newrdepends: | ||
| 1211 | rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS:' + pkg) or "") | ||
| 1212 | for p in newrdepends[pkg]: | ||
| 1213 | if p not in rdepends: | ||
| 1214 | rdepends[p] = [] | ||
| 1215 | d.setVar('RDEPENDS:' + pkg, bb.utils.join_deps(rdepends, commasep=False)) | ||
| 1216 | } | 383 | } |
| 1217 | 384 | ||
| 1218 | |||
| 1219 | python package_package_name_hook() { | 385 | python package_package_name_hook() { |
| 1220 | """ | 386 | """ |
| 1221 | A package_name_hook function can be used to rewrite the package names by | 387 | A package_name_hook function can be used to rewrite the package names by |
| @@ -1245,456 +411,23 @@ fi | |||
| 1245 | 411 | ||
| 1246 | RPMDEPS = "${STAGING_LIBDIR_NATIVE}/rpm/rpmdeps --alldeps --define '__font_provides %{nil}'" | 412 | RPMDEPS = "${STAGING_LIBDIR_NATIVE}/rpm/rpmdeps --alldeps --define '__font_provides %{nil}'" |
| 1247 | 413 | ||
| 1248 | # Collect perfile run-time dependency metadata | ||
| 1249 | # Output: | ||
| 1250 | # FILERPROVIDESFLIST:pkg - list of all files w/ deps | ||
| 1251 | # FILERPROVIDES:filepath:pkg - per file dep | ||
| 1252 | # | ||
| 1253 | # FILERDEPENDSFLIST:pkg - list of all files w/ deps | ||
| 1254 | # FILERDEPENDS:filepath:pkg - per file dep | ||
| 1255 | |||
| 1256 | python package_do_filedeps() { | 414 | python package_do_filedeps() { |
| 1257 | if d.getVar('SKIP_FILEDEPS') == '1': | 415 | oe.package.process_filedeps(pkgfiles, d) |
| 1258 | return | ||
| 1259 | |||
| 1260 | pkgdest = d.getVar('PKGDEST') | ||
| 1261 | packages = d.getVar('PACKAGES') | ||
| 1262 | rpmdeps = d.getVar('RPMDEPS') | ||
| 1263 | |||
| 1264 | def chunks(files, n): | ||
| 1265 | return [files[i:i+n] for i in range(0, len(files), n)] | ||
| 1266 | |||
| 1267 | pkglist = [] | ||
| 1268 | for pkg in packages.split(): | ||
| 1269 | if d.getVar('SKIP_FILEDEPS:' + pkg) == '1': | ||
| 1270 | continue | ||
| 1271 | if pkg.endswith('-dbg') or pkg.endswith('-doc') or pkg.find('-locale-') != -1 or pkg.find('-localedata-') != -1 or pkg.find('-gconv-') != -1 or pkg.find('-charmap-') != -1 or pkg.startswith('kernel-module-') or pkg.endswith('-src'): | ||
| 1272 | continue | ||
| 1273 | for files in chunks(pkgfiles[pkg], 100): | ||
| 1274 | pkglist.append((pkg, files, rpmdeps, pkgdest)) | ||
| 1275 | |||
| 1276 | processed = oe.utils.multiprocess_launch(oe.package.filedeprunner, pkglist, d) | ||
| 1277 | |||
| 1278 | provides_files = {} | ||
| 1279 | requires_files = {} | ||
| 1280 | |||
| 1281 | for result in processed: | ||
| 1282 | (pkg, provides, requires) = result | ||
| 1283 | |||
| 1284 | if pkg not in provides_files: | ||
| 1285 | provides_files[pkg] = [] | ||
| 1286 | if pkg not in requires_files: | ||
| 1287 | requires_files[pkg] = [] | ||
| 1288 | |||
| 1289 | for file in sorted(provides): | ||
| 1290 | provides_files[pkg].append(file) | ||
| 1291 | key = "FILERPROVIDES:" + file + ":" + pkg | ||
| 1292 | d.appendVar(key, " " + " ".join(provides[file])) | ||
| 1293 | |||
| 1294 | for file in sorted(requires): | ||
| 1295 | requires_files[pkg].append(file) | ||
| 1296 | key = "FILERDEPENDS:" + file + ":" + pkg | ||
| 1297 | d.appendVar(key, " " + " ".join(requires[file])) | ||
| 1298 | |||
| 1299 | for pkg in requires_files: | ||
| 1300 | d.setVar("FILERDEPENDSFLIST:" + pkg, " ".join(sorted(requires_files[pkg]))) | ||
| 1301 | for pkg in provides_files: | ||
| 1302 | d.setVar("FILERPROVIDESFLIST:" + pkg, " ".join(sorted(provides_files[pkg]))) | ||
| 1303 | } | 416 | } |
| 1304 | 417 | ||
| 1305 | SHLIBSDIRS = "${WORKDIR_PKGDATA}/${MLPREFIX}shlibs2" | 418 | SHLIBSDIRS = "${WORKDIR_PKGDATA}/${MLPREFIX}shlibs2" |
| 1306 | SHLIBSWORKDIR = "${PKGDESTWORK}/${MLPREFIX}shlibs2" | 419 | SHLIBSWORKDIR = "${PKGDESTWORK}/${MLPREFIX}shlibs2" |
| 1307 | 420 | ||
| 1308 | python package_do_shlibs() { | 421 | python package_do_shlibs() { |
| 1309 | import itertools | 422 | oe.package.process_shlibs(pkgfiles, d) |
| 1310 | import re, pipes | ||
| 1311 | import subprocess | ||
| 1312 | |||
| 1313 | exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', False) | ||
| 1314 | if exclude_shlibs: | ||
| 1315 | bb.note("not generating shlibs") | ||
| 1316 | return | ||
| 1317 | |||
| 1318 | lib_re = re.compile(r"^.*\.so") | ||
| 1319 | libdir_re = re.compile(r".*/%s$" % d.getVar('baselib')) | ||
| 1320 | |||
| 1321 | packages = d.getVar('PACKAGES') | ||
| 1322 | |||
| 1323 | shlib_pkgs = [] | ||
| 1324 | exclusion_list = d.getVar("EXCLUDE_PACKAGES_FROM_SHLIBS") | ||
| 1325 | if exclusion_list: | ||
| 1326 | for pkg in packages.split(): | ||
| 1327 | if pkg not in exclusion_list.split(): | ||
| 1328 | shlib_pkgs.append(pkg) | ||
| 1329 | else: | ||
| 1330 | bb.note("not generating shlibs for %s" % pkg) | ||
| 1331 | else: | ||
| 1332 | shlib_pkgs = packages.split() | ||
| 1333 | |||
| 1334 | hostos = d.getVar('HOST_OS') | ||
| 1335 | |||
| 1336 | workdir = d.getVar('WORKDIR') | ||
| 1337 | |||
| 1338 | ver = d.getVar('PKGV') | ||
| 1339 | if not ver: | ||
| 1340 | msg = "PKGV not defined" | ||
| 1341 | oe.qa.handle_error("pkgv-undefined", msg, d) | ||
| 1342 | return | ||
| 1343 | |||
| 1344 | pkgdest = d.getVar('PKGDEST') | ||
| 1345 | |||
| 1346 | shlibswork_dir = d.getVar('SHLIBSWORKDIR') | ||
| 1347 | |||
| 1348 | def linux_so(file, pkg, pkgver, d): | ||
| 1349 | needs_ldconfig = False | ||
| 1350 | needed = set() | ||
| 1351 | sonames = set() | ||
| 1352 | renames = [] | ||
| 1353 | ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '') | ||
| 1354 | cmd = d.getVar('OBJDUMP') + " -p " + pipes.quote(file) + " 2>/dev/null" | ||
| 1355 | fd = os.popen(cmd) | ||
| 1356 | lines = fd.readlines() | ||
| 1357 | fd.close() | ||
| 1358 | rpath = tuple() | ||
| 1359 | for l in lines: | ||
| 1360 | m = re.match(r"\s+RPATH\s+([^\s]*)", l) | ||
| 1361 | if m: | ||
| 1362 | rpaths = m.group(1).replace("$ORIGIN", ldir).split(":") | ||
| 1363 | rpath = tuple(map(os.path.normpath, rpaths)) | ||
| 1364 | for l in lines: | ||
| 1365 | m = re.match(r"\s+NEEDED\s+([^\s]*)", l) | ||
| 1366 | if m: | ||
| 1367 | dep = m.group(1) | ||
| 1368 | if dep not in needed: | ||
| 1369 | needed.add((dep, file, rpath)) | ||
| 1370 | m = re.match(r"\s+SONAME\s+([^\s]*)", l) | ||
| 1371 | if m: | ||
| 1372 | this_soname = m.group(1) | ||
| 1373 | prov = (this_soname, ldir, pkgver) | ||
| 1374 | if not prov in sonames: | ||
| 1375 | # if library is private (only used by package) then do not build shlib for it | ||
| 1376 | import fnmatch | ||
| 1377 | if not private_libs or len([i for i in private_libs if fnmatch.fnmatch(this_soname, i)]) == 0: | ||
| 1378 | sonames.add(prov) | ||
| 1379 | if libdir_re.match(os.path.dirname(file)): | ||
| 1380 | needs_ldconfig = True | ||
| 1381 | if needs_ldconfig and snap_symlinks and (os.path.basename(file) != this_soname): | ||
| 1382 | renames.append((file, os.path.join(os.path.dirname(file), this_soname))) | ||
| 1383 | return (needs_ldconfig, needed, sonames, renames) | ||
| 1384 | |||
| 1385 | def darwin_so(file, needed, sonames, renames, pkgver): | ||
| 1386 | if not os.path.exists(file): | ||
| 1387 | return | ||
| 1388 | ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '') | ||
| 1389 | |||
| 1390 | def get_combinations(base): | ||
| 1391 | # | ||
| 1392 | # Given a base library name, find all combinations of this split by "." and "-" | ||
| 1393 | # | ||
| 1394 | combos = [] | ||
| 1395 | options = base.split(".") | ||
| 1396 | for i in range(1, len(options) + 1): | ||
| 1397 | combos.append(".".join(options[0:i])) | ||
| 1398 | options = base.split("-") | ||
| 1399 | for i in range(1, len(options) + 1): | ||
| 1400 | combos.append("-".join(options[0:i])) | ||
| 1401 | return combos | ||
| 1402 | |||
| 1403 | if (file.endswith('.dylib') or file.endswith('.so')) and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.endswith('-src'): | ||
| 1404 | # Drop suffix | ||
| 1405 | name = os.path.basename(file).rsplit(".",1)[0] | ||
| 1406 | # Find all combinations | ||
| 1407 | combos = get_combinations(name) | ||
| 1408 | for combo in combos: | ||
| 1409 | if not combo in sonames: | ||
| 1410 | prov = (combo, ldir, pkgver) | ||
| 1411 | sonames.add(prov) | ||
| 1412 | if file.endswith('.dylib') or file.endswith('.so'): | ||
| 1413 | rpath = [] | ||
| 1414 | p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-l', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE) | ||
| 1415 | out, err = p.communicate() | ||
| 1416 | # If returned successfully, process stdout for results | ||
| 1417 | if p.returncode == 0: | ||
| 1418 | for l in out.split("\n"): | ||
| 1419 | l = l.strip() | ||
| 1420 | if l.startswith('path '): | ||
| 1421 | rpath.append(l.split()[1]) | ||
| 1422 | |||
| 1423 | p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-L', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE) | ||
| 1424 | out, err = p.communicate() | ||
| 1425 | # If returned successfully, process stdout for results | ||
| 1426 | if p.returncode == 0: | ||
| 1427 | for l in out.split("\n"): | ||
| 1428 | l = l.strip() | ||
| 1429 | if not l or l.endswith(":"): | ||
| 1430 | continue | ||
| 1431 | if "is not an object file" in l: | ||
| 1432 | continue | ||
| 1433 | name = os.path.basename(l.split()[0]).rsplit(".", 1)[0] | ||
| 1434 | if name and name not in needed[pkg]: | ||
| 1435 | needed[pkg].add((name, file, tuple())) | ||
| 1436 | |||
| 1437 | def mingw_dll(file, needed, sonames, renames, pkgver): | ||
| 1438 | if not os.path.exists(file): | ||
| 1439 | return | ||
| 1440 | |||
| 1441 | if file.endswith(".dll"): | ||
| 1442 | # assume all dlls are shared objects provided by the package | ||
| 1443 | sonames.add((os.path.basename(file), os.path.dirname(file).replace(pkgdest + "/" + pkg, ''), pkgver)) | ||
| 1444 | |||
| 1445 | if (file.endswith(".dll") or file.endswith(".exe")): | ||
| 1446 | # use objdump to search for "DLL Name: .*\.dll" | ||
| 1447 | p = subprocess.Popen([d.expand("${HOST_PREFIX}objdump"), "-p", file], stdout=subprocess.PIPE, stderr=subprocess.PIPE) | ||
| 1448 | out, err = p.communicate() | ||
| 1449 | # process the output, grabbing all .dll names | ||
| 1450 | if p.returncode == 0: | ||
| 1451 | for m in re.finditer(r"DLL Name: (.*?\.dll)$", out.decode(), re.MULTILINE | re.IGNORECASE): | ||
| 1452 | dllname = m.group(1) | ||
| 1453 | if dllname: | ||
| 1454 | needed[pkg].add((dllname, file, tuple())) | ||
| 1455 | |||
| 1456 | if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS') == "1": | ||
| 1457 | snap_symlinks = True | ||
| 1458 | else: | ||
| 1459 | snap_symlinks = False | ||
| 1460 | |||
| 1461 | needed = {} | ||
| 1462 | |||
| 1463 | shlib_provider = oe.package.read_shlib_providers(d) | ||
| 1464 | |||
| 1465 | for pkg in shlib_pkgs: | ||
| 1466 | private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or "" | ||
| 1467 | private_libs = private_libs.split() | ||
| 1468 | needs_ldconfig = False | ||
| 1469 | bb.debug(2, "calculating shlib provides for %s" % pkg) | ||
| 1470 | |||
| 1471 | pkgver = d.getVar('PKGV:' + pkg) | ||
| 1472 | if not pkgver: | ||
| 1473 | pkgver = d.getVar('PV_' + pkg) | ||
| 1474 | if not pkgver: | ||
| 1475 | pkgver = ver | ||
| 1476 | |||
| 1477 | needed[pkg] = set() | ||
| 1478 | sonames = set() | ||
| 1479 | renames = [] | ||
| 1480 | linuxlist = [] | ||
| 1481 | for file in pkgfiles[pkg]: | ||
| 1482 | soname = None | ||
| 1483 | if cpath.islink(file): | ||
| 1484 | continue | ||
| 1485 | if hostos == "darwin" or hostos == "darwin8": | ||
| 1486 | darwin_so(file, needed, sonames, renames, pkgver) | ||
| 1487 | elif hostos.startswith("mingw"): | ||
| 1488 | mingw_dll(file, needed, sonames, renames, pkgver) | ||
| 1489 | elif os.access(file, os.X_OK) or lib_re.match(file): | ||
| 1490 | linuxlist.append(file) | ||
| 1491 | |||
| 1492 | if linuxlist: | ||
| 1493 | results = oe.utils.multiprocess_launch(linux_so, linuxlist, d, extraargs=(pkg, pkgver, d)) | ||
| 1494 | for r in results: | ||
| 1495 | ldconfig = r[0] | ||
| 1496 | needed[pkg] |= r[1] | ||
| 1497 | sonames |= r[2] | ||
| 1498 | renames.extend(r[3]) | ||
| 1499 | needs_ldconfig = needs_ldconfig or ldconfig | ||
| 1500 | |||
| 1501 | for (old, new) in renames: | ||
| 1502 | bb.note("Renaming %s to %s" % (old, new)) | ||
| 1503 | bb.utils.rename(old, new) | ||
| 1504 | pkgfiles[pkg].remove(old) | ||
| 1505 | |||
| 1506 | shlibs_file = os.path.join(shlibswork_dir, pkg + ".list") | ||
| 1507 | if len(sonames): | ||
| 1508 | with open(shlibs_file, 'w') as fd: | ||
| 1509 | for s in sorted(sonames): | ||
| 1510 | if s[0] in shlib_provider and s[1] in shlib_provider[s[0]]: | ||
| 1511 | (old_pkg, old_pkgver) = shlib_provider[s[0]][s[1]] | ||
| 1512 | if old_pkg != pkg: | ||
| 1513 | bb.warn('%s-%s was registered as shlib provider for %s, changing it to %s-%s because it was built later' % (old_pkg, old_pkgver, s[0], pkg, pkgver)) | ||
| 1514 | bb.debug(1, 'registering %s-%s as shlib provider for %s' % (pkg, pkgver, s[0])) | ||
| 1515 | fd.write(s[0] + ':' + s[1] + ':' + s[2] + '\n') | ||
| 1516 | if s[0] not in shlib_provider: | ||
| 1517 | shlib_provider[s[0]] = {} | ||
| 1518 | shlib_provider[s[0]][s[1]] = (pkg, pkgver) | ||
| 1519 | if needs_ldconfig: | ||
| 1520 | bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg) | ||
| 1521 | postinst = d.getVar('pkg_postinst:%s' % pkg) | ||
| 1522 | if not postinst: | ||
| 1523 | postinst = '#!/bin/sh\n' | ||
| 1524 | postinst += d.getVar('ldconfig_postinst_fragment') | ||
| 1525 | d.setVar('pkg_postinst:%s' % pkg, postinst) | ||
| 1526 | bb.debug(1, 'LIBNAMES: pkg %s sonames %s' % (pkg, sonames)) | ||
| 1527 | |||
| 1528 | assumed_libs = d.getVar('ASSUME_SHLIBS') | ||
| 1529 | if assumed_libs: | ||
| 1530 | libdir = d.getVar("libdir") | ||
| 1531 | for e in assumed_libs.split(): | ||
| 1532 | l, dep_pkg = e.split(":") | ||
| 1533 | lib_ver = None | ||
| 1534 | dep_pkg = dep_pkg.rsplit("_", 1) | ||
| 1535 | if len(dep_pkg) == 2: | ||
| 1536 | lib_ver = dep_pkg[1] | ||
| 1537 | dep_pkg = dep_pkg[0] | ||
| 1538 | if l not in shlib_provider: | ||
| 1539 | shlib_provider[l] = {} | ||
| 1540 | shlib_provider[l][libdir] = (dep_pkg, lib_ver) | ||
| 1541 | |||
| 1542 | libsearchpath = [d.getVar('libdir'), d.getVar('base_libdir')] | ||
| 1543 | |||
| 1544 | for pkg in shlib_pkgs: | ||
| 1545 | bb.debug(2, "calculating shlib requirements for %s" % pkg) | ||
| 1546 | |||
| 1547 | private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or "" | ||
| 1548 | private_libs = private_libs.split() | ||
| 1549 | |||
| 1550 | deps = list() | ||
| 1551 | for n in needed[pkg]: | ||
| 1552 | # if n is in private libraries, don't try to search provider for it | ||
| 1553 | # this could cause problem in case some abc.bb provides private | ||
| 1554 | # /opt/abc/lib/libfoo.so.1 and contains /usr/bin/abc depending on system library libfoo.so.1 | ||
| 1555 | # but skipping it is still better alternative than providing own | ||
| 1556 | # version and then adding runtime dependency for the same system library | ||
| 1557 | import fnmatch | ||
| 1558 | if private_libs and len([i for i in private_libs if fnmatch.fnmatch(n[0], i)]) > 0: | ||
| 1559 | bb.debug(2, '%s: Dependency %s covered by PRIVATE_LIBS' % (pkg, n[0])) | ||
| 1560 | continue | ||
| 1561 | if n[0] in shlib_provider.keys(): | ||
| 1562 | shlib_provider_map = shlib_provider[n[0]] | ||
| 1563 | matches = set() | ||
| 1564 | for p in itertools.chain(list(n[2]), sorted(shlib_provider_map.keys()), libsearchpath): | ||
| 1565 | if p in shlib_provider_map: | ||
| 1566 | matches.add(p) | ||
| 1567 | if len(matches) > 1: | ||
| 1568 | matchpkgs = ', '.join([shlib_provider_map[match][0] for match in matches]) | ||
| 1569 | bb.error("%s: Multiple shlib providers for %s: %s (used by files: %s)" % (pkg, n[0], matchpkgs, n[1])) | ||
| 1570 | elif len(matches) == 1: | ||
| 1571 | (dep_pkg, ver_needed) = shlib_provider_map[matches.pop()] | ||
| 1572 | |||
| 1573 | bb.debug(2, '%s: Dependency %s requires package %s (used by files: %s)' % (pkg, n[0], dep_pkg, n[1])) | ||
| 1574 | |||
| 1575 | if dep_pkg == pkg: | ||
| 1576 | continue | ||
| 1577 | |||
| 1578 | if ver_needed: | ||
| 1579 | dep = "%s (>= %s)" % (dep_pkg, ver_needed) | ||
| 1580 | else: | ||
| 1581 | dep = dep_pkg | ||
| 1582 | if not dep in deps: | ||
| 1583 | deps.append(dep) | ||
| 1584 | continue | ||
| 1585 | bb.note("Couldn't find shared library provider for %s, used by files: %s" % (n[0], n[1])) | ||
| 1586 | |||
| 1587 | deps_file = os.path.join(pkgdest, pkg + ".shlibdeps") | ||
| 1588 | if os.path.exists(deps_file): | ||
| 1589 | os.remove(deps_file) | ||
| 1590 | if deps: | ||
| 1591 | with open(deps_file, 'w') as fd: | ||
| 1592 | for dep in sorted(deps): | ||
| 1593 | fd.write(dep + '\n') | ||
| 1594 | } | 423 | } |
| 1595 | 424 | ||
| 1596 | python package_do_pkgconfig () { | 425 | python package_do_pkgconfig () { |
| 1597 | import re | 426 | oe.package.process_pkgconfig(pkgfiles, d) |
| 1598 | |||
| 1599 | packages = d.getVar('PACKAGES') | ||
| 1600 | workdir = d.getVar('WORKDIR') | ||
| 1601 | pkgdest = d.getVar('PKGDEST') | ||
| 1602 | |||
| 1603 | shlibs_dirs = d.getVar('SHLIBSDIRS').split() | ||
| 1604 | shlibswork_dir = d.getVar('SHLIBSWORKDIR') | ||
| 1605 | |||
| 1606 | pc_re = re.compile(r'(.*)\.pc$') | ||
| 1607 | var_re = re.compile(r'(.*)=(.*)') | ||
| 1608 | field_re = re.compile(r'(.*): (.*)') | ||
| 1609 | |||
| 1610 | pkgconfig_provided = {} | ||
| 1611 | pkgconfig_needed = {} | ||
| 1612 | for pkg in packages.split(): | ||
| 1613 | pkgconfig_provided[pkg] = [] | ||
| 1614 | pkgconfig_needed[pkg] = [] | ||
| 1615 | for file in sorted(pkgfiles[pkg]): | ||
| 1616 | m = pc_re.match(file) | ||
| 1617 | if m: | ||
| 1618 | pd = bb.data.init() | ||
| 1619 | name = m.group(1) | ||
| 1620 | pkgconfig_provided[pkg].append(os.path.basename(name)) | ||
| 1621 | if not os.access(file, os.R_OK): | ||
| 1622 | continue | ||
| 1623 | with open(file, 'r') as f: | ||
| 1624 | lines = f.readlines() | ||
| 1625 | for l in lines: | ||
| 1626 | m = var_re.match(l) | ||
| 1627 | if m: | ||
| 1628 | name = m.group(1) | ||
| 1629 | val = m.group(2) | ||
| 1630 | pd.setVar(name, pd.expand(val)) | ||
| 1631 | continue | ||
| 1632 | m = field_re.match(l) | ||
| 1633 | if m: | ||
| 1634 | hdr = m.group(1) | ||
| 1635 | exp = pd.expand(m.group(2)) | ||
| 1636 | if hdr == 'Requires': | ||
| 1637 | pkgconfig_needed[pkg] += exp.replace(',', ' ').split() | ||
| 1638 | |||
| 1639 | for pkg in packages.split(): | ||
| 1640 | pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist") | ||
| 1641 | if pkgconfig_provided[pkg] != []: | ||
| 1642 | with open(pkgs_file, 'w') as f: | ||
| 1643 | for p in sorted(pkgconfig_provided[pkg]): | ||
| 1644 | f.write('%s\n' % p) | ||
| 1645 | |||
| 1646 | # Go from least to most specific since the last one found wins | ||
| 1647 | for dir in reversed(shlibs_dirs): | ||
| 1648 | if not os.path.exists(dir): | ||
| 1649 | continue | ||
| 1650 | for file in sorted(os.listdir(dir)): | ||
| 1651 | m = re.match(r'^(.*)\.pclist$', file) | ||
| 1652 | if m: | ||
| 1653 | pkg = m.group(1) | ||
| 1654 | with open(os.path.join(dir, file)) as fd: | ||
| 1655 | lines = fd.readlines() | ||
| 1656 | pkgconfig_provided[pkg] = [] | ||
| 1657 | for l in lines: | ||
| 1658 | pkgconfig_provided[pkg].append(l.rstrip()) | ||
| 1659 | |||
| 1660 | for pkg in packages.split(): | ||
| 1661 | deps = [] | ||
| 1662 | for n in pkgconfig_needed[pkg]: | ||
| 1663 | found = False | ||
| 1664 | for k in pkgconfig_provided.keys(): | ||
| 1665 | if n in pkgconfig_provided[k]: | ||
| 1666 | if k != pkg and not (k in deps): | ||
| 1667 | deps.append(k) | ||
| 1668 | found = True | ||
| 1669 | if found == False: | ||
| 1670 | bb.note("couldn't find pkgconfig module '%s' in any package" % n) | ||
| 1671 | deps_file = os.path.join(pkgdest, pkg + ".pcdeps") | ||
| 1672 | if len(deps): | ||
| 1673 | with open(deps_file, 'w') as fd: | ||
| 1674 | for dep in deps: | ||
| 1675 | fd.write(dep + '\n') | ||
| 1676 | } | 427 | } |
| 1677 | 428 | ||
| 1678 | def read_libdep_files(d): | ||
| 1679 | pkglibdeps = {} | ||
| 1680 | packages = d.getVar('PACKAGES').split() | ||
| 1681 | for pkg in packages: | ||
| 1682 | pkglibdeps[pkg] = {} | ||
| 1683 | for extension in ".shlibdeps", ".pcdeps", ".clilibdeps": | ||
| 1684 | depsfile = d.expand("${PKGDEST}/" + pkg + extension) | ||
| 1685 | if os.access(depsfile, os.R_OK): | ||
| 1686 | with open(depsfile) as fd: | ||
| 1687 | lines = fd.readlines() | ||
| 1688 | for l in lines: | ||
| 1689 | l.rstrip() | ||
| 1690 | deps = bb.utils.explode_dep_versions2(l) | ||
| 1691 | for dep in deps: | ||
| 1692 | if not dep in pkglibdeps[pkg]: | ||
| 1693 | pkglibdeps[pkg][dep] = deps[dep] | ||
| 1694 | return pkglibdeps | ||
| 1695 | |||
| 1696 | python read_shlibdeps () { | 429 | python read_shlibdeps () { |
| 1697 | pkglibdeps = read_libdep_files(d) | 430 | pkglibdeps = oe.package.read_libdep_files(d) |
| 1698 | 431 | ||
| 1699 | packages = d.getVar('PACKAGES').split() | 432 | packages = d.getVar('PACKAGES').split() |
| 1700 | for pkg in packages: | 433 | for pkg in packages: |
| @@ -1710,125 +443,7 @@ python read_shlibdeps () { | |||
| 1710 | } | 443 | } |
| 1711 | 444 | ||
| 1712 | python package_depchains() { | 445 | python package_depchains() { |
| 1713 | """ | 446 | oe.package.process_depchains(pkgfiles, d) |
| 1714 | For a given set of prefix and postfix modifiers, make those packages | ||
| 1715 | RRECOMMENDS on the corresponding packages for its RDEPENDS. | ||
| 1716 | |||
| 1717 | Example: If package A depends upon package B, and A's .bb emits an | ||
| 1718 | A-dev package, this would make A-dev Recommends: B-dev. | ||
| 1719 | |||
| 1720 | If only one of a given suffix is specified, it will take the RRECOMMENDS | ||
| 1721 | based on the RDEPENDS of *all* other packages. If more than one of a given | ||
| 1722 | suffix is specified, its will only use the RDEPENDS of the single parent | ||
| 1723 | package. | ||
| 1724 | """ | ||
| 1725 | |||
| 1726 | packages = d.getVar('PACKAGES') | ||
| 1727 | postfixes = (d.getVar('DEPCHAIN_POST') or '').split() | ||
| 1728 | prefixes = (d.getVar('DEPCHAIN_PRE') or '').split() | ||
| 1729 | |||
| 1730 | def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d): | ||
| 1731 | |||
| 1732 | #bb.note('depends for %s is %s' % (base, depends)) | ||
| 1733 | rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "") | ||
| 1734 | |||
| 1735 | for depend in sorted(depends): | ||
| 1736 | if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'): | ||
| 1737 | #bb.note("Skipping %s" % depend) | ||
| 1738 | continue | ||
| 1739 | if depend.endswith('-dev'): | ||
| 1740 | depend = depend[:-4] | ||
| 1741 | if depend.endswith('-dbg'): | ||
| 1742 | depend = depend[:-4] | ||
| 1743 | pkgname = getname(depend, suffix) | ||
| 1744 | #bb.note("Adding %s for %s" % (pkgname, depend)) | ||
| 1745 | if pkgname not in rreclist and pkgname != pkg: | ||
| 1746 | rreclist[pkgname] = [] | ||
| 1747 | |||
| 1748 | #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist))) | ||
| 1749 | d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False)) | ||
| 1750 | |||
    def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):
        # Extend RRECOMMENDS:<pkg> from the runtime RDEPENDS in *rdepends*:
        # each runtime dependency B yields a recommendation on its
        # suffixed/prefixed counterpart, via *getname*.

        #bb.note('rdepends for %s is %s' % (base, rdepends))
        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "")

        # sorted() keeps the resulting variable deterministic across builds.
        for depend in sorted(rdepends):
            # Locale providers are virtual targets with no real variants.
            if depend.find('virtual-locale-') != -1:
                #bb.note("Skipping %s" % depend)
                continue
            # Strip a -dev/-dbg suffix so *suffix* is applied to the base name.
            if depend.endswith('-dev'):
                depend = depend[:-4]
            if depend.endswith('-dbg'):
                depend = depend[:-4]
            pkgname = getname(depend, suffix)
            #bb.note("Adding %s for %s" % (pkgname, depend))
            # Never recommend a package against itself; keep existing entries.
            if pkgname not in rreclist and pkgname != pkg:
                rreclist[pkgname] = []

        #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist)))
        d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))
| 1771 | |||
| 1772 | def add_dep(list, dep): | ||
| 1773 | if dep not in list: | ||
| 1774 | list.append(dep) | ||
| 1775 | |||
| 1776 | depends = [] | ||
| 1777 | for dep in bb.utils.explode_deps(d.getVar('DEPENDS') or ""): | ||
| 1778 | add_dep(depends, dep) | ||
| 1779 | |||
| 1780 | rdepends = [] | ||
| 1781 | for pkg in packages.split(): | ||
| 1782 | for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + pkg) or ""): | ||
| 1783 | add_dep(rdepends, dep) | ||
| 1784 | |||
| 1785 | #bb.note('rdepends is %s' % rdepends) | ||
| 1786 | |||
| 1787 | def post_getname(name, suffix): | ||
| 1788 | return '%s%s' % (name, suffix) | ||
| 1789 | def pre_getname(name, suffix): | ||
| 1790 | return '%s%s' % (suffix, name) | ||
| 1791 | |||
| 1792 | pkgs = {} | ||
| 1793 | for pkg in packages.split(): | ||
| 1794 | for postfix in postfixes: | ||
| 1795 | if pkg.endswith(postfix): | ||
| 1796 | if not postfix in pkgs: | ||
| 1797 | pkgs[postfix] = {} | ||
| 1798 | pkgs[postfix][pkg] = (pkg[:-len(postfix)], post_getname) | ||
| 1799 | |||
| 1800 | for prefix in prefixes: | ||
| 1801 | if pkg.startswith(prefix): | ||
| 1802 | if not prefix in pkgs: | ||
| 1803 | pkgs[prefix] = {} | ||
| 1804 | pkgs[prefix][pkg] = (pkg[:-len(prefix)], pre_getname) | ||
| 1805 | |||
| 1806 | if "-dbg" in pkgs: | ||
| 1807 | pkglibdeps = read_libdep_files(d) | ||
| 1808 | pkglibdeplist = [] | ||
| 1809 | for pkg in pkglibdeps: | ||
| 1810 | for k in pkglibdeps[pkg]: | ||
| 1811 | add_dep(pkglibdeplist, k) | ||
| 1812 | dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS') == '1') or (bb.data.inherits_class('packagegroup', d))) | ||
| 1813 | |||
| 1814 | for suffix in pkgs: | ||
| 1815 | for pkg in pkgs[suffix]: | ||
| 1816 | if d.getVarFlag('RRECOMMENDS:' + pkg, 'nodeprrecs'): | ||
| 1817 | continue | ||
| 1818 | (base, func) = pkgs[suffix][pkg] | ||
| 1819 | if suffix == "-dev": | ||
| 1820 | pkg_adddeprrecs(pkg, base, suffix, func, depends, d) | ||
| 1821 | elif suffix == "-dbg": | ||
| 1822 | if not dbgdefaultdeps: | ||
| 1823 | pkg_addrrecs(pkg, base, suffix, func, pkglibdeplist, d) | ||
| 1824 | continue | ||
| 1825 | if len(pkgs[suffix]) == 1: | ||
| 1826 | pkg_addrrecs(pkg, base, suffix, func, rdepends, d) | ||
| 1827 | else: | ||
| 1828 | rdeps = [] | ||
| 1829 | for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + base) or ""): | ||
| 1830 | add_dep(rdeps, dep) | ||
| 1831 | pkg_addrrecs(pkg, base, suffix, func, rdeps, d) | ||
| 1832 | } | 447 | } |
| 1833 | 448 | ||
| 1834 | # Since bitbake can't determine which variables are accessed during package | 449 | # Since bitbake can't determine which variables are accessed during package |
diff --git a/meta/lib/oe/package.py b/meta/lib/oe/package.py index b4c8ab7222..c9eb75d852 100644 --- a/meta/lib/oe/package.py +++ b/meta/lib/oe/package.py | |||
| @@ -4,12 +4,19 @@ | |||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | 4 | # SPDX-License-Identifier: GPL-2.0-only |
| 5 | # | 5 | # |
| 6 | 6 | ||
| 7 | import errno | ||
| 8 | import fnmatch | ||
| 9 | import itertools | ||
| 7 | import os | 10 | import os |
| 11 | import pipes | ||
| 12 | import re | ||
| 8 | import glob | 13 | import glob |
| 9 | import stat | 14 | import stat |
| 10 | import mmap | 15 | import mmap |
| 11 | import subprocess | 16 | import subprocess |
| 12 | 17 | ||
| 18 | import oe.cachedpath | ||
| 19 | |||
| 13 | def runstrip(arg): | 20 | def runstrip(arg): |
| 14 | # Function to strip a single file, called from split_and_strip_files below | 21 | # Function to strip a single file, called from split_and_strip_files below |
| 15 | # A working 'file' (one which works on the target architecture) | 22 | # A working 'file' (one which works on the target architecture) |
| @@ -300,7 +307,6 @@ def read_shlib_providers(d): | |||
| 300 | # the fs-perms.txt files | 307 | # the fs-perms.txt files |
| 301 | def fixup_perms(d): | 308 | def fixup_perms(d): |
| 302 | import pwd, grp | 309 | import pwd, grp |
| 303 | import oe.cachedpath | ||
| 304 | 310 | ||
| 305 | cpath = oe.cachedpath.CachedPath() | 311 | cpath = oe.cachedpath.CachedPath() |
| 306 | dvar = d.getVar('PKGD') | 312 | dvar = d.getVar('PKGD') |
| @@ -537,8 +543,6 @@ def fixup_perms(d): | |||
| 537 | # Get a list of files from file vars by searching files under current working directory | 543 | # Get a list of files from file vars by searching files under current working directory |
| 538 | # The list contains symlinks, directories and normal files. | 544 | # The list contains symlinks, directories and normal files. |
| 539 | def files_from_filevars(filevars): | 545 | def files_from_filevars(filevars): |
| 540 | import oe.cachedpath | ||
| 541 | |||
| 542 | cpath = oe.cachedpath.CachedPath() | 546 | cpath = oe.cachedpath.CachedPath() |
| 543 | files = [] | 547 | files = [] |
| 544 | for f in filevars: | 548 | for f in filevars: |
| @@ -611,3 +615,1401 @@ def get_conffiles(pkg, d): | |||
| 611 | os.chdir(cwd) | 615 | os.chdir(cwd) |
| 612 | return conf_list | 616 | return conf_list |
| 613 | 617 | ||
def legitimize_package_name(s):
    """
    Make sure package names are legitimate strings.

    Decodes <UXXXX> escapes (as found in glibc locale file names) to the
    corresponding character, lowercases the result and maps characters that
    are not valid in package names to allowed equivalents.
    """

    def fixutf(m):
        # Decode the hex codepoint directly. The previous implementation
        # built '\\uXXXX' strings and decoded them with 'unicode_escape',
        # which requires exactly four hex digits and raised
        # UnicodeDecodeError for 1-3 digit codepoints such as <U41>,
        # even though the regex below accepts 1-4 digits.
        return chr(int(m.group(1), 16))

    # Handle unicode codepoints encoded as <U0123>, as in glibc locale files.
    s = re.sub(r'<U([0-9A-Fa-f]{1,4})>', fixutf, s)

    # Remaining package name validity fixes
    return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-')
| 633 | |||
def split_locales(d):
    # Split the locale data under ${datadir}/locale into one
    # <LOCALEBASEPN>-locale-<name> package per locale directory, appending
    # the new packages to PACKAGES and setting their FILES/RRECOMMENDS/
    # RPROVIDES/SUMMARY/DESCRIPTION (and optionally SECTION) variables.
    cpath = oe.cachedpath.CachedPath()
    if (d.getVar('PACKAGE_NO_LOCALE') == '1'):
        bb.debug(1, "package requested not splitting locales")
        return

    packages = (d.getVar('PACKAGES') or "").split()

    datadir = d.getVar('datadir')
    if not datadir:
        bb.note("datadir not defined")
        return

    dvar = d.getVar('PKGD')
    pn = d.getVar('LOCALEBASEPN')

    # The catch-all -locale package is replaced by the per-locale packages.
    if pn + '-locale' in packages:
        packages.remove(pn + '-locale')

    localedir = os.path.join(dvar + datadir, 'locale')

    if not cpath.isdir(localedir):
        bb.debug(1, "No locale files in this package")
        return

    locales = os.listdir(localedir)

    summary = d.getVar('SUMMARY') or pn
    description = d.getVar('DESCRIPTION') or ""
    locale_section = d.getVar('LOCALE_SECTION')
    mlprefix = d.getVar('MLPREFIX') or ""
    # sorted() keeps the PACKAGES ordering deterministic across builds.
    for l in sorted(locales):
        ln = legitimize_package_name(l)
        pkg = pn + '-locale-' + ln
        packages.append(pkg)
        d.setVar('FILES:' + pkg, os.path.join(datadir, 'locale', l))
        d.setVar('RRECOMMENDS:' + pkg, '%svirtual-locale-%s' % (mlprefix, ln))
        d.setVar('RPROVIDES:' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln))
        d.setVar('SUMMARY:' + pkg, '%s - %s translations' % (summary, l))
        d.setVar('DESCRIPTION:' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l))
        if locale_section:
            d.setVar('SECTION:' + pkg, locale_section)

    d.setVar('PACKAGES', ' '.join(packages))

    # Disabled by RP 18/06/07
    # Wildcards aren't supported in debian
    # They break with ipkg since glibc-locale* will mean that
    # glibc-localedata-translit* won't install as a dependency
    # for some other package which breaks meta-toolchain
    # Probably breaks since virtual-locale- isn't provided anywhere
    #rdep = (d.getVar('RDEPENDS:%s' % pn) or "").split()
    #rdep.append('%s-locale*' % pn)
    #d.setVar('RDEPENDS:%s' % pn, ' '.join(rdep))
| 688 | |||
def package_debug_vars(d):
    """
    Return the path/suffix variables that control where split debug info is
    placed, according to PACKAGE_DEBUG_SPLIT_STYLE.

    Keys: "append"/"staticappend" (filename suffixes), "dir"/"staticdir"
    (per-directory subdir names), "libdir"/"staticlibdir" (path prefixes)
    and "srcdir" (where debug sources are copied; empty disables copying).
    """
    # Read the style once instead of on every comparison.
    split_style = d.getVar('PACKAGE_DEBUG_SPLIT_STYLE')

    # Shared layout for all ".debug"-style variants (also the default).
    dot_debug_vars = {
        "append": "",
        "staticappend": "",
        "dir": "/.debug",
        "staticdir": "/.debug-static",
        "libdir": "",
        "staticlibdir": "",
        "srcdir": "/usr/src/debug",
    }

    if split_style == 'debug-file-directory':
        # Single debug-file-directory style debug info
        debug_vars = {
            "append": ".debug",
            "staticappend": "",
            "dir": "",
            "staticdir": "",
            "libdir": "/usr/lib/debug",
            "staticlibdir": "/usr/lib/debug-static",
            "srcdir": "/usr/src/debug",
        }
    elif split_style == 'debug-without-src':
        # ".debug" style debug info, but without sources in /usr/src/debug
        debug_vars = dict(dot_debug_vars, srcdir="")
    else:
        # 'debug-with-srcpkg' and any other value both use the original
        # OE-core, a.k.a. ".debug", style debug info (the two branches in
        # the previous implementation were verbatim duplicates).
        debug_vars = dict(dot_debug_vars)

    return debug_vars
| 736 | |||
| 737 | |||
def parse_debugsources_from_dwarfsrcfiles_output(dwarfsrcfiles_output):
    """Extract source file paths from dwarfsrcfiles output.

    Source entries are the tab-indented lines; the first whitespace-separated
    token of each is taken as the path. Paths are normalised and de-duplicated
    while preserving first-seen order; a dict keys view is returned.
    """
    unique_paths = {
        os.path.normpath(entry.split()[0]): ""
        for entry in dwarfsrcfiles_output.splitlines()
        if entry.startswith("\t")
    }
    return unique_paths.keys()
| 746 | |||
def source_info(file, d, fatal=True):
    """Run dwarfsrcfiles on *file* and return its debug source paths as a list.

    Exit code 255 means a specific file wasn't fully parsed, which is
    tolerated; any other non-zero exit is fatal unless *fatal* is False,
    in which case it is merely noted.
    """
    cmd = ["dwarfsrcfiles", file]
    retval = 0
    try:
        output = subprocess.check_output(cmd, universal_newlines=True, stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as exc:
        output, retval = exc.output, exc.returncode

    # 255 means a specific file wasn't fully parsed to get the debug file
    # list, which is not a fatal failure.
    if retval not in (0, 255):
        details = ":\n%s" % output if output else ""
        msg = "dwarfsrcfiles failed with exit code %s (cmd was %s)%s" % (retval, cmd, details)
        if fatal:
            bb.fatal(msg)
        bb.note(msg)

    return list(parse_debugsources_from_dwarfsrcfiles_output(output))
| 766 | |||
def splitdebuginfo(file, dvar, dv, d):
    # Function to split a single file into two components, one is the stripped
    # target system binary, the other contains any debugging information. The
    # two files are linked to reference each other.
    #
    # dvar is PKGD and dv is the mapping returned by package_debug_vars().
    #
    # return a mapping of files:debugsources

    src = file[len(dvar):]
    # Debug file path: <libdir>/<dir-of-src>/<dv dir>/<basename><dv append>.
    dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
    debugfile = dvar + dest
    sources = []

    # Stripping a signed kernel module would invalidate its signature.
    if file.endswith(".ko") and file.find("/lib/modules/") != -1:
        if oe.package.is_kernel_module_signed(file):
            bb.debug(1, "Skip strip on signed module %s" % file)
            return (file, sources)

    # Split the file...
    bb.utils.mkdirhier(os.path.dirname(debugfile))
    #bb.note("Split %s -> %s" % (file, debugfile))
    # Only store off the hard link reference if we successfully split!

    dvar = d.getVar('PKGD')
    objcopy = d.getVar("OBJCOPY")

    # Temporarily widen permissions so objcopy can read/rewrite the file.
    # NOTE(review): 'or os.access(file, os.R_OK)' makes this branch run for
    # any readable file, not only unwritable ones — looks like an
    # 'or not os.access(...)' may have been intended; confirm before changing.
    newmode = None
    if not os.access(file, os.W_OK) or os.access(file, os.R_OK):
        origmode = os.stat(file)[stat.ST_MODE]
        newmode = origmode | stat.S_IWRITE | stat.S_IREAD
        os.chmod(file, newmode)

    # We need to extract the debug src information here...
    if dv["srcdir"]:
        sources = source_info(file, d)

    # NOTE(review): duplicate of the mkdirhier call above; harmless.
    bb.utils.mkdirhier(os.path.dirname(debugfile))

    # Copy only the debug sections into the separate debug file.
    subprocess.check_output([objcopy, '--only-keep-debug', file, debugfile], stderr=subprocess.STDOUT)

    # Set the debuglink to have the view of the file path on the target
    subprocess.check_output([objcopy, '--add-gnu-debuglink', debugfile, file], stderr=subprocess.STDOUT)

    if newmode:
        os.chmod(file, origmode)

    return (file, sources)
| 813 | |||
def splitstaticdebuginfo(file, dvar, dv, d):
    # Unlike splitdebuginfo() above, there is no way to split a static
    # library into two components. So to get similar results we will copy
    # the unmodified static library (containing the debug symbols) into a
    # new directory. We will then strip (preserving symbols) the static
    # library in the typical location.
    #
    # dvar is PKGD and dv is the mapping returned by package_debug_vars().
    #
    # return a mapping of files:debugsources

    # Fix: shutil is used below but is not imported at this module's top
    # level (the module only imports errno/fnmatch/itertools/os/pipes/re/
    # glob/stat/mmap/subprocess/oe.cachedpath), so shutil.copy2 would raise
    # NameError at runtime. Import it locally here.
    import shutil

    src = file[len(dvar):]
    # Debug copy path: <staticlibdir>/<dir-of-src>/<staticdir>/<basename><staticappend>.
    dest = dv["staticlibdir"] + os.path.dirname(src) + dv["staticdir"] + "/" + os.path.basename(src) + dv["staticappend"]
    debugfile = dvar + dest
    sources = []

    # Copy the file...
    bb.utils.mkdirhier(os.path.dirname(debugfile))
    #bb.note("Copy %s -> %s" % (file, debugfile))

    dvar = d.getVar('PKGD')

    # Temporarily widen permissions so the file can be read/copied.
    # NOTE(review): the 'or os.access(file, os.R_OK)' condition matches
    # splitdebuginfo() and runs for any readable file — kept for parity.
    newmode = None
    if not os.access(file, os.W_OK) or os.access(file, os.R_OK):
        origmode = os.stat(file)[stat.ST_MODE]
        newmode = origmode | stat.S_IWRITE | stat.S_IREAD
        os.chmod(file, newmode)

    # We need to extract the debug src information here...
    if dv["srcdir"]:
        sources = source_info(file, d)

    # Copy the unmodified item to the debug directory
    # (the duplicated mkdirhier call that used to precede this was removed;
    # the directory is already created above).
    shutil.copy2(file, debugfile)

    if newmode:
        os.chmod(file, origmode)

    return (file, sources)
| 853 | |||
def inject_minidebuginfo(file, dvar, dv, d):
    # Extract just the symbols from debuginfo into minidebuginfo,
    # compress it with xz and inject it back into the binary in a .gnu_debugdata section.
    # https://sourceware.org/gdb/onlinedocs/gdb/MiniDebugInfo.html
    #
    # dvar is PKGD and dv is the mapping returned by package_debug_vars();
    # relies on splitdebuginfo() having already produced the debug file.

    readelf = d.getVar('READELF')
    nm = d.getVar('NM')
    objcopy = d.getVar('OBJCOPY')

    minidebuginfodir = d.expand('${WORKDIR}/minidebuginfo')

    src = file[len(dvar):]
    # Same path construction as splitdebuginfo() so we find its output.
    dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
    debugfile = dvar + dest
    minidebugfile = minidebuginfodir + src + '.minidebug'
    bb.utils.mkdirhier(os.path.dirname(minidebugfile))

    # If we didn't produce debuginfo for any reason, we can't produce minidebuginfo either
    # so skip it.
    if not os.path.exists(debugfile):
        bb.debug(1, 'ELF file {} has no debuginfo, skipping minidebuginfo injection'.format(file))
        return

    # minidebuginfo does not make sense to apply to ELF objects other than
    # executables and shared libraries, skip applying the minidebuginfo
    # generation for objects like kernel modules.
    for line in subprocess.check_output([readelf, '-h', debugfile], universal_newlines=True).splitlines():
        if not line.strip().startswith("Type:"):
            continue
        elftype = line.split(":")[1].strip()
        if not any(elftype.startswith(i) for i in ["EXEC", "DYN"]):
            bb.debug(1, 'ELF file {} is not executable/shared, skipping minidebuginfo injection'.format(file))
            return
        break

    # Find non-allocated PROGBITS, NOTE, and NOBITS sections in the debuginfo.
    # We will exclude all of these from minidebuginfo to save space.
    remove_section_names = []
    for line in subprocess.check_output([readelf, '-W', '-S', debugfile], universal_newlines=True).splitlines():
        # strip the leading " [ 1]" section index to allow splitting on space
        if ']' not in line:
            continue
        fields = line[line.index(']') + 1:].split()
        # Expect at least: Name Type Address Off Size ES Flg from readelf -S.
        if len(fields) < 7:
            continue
        name = fields[0]
        type = fields[1]
        flags = fields[6]
        # .debug_ sections will be removed by objcopy -S so no need to explicitly remove them
        if name.startswith('.debug_'):
            continue
        if 'A' not in flags and type in ['PROGBITS', 'NOTE', 'NOBITS']:
            remove_section_names.append(name)

    # List dynamic symbols in the binary. We can exclude these from minidebuginfo
    # because they are always present in the binary.
    dynsyms = set()
    for line in subprocess.check_output([nm, '-D', file, '--format=posix', '--defined-only'], universal_newlines=True).splitlines():
        dynsyms.add(line.split()[0])

    # Find all function symbols from debuginfo which aren't in the dynamic symbols table.
    # These are the ones we want to keep in minidebuginfo.
    keep_symbols_file = minidebugfile + '.symlist'
    found_any_symbols = False
    with open(keep_symbols_file, 'w') as f:
        for line in subprocess.check_output([nm, debugfile, '--format=sysv', '--defined-only'], universal_newlines=True).splitlines():
            # sysv format is '|'-separated: name||value|class|type|size|line|section.
            fields = line.split('|')
            if len(fields) < 7:
                continue
            name = fields[0].strip()
            type = fields[3].strip()
            if type == 'FUNC' and name not in dynsyms:
                f.write('{}\n'.format(name))
                found_any_symbols = True

    if not found_any_symbols:
        bb.debug(1, 'ELF file {} contains no symbols, skipping minidebuginfo injection'.format(file))
        return

    # Remove any stale output from a previous run before regenerating.
    bb.utils.remove(minidebugfile)
    bb.utils.remove(minidebugfile + '.xz')

    # objcopy -S drops all symbol/debug info; --keep-symbols retains only the
    # selected function symbols, giving a minimal symbol-only file.
    subprocess.check_call([objcopy, '-S'] +
                          ['--remove-section={}'.format(s) for s in remove_section_names] +
                          ['--keep-symbols={}'.format(keep_symbols_file), debugfile, minidebugfile])

    # The .gnu_debugdata payload must be xz-compressed; --keep preserves the
    # uncompressed file alongside the .xz output.
    subprocess.check_call(['xz', '--keep', minidebugfile])

    subprocess.check_call([objcopy, '--add-section', '.gnu_debugdata={}.xz'.format(minidebugfile), file])
| 943 | |||
def copydebugsources(debugsrcdir, sources, d):
    # The debug src information written out to sourcefile is further processed
    # and copied to the destination here.
    #
    # debugsrcdir is the target path (e.g. /usr/src/debug) and sources is the
    # collection of source paths gathered by source_info()/splitdebuginfo().

    cpath = oe.cachedpath.CachedPath()

    if debugsrcdir and sources:
        sourcefile = d.expand("${WORKDIR}/debugsources.list")
        bb.utils.remove(sourcefile)

        # filenames are null-separated - this is an artefact of the previous use
        # of rpm's debugedit, which was writing them out that way, and the code elsewhere
        # is still assuming that.
        debuglistoutput = '\0'.join(sources) + '\0'
        with open(sourcefile, 'a') as sf:
            sf.write(debuglistoutput)

        dvar = d.getVar('PKGD')
        strip = d.getVar("STRIP")
        objcopy = d.getVar("OBJCOPY")
        workdir = d.getVar("WORKDIR")
        sdir = d.getVar("S")
        cflags = d.expand("${CFLAGS}")

        # Collect -fdebug-prefix-map=<host-path>=<target-path> entries from
        # CFLAGS so host source locations can be mapped back.
        prefixmap = {}
        for flag in cflags.split():
            if not flag.startswith("-fdebug-prefix-map"):
                continue
            if "recipe-sysroot" in flag:
                continue
            # assumes the flag is exactly -fdebug-prefix-map=old=new — TODO
            # confirm no '=' appears inside either path.
            flag = flag.split("=")
            prefixmap[flag[1]] = flag[2]

        # Remember which components of debugsrcdir we had to create so that
        # empty ones can be removed again at the end.
        nosuchdir = []
        basepath = dvar
        for p in debugsrcdir.split("/"):
            basepath = basepath + "/" + p
            if not cpath.exists(basepath):
                nosuchdir.append(basepath)
            bb.utils.mkdirhier(basepath)
            cpath.updatecache(basepath)

        for pmap in prefixmap:
            # Ignore files from the recipe sysroots (target and native)
            cmd = "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '((<internal>|<built-in>)$|/.*recipe-sysroot.*/)' | " % sourcefile
            # We need to ignore files that are not actually ours
            # we do this by only paying attention to items from this package
            cmd += "fgrep -zw '%s' | " % prefixmap[pmap]
            # Remove prefix in the source paths
            cmd += "sed 's#%s/##g' | " % (prefixmap[pmap])
            # copy-pass the remaining (relative) paths into PKGD via cpio.
            cmd += "(cd '%s' ; cpio -pd0mlL --no-preserve-owner '%s%s' 2>/dev/null)" % (pmap, dvar, prefixmap[pmap])

            try:
                subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
            except subprocess.CalledProcessError:
                # Can "fail" if internal headers/transient sources are attempted
                pass
            # cpio seems to have a bug with -lL together and symbolic links are just copied, not dereferenced.
            # Work around this by manually finding and copying any symbolic links that made it through.
            cmd = "find %s%s -type l -print0 -delete | sed s#%s%s/##g | (cd '%s' ; cpio -pd0mL --no-preserve-owner '%s%s')" % \
                    (dvar, prefixmap[pmap], dvar, prefixmap[pmap], pmap, dvar, prefixmap[pmap])
            subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)

        # debugsources.list may be polluted from the host if we used externalsrc,
        # cpio uses copy-pass and may have just created a directory structure
        # matching the one from the host, if thats the case move those files to
        # debugsrcdir to avoid host contamination.
        # Empty dir structure will be deleted in the next step.

        # Same check as above for externalsrc
        if workdir not in sdir:
            if os.path.exists(dvar + debugsrcdir + sdir):
                cmd = "mv %s%s%s/* %s%s" % (dvar, debugsrcdir, sdir, dvar,debugsrcdir)
                subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)

        # The copy by cpio may have resulted in some empty directories! Remove these
        cmd = "find %s%s -empty -type d -delete" % (dvar, debugsrcdir)
        subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)

        # Also remove debugsrcdir if its empty
        for p in nosuchdir[::-1]:
            if os.path.exists(p) and not os.listdir(p):
                os.rmdir(p)
| 1027 | |||
| 1028 | |||
| 1029 | def process_split_and_strip_files(d): | ||
| 1030 | cpath = oe.cachedpath.CachedPath() | ||
| 1031 | |||
| 1032 | dvar = d.getVar('PKGD') | ||
| 1033 | pn = d.getVar('PN') | ||
| 1034 | hostos = d.getVar('HOST_OS') | ||
| 1035 | |||
| 1036 | oldcwd = os.getcwd() | ||
| 1037 | os.chdir(dvar) | ||
| 1038 | |||
| 1039 | dv = package_debug_vars(d) | ||
| 1040 | |||
| 1041 | # | ||
| 1042 | # First lets figure out all of the files we may have to process ... do this only once! | ||
| 1043 | # | ||
| 1044 | elffiles = {} | ||
| 1045 | symlinks = {} | ||
| 1046 | staticlibs = [] | ||
| 1047 | inodes = {} | ||
| 1048 | libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir")) | ||
| 1049 | baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir")) | ||
| 1050 | skipfiles = (d.getVar("INHIBIT_PACKAGE_STRIP_FILES") or "").split() | ||
| 1051 | if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1' or \ | ||
| 1052 | d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'): | ||
| 1053 | checkelf = {} | ||
| 1054 | checkelflinks = {} | ||
| 1055 | for root, dirs, files in cpath.walk(dvar): | ||
| 1056 | for f in files: | ||
| 1057 | file = os.path.join(root, f) | ||
| 1058 | |||
| 1059 | # Skip debug files | ||
| 1060 | if dv["append"] and file.endswith(dv["append"]): | ||
| 1061 | continue | ||
| 1062 | if dv["dir"] and dv["dir"] in os.path.dirname(file[len(dvar):]): | ||
| 1063 | continue | ||
| 1064 | |||
| 1065 | if file in skipfiles: | ||
| 1066 | continue | ||
| 1067 | |||
| 1068 | if oe.package.is_static_lib(file): | ||
| 1069 | staticlibs.append(file) | ||
| 1070 | continue | ||
| 1071 | |||
| 1072 | try: | ||
| 1073 | ltarget = cpath.realpath(file, dvar, False) | ||
| 1074 | s = cpath.lstat(ltarget) | ||
| 1075 | except OSError as e: | ||
| 1076 | (err, strerror) = e.args | ||
| 1077 | if err != errno.ENOENT: | ||
| 1078 | raise | ||
| 1079 | # Skip broken symlinks | ||
| 1080 | continue | ||
| 1081 | if not s: | ||
| 1082 | continue | ||
| 1083 | # Check its an executable | ||
| 1084 | if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) \ | ||
| 1085 | or (s[stat.ST_MODE] & stat.S_IXOTH) \ | ||
| 1086 | or ((file.startswith(libdir) or file.startswith(baselibdir)) \ | ||
| 1087 | and (".so" in f or ".node" in f)) \ | ||
| 1088 | or (f.startswith('vmlinux') or ".ko" in f): | ||
| 1089 | |||
| 1090 | if cpath.islink(file): | ||
| 1091 | checkelflinks[file] = ltarget | ||
| 1092 | continue | ||
| 1093 | # Use a reference of device ID and inode number to identify files | ||
| 1094 | file_reference = "%d_%d" % (s.st_dev, s.st_ino) | ||
| 1095 | checkelf[file] = (file, file_reference) | ||
| 1096 | |||
| 1097 | results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelflinks.values(), d) | ||
| 1098 | results_map = {} | ||
| 1099 | for (ltarget, elf_file) in results: | ||
| 1100 | results_map[ltarget] = elf_file | ||
| 1101 | for file in checkelflinks: | ||
| 1102 | ltarget = checkelflinks[file] | ||
| 1103 | # If it's a symlink, and points to an ELF file, we capture the readlink target | ||
| 1104 | if results_map[ltarget]: | ||
| 1105 | target = os.readlink(file) | ||
| 1106 | #bb.note("Sym: %s (%d)" % (ltarget, results_map[ltarget])) | ||
| 1107 | symlinks[file] = target | ||
| 1108 | |||
| 1109 | results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelf.keys(), d) | ||
| 1110 | |||
| 1111 | # Sort results by file path. This ensures that the files are always | ||
| 1112 | # processed in the same order, which is important to make sure builds | ||
| 1113 | # are reproducible when dealing with hardlinks | ||
| 1114 | results.sort(key=lambda x: x[0]) | ||
| 1115 | |||
| 1116 | for (file, elf_file) in results: | ||
| 1117 | # It's a file (or hardlink), not a link | ||
| 1118 | # ...but is it ELF, and is it already stripped? | ||
| 1119 | if elf_file & 1: | ||
| 1120 | if elf_file & 2: | ||
| 1121 | if 'already-stripped' in (d.getVar('INSANE_SKIP:' + pn) or "").split(): | ||
| 1122 | bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn)) | ||
| 1123 | else: | ||
| 1124 | msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn) | ||
| 1125 | oe.qa.handle_error("already-stripped", msg, d) | ||
| 1126 | continue | ||
| 1127 | |||
| 1128 | # At this point we have an unstripped elf file. We need to: | ||
| 1129 | # a) Make sure any file we strip is not hardlinked to anything else outside this tree | ||
| 1130 | # b) Only strip any hardlinked file once (no races) | ||
| 1131 | # c) Track any hardlinks between files so that we can reconstruct matching debug file hardlinks | ||
| 1132 | |||
| 1133 | # Use a reference of device ID and inode number to identify files | ||
| 1134 | file_reference = checkelf[file][1] | ||
| 1135 | if file_reference in inodes: | ||
| 1136 | os.unlink(file) | ||
| 1137 | os.link(inodes[file_reference][0], file) | ||
| 1138 | inodes[file_reference].append(file) | ||
| 1139 | else: | ||
| 1140 | inodes[file_reference] = [file] | ||
| 1141 | # break hardlink | ||
| 1142 | bb.utils.break_hardlinks(file) | ||
| 1143 | elffiles[file] = elf_file | ||
| 1144 | # Modified the file so clear the cache | ||
| 1145 | cpath.updatecache(file) | ||
| 1146 | |||
| 1147 | def strip_pkgd_prefix(f): | ||
| 1148 | nonlocal dvar | ||
| 1149 | |||
| 1150 | if f.startswith(dvar): | ||
| 1151 | return f[len(dvar):] | ||
| 1152 | |||
| 1153 | return f | ||
| 1154 | |||
| 1155 | # | ||
| 1156 | # First lets process debug splitting | ||
| 1157 | # | ||
| 1158 | if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'): | ||
| 1159 | results = oe.utils.multiprocess_launch(splitdebuginfo, list(elffiles), d, extraargs=(dvar, dv, d)) | ||
| 1160 | |||
| 1161 | if dv["srcdir"] and not hostos.startswith("mingw"): | ||
| 1162 | if (d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'): | ||
| 1163 | results = oe.utils.multiprocess_launch(splitstaticdebuginfo, staticlibs, d, extraargs=(dvar, dv, d)) | ||
| 1164 | else: | ||
| 1165 | for file in staticlibs: | ||
| 1166 | results.append( (file,source_info(file, d)) ) | ||
| 1167 | |||
| 1168 | d.setVar("PKGDEBUGSOURCES", {strip_pkgd_prefix(f): sorted(s) for f, s in results}) | ||
| 1169 | |||
| 1170 | sources = set() | ||
| 1171 | for r in results: | ||
| 1172 | sources.update(r[1]) | ||
| 1173 | |||
| 1174 | # Hardlink our debug symbols to the other hardlink copies | ||
| 1175 | for ref in inodes: | ||
| 1176 | if len(inodes[ref]) == 1: | ||
| 1177 | continue | ||
| 1178 | |||
| 1179 | target = inodes[ref][0][len(dvar):] | ||
| 1180 | for file in inodes[ref][1:]: | ||
| 1181 | src = file[len(dvar):] | ||
| 1182 | dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(target) + dv["append"] | ||
| 1183 | fpath = dvar + dest | ||
| 1184 | ftarget = dvar + dv["libdir"] + os.path.dirname(target) + dv["dir"] + "/" + os.path.basename(target) + dv["append"] | ||
| 1185 | bb.utils.mkdirhier(os.path.dirname(fpath)) | ||
| 1186 | # Only one hardlink of separated debug info file in each directory | ||
| 1187 | if not os.access(fpath, os.R_OK): | ||
| 1188 | #bb.note("Link %s -> %s" % (fpath, ftarget)) | ||
| 1189 | os.link(ftarget, fpath) | ||
| 1190 | |||
| 1191 | # Create symlinks for all cases we were able to split symbols | ||
| 1192 | for file in symlinks: | ||
| 1193 | src = file[len(dvar):] | ||
| 1194 | dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"] | ||
| 1195 | fpath = dvar + dest | ||
| 1196 | # Skip it if the target doesn't exist | ||
| 1197 | try: | ||
| 1198 | s = os.stat(fpath) | ||
| 1199 | except OSError as e: | ||
| 1200 | (err, strerror) = e.args | ||
| 1201 | if err != errno.ENOENT: | ||
| 1202 | raise | ||
| 1203 | continue | ||
| 1204 | |||
| 1205 | ltarget = symlinks[file] | ||
| 1206 | lpath = os.path.dirname(ltarget) | ||
| 1207 | lbase = os.path.basename(ltarget) | ||
| 1208 | ftarget = "" | ||
| 1209 | if lpath and lpath != ".": | ||
| 1210 | ftarget += lpath + dv["dir"] + "/" | ||
| 1211 | ftarget += lbase + dv["append"] | ||
| 1212 | if lpath.startswith(".."): | ||
| 1213 | ftarget = os.path.join("..", ftarget) | ||
| 1214 | bb.utils.mkdirhier(os.path.dirname(fpath)) | ||
| 1215 | #bb.note("Symlink %s -> %s" % (fpath, ftarget)) | ||
| 1216 | os.symlink(ftarget, fpath) | ||
| 1217 | |||
| 1218 | # Process the dv["srcdir"] if requested... | ||
| 1219 | # This copies and places the referenced sources for later debugging... | ||
| 1220 | copydebugsources(dv["srcdir"], sources, d) | ||
| 1221 | # | ||
| 1222 | # End of debug splitting | ||
| 1223 | # | ||
| 1224 | |||
| 1225 | # | ||
| 1226 | # Now lets go back over things and strip them | ||
| 1227 | # | ||
| 1228 | if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1'): | ||
| 1229 | strip = d.getVar("STRIP") | ||
| 1230 | sfiles = [] | ||
| 1231 | for file in elffiles: | ||
| 1232 | elf_file = int(elffiles[file]) | ||
| 1233 | #bb.note("Strip %s" % file) | ||
| 1234 | sfiles.append((file, elf_file, strip)) | ||
| 1235 | if (d.getVar('PACKAGE_STRIP_STATIC') == '1' or d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'): | ||
| 1236 | for f in staticlibs: | ||
| 1237 | sfiles.append((f, 16, strip)) | ||
| 1238 | |||
| 1239 | oe.utils.multiprocess_launch(oe.package.runstrip, sfiles, d) | ||
| 1240 | |||
| 1241 | # Build "minidebuginfo" and reinject it back into the stripped binaries | ||
| 1242 | if d.getVar('PACKAGE_MINIDEBUGINFO') == '1': | ||
| 1243 | oe.utils.multiprocess_launch(inject_minidebuginfo, list(elffiles), d, | ||
| 1244 | extraargs=(dvar, dv, d)) | ||
| 1245 | |||
| 1246 | # | ||
| 1247 | # End of strip | ||
| 1248 | # | ||
| 1249 | os.chdir(oldcwd) | ||
| 1250 | |||
| 1251 | |||
def populate_packages(d):
    """
    Populate PKGDEST/<pkg> for every package in PACKAGES by hardlinking
    (or, for symlinks, copying) the files that each package's FILES
    variable matches out of PKGD.

    Along the way this:
      - adds a ${PN}-src package when PACKAGE_DEBUG_SPLIT_STYLE is
        'debug-with-srcpkg'
      - reorders PACKAGES so -src (then auto -dbg) packages get first pick
        of files
      - drops packages excluded due to incompatible licenses
      - raises QA errors for duplicate PACKAGES entries, invalid FILES
        values and installed-but-not-shipped files
    """
    cpath = oe.cachedpath.CachedPath()

    workdir = d.getVar('WORKDIR')
    outdir = d.getVar('DEPLOY_DIR')
    dvar = d.getVar('PKGD')
    packages = d.getVar('PACKAGES').split()
    pn = d.getVar('PN')

    bb.utils.mkdirhier(outdir)
    os.chdir(dvar)

    autodebug = not (d.getVar("NOAUTOPACKAGEDEBUG") or False)

    split_source_package = (d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg')

    # If debug-with-srcpkg mode is enabled then add the source package if it
    # doesn't exist and add the source file contents to the source package.
    if split_source_package:
        src_package_name = ('%s-src' % d.getVar('PN'))
        if not src_package_name in packages:
            packages.append(src_package_name)
        d.setVar('FILES:%s' % src_package_name, '/usr/src/debug')

    # Sanity check PACKAGES for duplicates
    # Sanity should be moved to sanity.bbclass once we have the infrastructure
    package_dict = {}

    for i, pkg in enumerate(packages):
        if pkg in package_dict:
            msg = "%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg
            oe.qa.handle_error("packages-list", msg, d)
        # Ensure the source package gets the chance to pick up the source files
        # before the debug package by ordering it first in PACKAGES. Whether it
        # actually picks up any source files is controlled by
        # PACKAGE_DEBUG_SPLIT_STYLE.
        elif pkg.endswith("-src"):
            package_dict[pkg] = (10, i)
        elif autodebug and pkg.endswith("-dbg"):
            package_dict[pkg] = (30, i)
        else:
            package_dict[pkg] = (50, i)
    # Sort by priority first, then by the original PACKAGES position.
    packages = sorted(package_dict.keys(), key=package_dict.get)
    d.setVar('PACKAGES', ' '.join(packages))
    pkgdest = d.getVar('PKGDEST')

    seen = []

    # os.mkdir masks the permissions with umask so we have to unset it first
    oldumask = os.umask(0)

    # Pre-compute the split-out debug paths once so auto -dbg packages can
    # claim them below without another tree walk per package.
    debug = []
    for root, dirs, files in cpath.walk(dvar):
        dir = root[len(dvar):]
        if not dir:
            dir = os.sep
        for f in (files + dirs):
            path = "." + os.path.join(dir, f)
            if "/.debug/" in path or "/.debug-static/" in path or path.endswith("/.debug"):
                debug.append(path)

    for pkg in packages:
        root = os.path.join(pkgdest, pkg)
        bb.utils.mkdirhier(root)

        filesvar = d.getVar('FILES:%s' % pkg) or ""
        if "//" in filesvar:
            msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg
            oe.qa.handle_error("files-invalid", msg, d)
            # Fix: str.replace() returns a new string (str is immutable), so
            # the result must be reassigned for the advertised fixup to
            # actually take effect.
            filesvar = filesvar.replace("//", "/")

        origfiles = filesvar.split()
        files, symlink_paths = oe.package.files_from_filevars(origfiles)

        if autodebug and pkg.endswith("-dbg"):
            files.extend(debug)

        for file in files:
            if (not cpath.islink(file)) and (not cpath.exists(file)):
                continue
            if file in seen:
                continue
            seen.append(file)

            def mkdir(src, dest, p):
                # Replicate a single directory level under dest, preserving
                # the source's mode and ownership.
                src = os.path.join(src, p)
                dest = os.path.join(dest, p)
                fstat = cpath.stat(src)
                os.mkdir(dest)
                os.chmod(dest, fstat.st_mode)
                os.chown(dest, fstat.st_uid, fstat.st_gid)
                if p not in seen:
                    seen.append(p)
                cpath.updatecache(dest)

            def mkdir_recurse(src, dest, paths):
                # Create every missing component of 'paths' under dest.
                if cpath.exists(dest + '/' + paths):
                    return
                while paths.startswith("./"):
                    paths = paths[2:]
                p = "."
                for c in paths.split("/"):
                    p = os.path.join(p, c)
                    if not cpath.exists(os.path.join(dest, p)):
                        mkdir(src, dest, p)

            if cpath.isdir(file) and not cpath.islink(file):
                mkdir_recurse(dvar, root, file)
                continue

            mkdir_recurse(dvar, root, os.path.dirname(file))
            fpath = os.path.join(root, file)
            if not cpath.islink(file):
                # Regular file: hardlink into the package tree (no copy).
                os.link(file, fpath)
                continue
            # Symlink: copy so the link itself is reproduced.
            ret = bb.utils.copyfile(file, fpath)
            if ret is False or ret == 0:
                bb.fatal("File population failed")

        # Check if symlink paths exist
        for file in symlink_paths:
            if not os.path.exists(os.path.join(root, file)):
                bb.fatal("File '%s' cannot be packaged into '%s' because its "
                         "parent directory structure does not exist. One of "
                         "its parent directories is a symlink whose target "
                         "directory is not included in the package." %
                         (file, pkg))

    os.umask(oldumask)
    os.chdir(workdir)

    # Handle excluding packages with incompatible licenses
    package_list = []
    for pkg in packages:
        licenses = d.getVar('_exclude_incompatible-' + pkg)
        if licenses:
            msg = "Excluding %s from packaging as it has incompatible license(s): %s" % (pkg, licenses)
            oe.qa.handle_error("incompatible-license", msg, d)
        else:
            package_list.append(pkg)
    d.setVar('PACKAGES', ' '.join(package_list))

    # Anything left in PKGD that no package claimed is reported as a QA
    # error unless the recipe opted out via INSANE_SKIP.
    unshipped = []
    for root, dirs, files in cpath.walk(dvar):
        dir = root[len(dvar):]
        if not dir:
            dir = os.sep
        for f in (files + dirs):
            path = os.path.join(dir, f)
            if ('.' + path) not in seen:
                unshipped.append(path)

    if unshipped != []:
        msg = pn + ": Files/directories were installed but not shipped in any package:"
        if "installed-vs-shipped" in (d.getVar('INSANE_SKIP:' + pn) or "").split():
            bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn)
        else:
            for f in unshipped:
                msg = msg + "\n " + f
            msg = msg + "\nPlease set FILES such that these items are packaged. Alternatively if they are unneeded, avoid installing them or delete them within do_install.\n"
            msg = msg + "%s: %d installed and not shipped files." % (pn, len(unshipped))
            oe.qa.handle_error("installed-vs-shipped", msg, d)
| 1414 | |||
def process_fixsymlinks(pkgfiles, d):
    """
    Detect symlinks in each package whose resolved target is not shipped in
    the same package. If the target lives in a sibling package, add that
    sibling to the package's RDEPENDS; otherwise just log the dangling link.

    pkgfiles maps package name -> list of absolute file paths under
    PKGDEST/<pkg>.
    """
    cpath = oe.cachedpath.CachedPath()
    pkgdest = d.getVar('PKGDEST')
    packages = d.getVar("PACKAGES", False).split()

    # For every package collect its root-relative file list and any symlink
    # targets that do not exist inside the package's own tree.
    dangling_links = {}
    pkg_files = {}
    for pkg in packages:
        dangling_links[pkg] = []
        pkg_files[pkg] = []
        inst_root = os.path.join(pkgdest, pkg)
        for path in pkgfiles[pkg]:
            rpath = path[len(inst_root):]
            pkg_files[pkg].append(rpath)
            rtarget = cpath.realpath(path, inst_root, True, assume_dir = True)
            if not cpath.lexists(rtarget):
                dangling_links[pkg].append(os.path.normpath(rtarget[len(inst_root):]))

    # Resolve each dangling link against the other packages' file lists and
    # record the extra runtime dependencies that implies.
    newrdepends = {}
    for pkg in dangling_links:
        for l in dangling_links[pkg]:
            found = False
            bb.debug(1, "%s contains dangling link %s" % (pkg, l))
            for p in packages:
                if l in pkg_files[p]:
                    found = True
                    bb.debug(1, "target found in %s" % p)
                    # No self-dependency needed if the target is in this
                    # same package.
                    if p == pkg:
                        break
                    if pkg not in newrdepends:
                        newrdepends[pkg] = []
                    newrdepends[pkg].append(p)
                    break
            if found == False:
                bb.note("%s contains dangling symlink to %s" % (pkg, l))

    # Merge the discovered dependencies into each package's RDEPENDS,
    # preserving any existing entries and version constraints.
    for pkg in newrdepends:
        rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS:' + pkg) or "")
        for p in newrdepends[pkg]:
            if p not in rdepends:
                rdepends[p] = []
        d.setVar('RDEPENDS:' + pkg, bb.utils.join_deps(rdepends, commasep=False))
| 1457 | |||
def process_filedeps(pkgfiles, d):
    """
    Collect per-file run-time dependency metadata.

    Output:
    FILERPROVIDESFLIST:pkg - list of all files w/ deps
    FILERPROVIDES:filepath:pkg - per file dep

    FILERDEPENDSFLIST:pkg - list of all files w/ deps
    FILERDEPENDS:filepath:pkg - per file dep
    """
    if d.getVar('SKIP_FILEDEPS') == '1':
        return

    pkgdest = d.getVar('PKGDEST')
    packages = d.getVar('PACKAGES')
    rpmdeps = d.getVar('RPMDEPS')

    def uninteresting(name):
        # Debug/doc/source/locale-style and kernel-module packages never get
        # per-file dependency scanning.
        if name.endswith(('-dbg', '-doc', '-src')) or name.startswith('kernel-module-'):
            return True
        return any(marker in name for marker in ('-locale-', '-localedata-', '-gconv-', '-charmap-'))

    # Fan the work out in batches of 100 files per worker invocation.
    work_items = []
    for pkg in packages.split():
        if d.getVar('SKIP_FILEDEPS:' + pkg) == '1':
            continue
        if uninteresting(pkg):
            continue
        file_list = pkgfiles[pkg]
        for start in range(0, len(file_list), 100):
            work_items.append((pkg, file_list[start:start + 100], rpmdeps, pkgdest))

    processed = oe.utils.multiprocess_launch(oe.package.filedeprunner, work_items, d)

    provides_files = {}
    requires_files = {}

    # Record the per-file provides/requires and remember which files in each
    # package carried any at all.
    for pkg, provides, requires in processed:
        prov_list = provides_files.setdefault(pkg, [])
        req_list = requires_files.setdefault(pkg, [])

        for fname in sorted(provides):
            prov_list.append(fname)
            d.appendVar("FILERPROVIDES:" + fname + ":" + pkg, " " + " ".join(provides[fname]))

        for fname in sorted(requires):
            req_list.append(fname)
            d.appendVar("FILERDEPENDS:" + fname + ":" + pkg, " " + " ".join(requires[fname]))

    # Publish the per-package file lists, sorted for determinism.
    for pkg, flist in requires_files.items():
        d.setVar("FILERDEPENDSFLIST:" + pkg, " ".join(sorted(flist)))
    for pkg, flist in provides_files.items():
        d.setVar("FILERPROVIDESFLIST:" + pkg, " ".join(sorted(flist)))
| 1514 | |||
def process_shlibs(pkgfiles, d):
    """
    Compute shared-library provides and requires for each package.

    For every package not excluded from shlib handling this scans its files
    (ELF via objdump, Mach-O via otool, PE via objdump), writes the provided
    sonames to SHLIBSWORKDIR/<pkg>.list and the resolved dependencies to
    PKGDEST/<pkg>.shlibdeps, and appends an ldconfig fragment to pkg_postinst
    where needed.
    """
    cpath = oe.cachedpath.CachedPath()

    exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', False)
    if exclude_shlibs:
        bb.note("not generating shlibs")
        return

    lib_re = re.compile(r"^.*\.so")
    libdir_re = re.compile(r".*/%s$" % d.getVar('baselib'))

    packages = d.getVar('PACKAGES')

    # Filter out packages listed in EXCLUDE_PACKAGES_FROM_SHLIBS.
    shlib_pkgs = []
    exclusion_list = d.getVar("EXCLUDE_PACKAGES_FROM_SHLIBS")
    if exclusion_list:
        for pkg in packages.split():
            if pkg not in exclusion_list.split():
                shlib_pkgs.append(pkg)
            else:
                bb.note("not generating shlibs for %s" % pkg)
    else:
        shlib_pkgs = packages.split()

    hostos = d.getVar('HOST_OS')

    workdir = d.getVar('WORKDIR')

    ver = d.getVar('PKGV')
    if not ver:
        msg = "PKGV not defined"
        oe.qa.handle_error("pkgv-undefined", msg, d)
        return

    pkgdest = d.getVar('PKGDEST')

    shlibswork_dir = d.getVar('SHLIBSWORKDIR')

    def linux_so(file, pkg, pkgver, d):
        """
        Parse one ELF file with objdump -p; return
        (needs_ldconfig, needed, sonames, renames).
        NOTE: reads private_libs, snap_symlinks, libdir_re and pkgdest from
        the enclosing scope (they are bound before this is invoked below).
        """
        needs_ldconfig = False
        needed = set()
        sonames = set()
        renames = []
        ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
        cmd = d.getVar('OBJDUMP') + " -p " + pipes.quote(file) + " 2>/dev/null"
        fd = os.popen(cmd)
        lines = fd.readlines()
        fd.close()
        # First pass: pick up any RPATH so NEEDED entries can be resolved
        # against it.
        rpath = tuple()
        for l in lines:
            m = re.match(r"\s+RPATH\s+([^\s]*)", l)
            if m:
                rpaths = m.group(1).replace("$ORIGIN", ldir).split(":")
                rpath = tuple(map(os.path.normpath, rpaths))
        # Second pass: collect NEEDED dependencies and the SONAME provide.
        for l in lines:
            m = re.match(r"\s+NEEDED\s+([^\s]*)", l)
            if m:
                dep = m.group(1)
                if dep not in needed:
                    needed.add((dep, file, rpath))
            m = re.match(r"\s+SONAME\s+([^\s]*)", l)
            if m:
                this_soname = m.group(1)
                prov = (this_soname, ldir, pkgver)
                if not prov in sonames:
                    # if library is private (only used by package) then do not build shlib for it
                    if not private_libs or len([i for i in private_libs if fnmatch.fnmatch(this_soname, i)]) == 0:
                        sonames.add(prov)
                if libdir_re.match(os.path.dirname(file)):
                    needs_ldconfig = True
                if needs_ldconfig and snap_symlinks and (os.path.basename(file) != this_soname):
                    renames.append((file, os.path.join(os.path.dirname(file), this_soname)))
        return (needs_ldconfig, needed, sonames, renames)

    def darwin_so(file, needed, sonames, renames, pkgver):
        # Mach-O variant: derive provides from the file name and requires
        # from otool output. Reads pkg/pkgdest from the enclosing scope.
        if not os.path.exists(file):
            return
        ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')

        def get_combinations(base):
            #
            # Given a base library name, find all combinations of this split by "." and "-"
            #
            combos = []
            options = base.split(".")
            for i in range(1, len(options) + 1):
                combos.append(".".join(options[0:i]))
            options = base.split("-")
            for i in range(1, len(options) + 1):
                combos.append("-".join(options[0:i]))
            return combos

        if (file.endswith('.dylib') or file.endswith('.so')) and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.endswith('-src'):
            # Drop suffix
            name = os.path.basename(file).rsplit(".",1)[0]
            # Find all combinations
            combos = get_combinations(name)
            for combo in combos:
                if not combo in sonames:
                    prov = (combo, ldir, pkgver)
                    sonames.add(prov)
        if file.endswith('.dylib') or file.endswith('.so'):
            rpath = []
            p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-l', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            out, err = p.communicate()
            # If returned successfully, process stdout for results
            if p.returncode == 0:
                for l in out.split("\n"):
                    l = l.strip()
                    if l.startswith('path '):
                        rpath.append(l.split()[1])

        p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-L', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out, err = p.communicate()
        # If returned successfully, process stdout for results
        if p.returncode == 0:
            for l in out.split("\n"):
                l = l.strip()
                if not l or l.endswith(":"):
                    continue
                if "is not an object file" in l:
                    continue
                name = os.path.basename(l.split()[0]).rsplit(".", 1)[0]
                if name and name not in needed[pkg]:
                    needed[pkg].add((name, file, tuple()))

    def mingw_dll(file, needed, sonames, renames, pkgver):
        # PE variant: every .dll is treated as a provide; imports of
        # .dll/.exe files become requires. Reads pkg/pkgdest from the
        # enclosing scope.
        if not os.path.exists(file):
            return

        if file.endswith(".dll"):
            # assume all dlls are shared objects provided by the package
            sonames.add((os.path.basename(file), os.path.dirname(file).replace(pkgdest + "/" + pkg, ''), pkgver))

        if (file.endswith(".dll") or file.endswith(".exe")):
            # use objdump to search for "DLL Name: .*\.dll"
            p = subprocess.Popen([d.expand("${HOST_PREFIX}objdump"), "-p", file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            out, err = p.communicate()
            # process the output, grabbing all .dll names
            if p.returncode == 0:
                for m in re.finditer(r"DLL Name: (.*?\.dll)$", out.decode(), re.MULTILINE | re.IGNORECASE):
                    dllname = m.group(1)
                    if dllname:
                        needed[pkg].add((dllname, file, tuple()))

    # snap_symlinks is read by linux_so above via closure.
    if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS') == "1":
        snap_symlinks = True
    else:
        snap_symlinks = False

    needed = {}

    shlib_provider = oe.package.read_shlib_providers(d)

    # Pass 1: work out what each package provides (and needs).
    for pkg in shlib_pkgs:
        private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or ""
        private_libs = private_libs.split()
        needs_ldconfig = False
        bb.debug(2, "calculating shlib provides for %s" % pkg)

        # Per-package version override, falling back to PKGV.
        pkgver = d.getVar('PKGV:' + pkg)
        if not pkgver:
            pkgver = d.getVar('PV_' + pkg)
        if not pkgver:
            pkgver = ver

        needed[pkg] = set()
        sonames = set()
        renames = []
        linuxlist = []
        for file in pkgfiles[pkg]:
            soname = None
            if cpath.islink(file):
                continue
            if hostos == "darwin" or hostos == "darwin8":
                darwin_so(file, needed, sonames, renames, pkgver)
            elif hostos.startswith("mingw"):
                mingw_dll(file, needed, sonames, renames, pkgver)
            elif os.access(file, os.X_OK) or lib_re.match(file):
                linuxlist.append(file)

        # ELF scanning is the common case, so it is parallelised.
        if linuxlist:
            results = oe.utils.multiprocess_launch(linux_so, linuxlist, d, extraargs=(pkg, pkgver, d))
            for r in results:
                ldconfig = r[0]
                needed[pkg] |= r[1]
                sonames |= r[2]
                renames.extend(r[3])
                needs_ldconfig = needs_ldconfig or ldconfig

        for (old, new) in renames:
            bb.note("Renaming %s to %s" % (old, new))
            bb.utils.rename(old, new)
            pkgfiles[pkg].remove(old)

        # Record the provides and register them in the provider map used by
        # pass 2 below.
        shlibs_file = os.path.join(shlibswork_dir, pkg + ".list")
        if len(sonames):
            with open(shlibs_file, 'w') as fd:
                for s in sorted(sonames):
                    if s[0] in shlib_provider and s[1] in shlib_provider[s[0]]:
                        (old_pkg, old_pkgver) = shlib_provider[s[0]][s[1]]
                        if old_pkg != pkg:
                            bb.warn('%s-%s was registered as shlib provider for %s, changing it to %s-%s because it was built later' % (old_pkg, old_pkgver, s[0], pkg, pkgver))
                    bb.debug(1, 'registering %s-%s as shlib provider for %s' % (pkg, pkgver, s[0]))
                    fd.write(s[0] + ':' + s[1] + ':' + s[2] + '\n')
                    if s[0] not in shlib_provider:
                        shlib_provider[s[0]] = {}
                    shlib_provider[s[0]][s[1]] = (pkg, pkgver)
        if needs_ldconfig:
            bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
            postinst = d.getVar('pkg_postinst:%s' % pkg)
            if not postinst:
                postinst = '#!/bin/sh\n'
            postinst += d.getVar('ldconfig_postinst_fragment')
            d.setVar('pkg_postinst:%s' % pkg, postinst)
        bb.debug(1, 'LIBNAMES: pkg %s sonames %s' % (pkg, sonames))

    # Merge in any manually asserted providers (ASSUME_SHLIBS entries of
    # the form lib:pkg or lib:pkg_version).
    assumed_libs = d.getVar('ASSUME_SHLIBS')
    if assumed_libs:
        libdir = d.getVar("libdir")
        for e in assumed_libs.split():
            l, dep_pkg = e.split(":")
            lib_ver = None
            dep_pkg = dep_pkg.rsplit("_", 1)
            if len(dep_pkg) == 2:
                lib_ver = dep_pkg[1]
            dep_pkg = dep_pkg[0]
            if l not in shlib_provider:
                shlib_provider[l] = {}
            shlib_provider[l][libdir] = (dep_pkg, lib_ver)

    libsearchpath = [d.getVar('libdir'), d.getVar('base_libdir')]

    # Pass 2: resolve each package's NEEDED entries against the provider
    # map and write the resulting dependency list.
    for pkg in shlib_pkgs:
        bb.debug(2, "calculating shlib requirements for %s" % pkg)

        private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or ""
        private_libs = private_libs.split()

        deps = list()
        for n in needed[pkg]:
            # if n is in private libraries, don't try to search provider for it
            # this could cause problem in case some abc.bb provides private
            # /opt/abc/lib/libfoo.so.1 and contains /usr/bin/abc depending on system library libfoo.so.1
            # but skipping it is still better alternative than providing own
            # version and then adding runtime dependency for the same system library
            if private_libs and len([i for i in private_libs if fnmatch.fnmatch(n[0], i)]) > 0:
                bb.debug(2, '%s: Dependency %s covered by PRIVATE_LIBS' % (pkg, n[0]))
                continue
            if n[0] in shlib_provider.keys():
                shlib_provider_map = shlib_provider[n[0]]
                # Try the binary's RPATH first, then all provider dirs, then
                # the standard library search path.
                matches = set()
                for p in itertools.chain(list(n[2]), sorted(shlib_provider_map.keys()), libsearchpath):
                    if p in shlib_provider_map:
                        matches.add(p)
                if len(matches) > 1:
                    matchpkgs = ', '.join([shlib_provider_map[match][0] for match in matches])
                    bb.error("%s: Multiple shlib providers for %s: %s (used by files: %s)" % (pkg, n[0], matchpkgs, n[1]))
                elif len(matches) == 1:
                    (dep_pkg, ver_needed) = shlib_provider_map[matches.pop()]

                    bb.debug(2, '%s: Dependency %s requires package %s (used by files: %s)' % (pkg, n[0], dep_pkg, n[1]))

                    if dep_pkg == pkg:
                        continue

                    if ver_needed:
                        dep = "%s (>= %s)" % (dep_pkg, ver_needed)
                    else:
                        dep = dep_pkg
                    if not dep in deps:
                        deps.append(dep)
                    continue
            bb.note("Couldn't find shared library provider for %s, used by files: %s" % (n[0], n[1]))

        deps_file = os.path.join(pkgdest, pkg + ".shlibdeps")
        if os.path.exists(deps_file):
            os.remove(deps_file)
        if deps:
            with open(deps_file, 'w') as fd:
                for dep in sorted(deps):
                    fd.write(dep + '\n')
| 1797 | |||
def process_pkgconfig(pkgfiles, d):
    """
    Generate pkg-config based dependencies between packages.

    Scans each package for .pc files, records what pkg-config modules it
    provides (SHLIBSWORKDIR/<pkg>.pclist) and which modules its .pc files
    Require, then resolves those requirements against the .pclist files in
    SHLIBSDIRS and writes the result to PKGDEST/<pkg>.pcdeps.
    """
    packages = d.getVar('PACKAGES')
    workdir = d.getVar('WORKDIR')
    pkgdest = d.getVar('PKGDEST')

    shlibs_dirs = d.getVar('SHLIBSDIRS').split()
    shlibswork_dir = d.getVar('SHLIBSWORKDIR')

    pc_re = re.compile(r'(.*)\.pc$')
    var_re = re.compile(r'(.*)=(.*)')
    field_re = re.compile(r'(.*): (.*)')

    # Parse every .pc file this recipe ships: the file name is a provide,
    # its 'Requires:' field (expanded with the .pc file's own variables via
    # a scratch datastore) contributes to the needs list.
    pkgconfig_provided = {}
    pkgconfig_needed = {}
    for pkg in packages.split():
        pkgconfig_provided[pkg] = []
        pkgconfig_needed[pkg] = []
        for file in sorted(pkgfiles[pkg]):
            m = pc_re.match(file)
            if m:
                pd = bb.data.init()
                name = m.group(1)
                pkgconfig_provided[pkg].append(os.path.basename(name))
                if not os.access(file, os.R_OK):
                    continue
                with open(file, 'r') as f:
                    lines = f.readlines()
                for l in lines:
                    m = var_re.match(l)
                    if m:
                        name = m.group(1)
                        val = m.group(2)
                        pd.setVar(name, pd.expand(val))
                        continue
                    m = field_re.match(l)
                    if m:
                        hdr = m.group(1)
                        exp = pd.expand(m.group(2))
                        if hdr == 'Requires':
                            pkgconfig_needed[pkg] += exp.replace(',', ' ').split()

    # Publish this recipe's provides for other recipes to consume.
    for pkg in packages.split():
        pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist")
        if pkgconfig_provided[pkg] != []:
            with open(pkgs_file, 'w') as f:
                for p in sorted(pkgconfig_provided[pkg]):
                    f.write('%s\n' % p)

    # Go from least to most specific since the last one found wins
    # (entries read here deliberately replace the in-memory provides).
    for dir in reversed(shlibs_dirs):
        if not os.path.exists(dir):
            continue
        for file in sorted(os.listdir(dir)):
            m = re.match(r'^(.*)\.pclist$', file)
            if m:
                pkg = m.group(1)
                with open(os.path.join(dir, file)) as fd:
                    lines = fd.readlines()
                pkgconfig_provided[pkg] = []
                for l in lines:
                    pkgconfig_provided[pkg].append(l.rstrip())

    # Resolve each needed module to the providing package and record the
    # inter-package dependencies.
    for pkg in packages.split():
        deps = []
        for n in pkgconfig_needed[pkg]:
            found = False
            for k in pkgconfig_provided.keys():
                if n in pkgconfig_provided[k]:
                    if k != pkg and not (k in deps):
                        deps.append(k)
                    found = True
            if found == False:
                bb.note("couldn't find pkgconfig module '%s' in any package" % n)
        deps_file = os.path.join(pkgdest, pkg + ".pcdeps")
        if len(deps):
            with open(deps_file, 'w') as fd:
                for dep in deps:
                    fd.write(dep + '\n')
| 1876 | |||
def read_libdep_files(d):
    """
    Read the per-package library dependency files (.shlibdeps, .pcdeps and
    .clilibdeps) written under PKGDEST by the earlier dependency passes.

    Returns a dict mapping package name -> {dependency: version constraints}
    in the format produced by bb.utils.explode_dep_versions2(). The first
    file to mention a dependency wins; later duplicates are ignored.
    """
    pkglibdeps = {}
    packages = d.getVar('PACKAGES').split()
    for pkg in packages:
        pkglibdeps[pkg] = {}
        for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
            depsfile = d.expand("${PKGDEST}/" + pkg + extension)
            if os.access(depsfile, os.R_OK):
                with open(depsfile) as fd:
                    lines = fd.readlines()
                for l in lines:
                    # Fix: str.rstrip() returns a new string, so the result
                    # must be reassigned — the original discarded it, leaving
                    # the trailing newline on the parsed line.
                    l = l.rstrip()
                    deps = bb.utils.explode_dep_versions2(l)
                    for dep in deps:
                        if not dep in pkglibdeps[pkg]:
                            pkglibdeps[pkg][dep] = deps[dep]
    return pkglibdeps
| 1894 | |||
| 1895 | def process_depchains(pkgfiles, d): | ||
| 1896 | """ | ||
| 1897 | For a given set of prefix and postfix modifiers, make those packages | ||
| 1898 | RRECOMMENDS on the corresponding packages for its RDEPENDS. | ||
| 1899 | |||
| 1900 | Example: If package A depends upon package B, and A's .bb emits an | ||
| 1901 | A-dev package, this would make A-dev Recommends: B-dev. | ||
| 1902 | |||
| 1903 | If only one of a given suffix is specified, it will take the RRECOMMENDS | ||
| 1904 | based on the RDEPENDS of *all* other packages. If more than one of a given | ||
| 1905 | suffix is specified, its will only use the RDEPENDS of the single parent | ||
| 1906 | package. | ||
| 1907 | """ | ||
| 1908 | |||
| 1909 | packages = d.getVar('PACKAGES') | ||
| 1910 | postfixes = (d.getVar('DEPCHAIN_POST') or '').split() | ||
| 1911 | prefixes = (d.getVar('DEPCHAIN_PRE') or '').split() | ||
| 1912 | |||
| 1913 | def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d): | ||
| 1914 | |||
| 1915 | #bb.note('depends for %s is %s' % (base, depends)) | ||
| 1916 | rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "") | ||
| 1917 | |||
| 1918 | for depend in sorted(depends): | ||
| 1919 | if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'): | ||
| 1920 | #bb.note("Skipping %s" % depend) | ||
| 1921 | continue | ||
| 1922 | if depend.endswith('-dev'): | ||
| 1923 | depend = depend[:-4] | ||
| 1924 | if depend.endswith('-dbg'): | ||
| 1925 | depend = depend[:-4] | ||
| 1926 | pkgname = getname(depend, suffix) | ||
| 1927 | #bb.note("Adding %s for %s" % (pkgname, depend)) | ||
| 1928 | if pkgname not in rreclist and pkgname != pkg: | ||
| 1929 | rreclist[pkgname] = [] | ||
| 1930 | |||
| 1931 | #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist))) | ||
| 1932 | d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False)) | ||
| 1933 | |||
| 1934 | def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d): | ||
| 1935 | |||
| 1936 | #bb.note('rdepends for %s is %s' % (base, rdepends)) | ||
| 1937 | rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "") | ||
| 1938 | |||
| 1939 | for depend in sorted(rdepends): | ||
| 1940 | if depend.find('virtual-locale-') != -1: | ||
| 1941 | #bb.note("Skipping %s" % depend) | ||
| 1942 | continue | ||
| 1943 | if depend.endswith('-dev'): | ||
| 1944 | depend = depend[:-4] | ||
| 1945 | if depend.endswith('-dbg'): | ||
| 1946 | depend = depend[:-4] | ||
| 1947 | pkgname = getname(depend, suffix) | ||
| 1948 | #bb.note("Adding %s for %s" % (pkgname, depend)) | ||
| 1949 | if pkgname not in rreclist and pkgname != pkg: | ||
| 1950 | rreclist[pkgname] = [] | ||
| 1951 | |||
| 1952 | #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist))) | ||
| 1953 | d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False)) | ||
| 1954 | |||
| 1955 | def add_dep(list, dep): | ||
| 1956 | if dep not in list: | ||
| 1957 | list.append(dep) | ||
| 1958 | |||
| 1959 | depends = [] | ||
| 1960 | for dep in bb.utils.explode_deps(d.getVar('DEPENDS') or ""): | ||
| 1961 | add_dep(depends, dep) | ||
| 1962 | |||
| 1963 | rdepends = [] | ||
| 1964 | for pkg in packages.split(): | ||
| 1965 | for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + pkg) or ""): | ||
| 1966 | add_dep(rdepends, dep) | ||
| 1967 | |||
| 1968 | #bb.note('rdepends is %s' % rdepends) | ||
| 1969 | |||
| 1970 | def post_getname(name, suffix): | ||
| 1971 | return '%s%s' % (name, suffix) | ||
| 1972 | def pre_getname(name, suffix): | ||
| 1973 | return '%s%s' % (suffix, name) | ||
| 1974 | |||
| 1975 | pkgs = {} | ||
| 1976 | for pkg in packages.split(): | ||
| 1977 | for postfix in postfixes: | ||
| 1978 | if pkg.endswith(postfix): | ||
| 1979 | if not postfix in pkgs: | ||
| 1980 | pkgs[postfix] = {} | ||
| 1981 | pkgs[postfix][pkg] = (pkg[:-len(postfix)], post_getname) | ||
| 1982 | |||
| 1983 | for prefix in prefixes: | ||
| 1984 | if pkg.startswith(prefix): | ||
| 1985 | if not prefix in pkgs: | ||
| 1986 | pkgs[prefix] = {} | ||
| 1987 | pkgs[prefix][pkg] = (pkg[:-len(prefix)], pre_getname) | ||
| 1988 | |||
| 1989 | if "-dbg" in pkgs: | ||
| 1990 | pkglibdeps = read_libdep_files(d) | ||
| 1991 | pkglibdeplist = [] | ||
| 1992 | for pkg in pkglibdeps: | ||
| 1993 | for k in pkglibdeps[pkg]: | ||
| 1994 | add_dep(pkglibdeplist, k) | ||
| 1995 | dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS') == '1') or (bb.data.inherits_class('packagegroup', d))) | ||
| 1996 | |||
| 1997 | for suffix in pkgs: | ||
| 1998 | for pkg in pkgs[suffix]: | ||
| 1999 | if d.getVarFlag('RRECOMMENDS:' + pkg, 'nodeprrecs'): | ||
| 2000 | continue | ||
| 2001 | (base, func) = pkgs[suffix][pkg] | ||
| 2002 | if suffix == "-dev": | ||
| 2003 | pkg_adddeprrecs(pkg, base, suffix, func, depends, d) | ||
| 2004 | elif suffix == "-dbg": | ||
| 2005 | if not dbgdefaultdeps: | ||
| 2006 | pkg_addrrecs(pkg, base, suffix, func, pkglibdeplist, d) | ||
| 2007 | continue | ||
| 2008 | if len(pkgs[suffix]) == 1: | ||
| 2009 | pkg_addrrecs(pkg, base, suffix, func, rdepends, d) | ||
| 2010 | else: | ||
| 2011 | rdeps = [] | ||
| 2012 | for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + base) or ""): | ||
| 2013 | add_dep(rdeps, dep) | ||
| 2014 | pkg_addrrecs(pkg, base, suffix, func, rdeps, d) | ||
| 2015 | |||
