summaryrefslogtreecommitdiffstats
path: root/meta/lib/oe/package.py
diff options
context:
space:
mode:
authorRichard Purdie <richard.purdie@linuxfoundation.org>2023-01-04 14:14:24 +0000
committerRichard Purdie <richard.purdie@linuxfoundation.org>2023-01-05 11:52:50 +0000
commite4f17fd72d0bc9c1b2f245abf9dd6a64b2544796 (patch)
treee42a6adff6ddc8adb8bd36b67ae4a69e9940f17a /meta/lib/oe/package.py
parentb3573d38ef2653a6da58a73fa8aa647009086bee (diff)
downloadpoky-e4f17fd72d0bc9c1b2f245abf9dd6a64b2544796.tar.gz
package: Move package functions to function library
Move the bulk of the remaining package "processing" functions over to the package function library for parsing efficiency. (From OE-Core rev: f8785117074908330faca0b99afa7f60ed6ad952) Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Diffstat (limited to 'meta/lib/oe/package.py')
-rw-r--r--meta/lib/oe/package.py1408
1 file changed, 1405 insertions, 3 deletions
diff --git a/meta/lib/oe/package.py b/meta/lib/oe/package.py
index b4c8ab7222..c9eb75d852 100644
--- a/meta/lib/oe/package.py
+++ b/meta/lib/oe/package.py
@@ -4,12 +4,19 @@
4# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
5# 5#
6 6
7import errno
8import fnmatch
9import itertools
7import os 10import os
11import pipes
12import re
8import glob 13import glob
9import stat 14import stat
10import mmap 15import mmap
11import subprocess 16import subprocess
12 17
18import oe.cachedpath
19
13def runstrip(arg): 20def runstrip(arg):
14 # Function to strip a single file, called from split_and_strip_files below 21 # Function to strip a single file, called from split_and_strip_files below
15 # A working 'file' (one which works on the target architecture) 22 # A working 'file' (one which works on the target architecture)
@@ -300,7 +307,6 @@ def read_shlib_providers(d):
300# the fs-perms.txt files 307# the fs-perms.txt files
301def fixup_perms(d): 308def fixup_perms(d):
302 import pwd, grp 309 import pwd, grp
303 import oe.cachedpath
304 310
305 cpath = oe.cachedpath.CachedPath() 311 cpath = oe.cachedpath.CachedPath()
306 dvar = d.getVar('PKGD') 312 dvar = d.getVar('PKGD')
@@ -537,8 +543,6 @@ def fixup_perms(d):
537# Get a list of files from file vars by searching files under current working directory 543# Get a list of files from file vars by searching files under current working directory
538# The list contains symlinks, directories and normal files. 544# The list contains symlinks, directories and normal files.
539def files_from_filevars(filevars): 545def files_from_filevars(filevars):
540 import oe.cachedpath
541
542 cpath = oe.cachedpath.CachedPath() 546 cpath = oe.cachedpath.CachedPath()
543 files = [] 547 files = []
544 for f in filevars: 548 for f in filevars:
@@ -611,3 +615,1401 @@ def get_conffiles(pkg, d):
611 os.chdir(cwd) 615 os.chdir(cwd)
612 return conf_list 616 return conf_list
613 617
def legitimize_package_name(s):
    """
    Make sure package names are legitimate strings.

    Decodes glibc-style <Uxxxx> codepoint escapes, lower-cases the name and
    maps characters that are not valid in package names onto allowed ones.
    """

    def _decode_codepoint(match):
        hexdigits = match.group(1)
        if hexdigits:
            # Convert the hex digits into the corresponding unicode character.
            return ('\\u%s' % hexdigits).encode('latin-1').decode('unicode_escape')

    # Handle unicode codepoints encoded as <U0123>, as in glibc locale files.
    s = re.sub(r'<U([0-9A-Fa-f]{1,4})>', _decode_codepoint, s)

    # Remaining package name validity fixes: lower-case the name and replace
    # characters that package managers do not accept.
    s = s.lower()
    for bad, good in (('_', '-'), ('@', '+'), (',', '+'), ('/', '-')):
        s = s.replace(bad, good)
    return s
633
def split_locales(d):
    """
    Split locale files under ${datadir}/locale into per-locale packages.

    Removes the catch-all <LOCALEBASEPN>-locale package from PACKAGES and
    appends one <LOCALEBASEPN>-locale-<name> package per locale directory,
    setting FILES/RRECOMMENDS/RPROVIDES/SUMMARY/DESCRIPTION (and optionally
    SECTION) for each, then writes the updated PACKAGES list back.
    """
    cpath = oe.cachedpath.CachedPath()

    if d.getVar('PACKAGE_NO_LOCALE') == '1':
        bb.debug(1, "package requested not splitting locales")
        return

    packages = (d.getVar('PACKAGES') or "").split()

    datadir = d.getVar('datadir')
    if not datadir:
        bb.note("datadir not defined")
        return

    dvar = d.getVar('PKGD')
    pn = d.getVar('LOCALEBASEPN')

    # The monolithic locale package is replaced by the per-locale packages.
    catchall_pkg = pn + '-locale'
    if catchall_pkg in packages:
        packages.remove(catchall_pkg)

    localedir = os.path.join(dvar + datadir, 'locale')
    if not cpath.isdir(localedir):
        bb.debug(1, "No locale files in this package")
        return

    summary = d.getVar('SUMMARY') or pn
    description = d.getVar('DESCRIPTION') or ""
    locale_section = d.getVar('LOCALE_SECTION')
    mlprefix = d.getVar('MLPREFIX') or ""

    # Sort so package metadata is generated in a reproducible order.
    for locale in sorted(os.listdir(localedir)):
        ln = legitimize_package_name(locale)
        pkg = pn + '-locale-' + ln
        packages.append(pkg)
        d.setVar('FILES:' + pkg, os.path.join(datadir, 'locale', locale))
        d.setVar('RRECOMMENDS:' + pkg, '%svirtual-locale-%s' % (mlprefix, ln))
        d.setVar('RPROVIDES:' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln))
        d.setVar('SUMMARY:' + pkg, '%s - %s translations' % (summary, locale))
        d.setVar('DESCRIPTION:' + pkg, '%s This package contains language translation files for the %s locale.' % (description, locale))
        if locale_section:
            d.setVar('SECTION:' + pkg, locale_section)

    d.setVar('PACKAGES', ' '.join(packages))

    # Disabled by RP 18/06/07
    # Wildcards aren't supported in debian
    # They break with ipkg since glibc-locale* will mean that
    # glibc-localedata-translit* won't install as a dependency
    # for some other package which breaks meta-toolchain
    # Probably breaks since virtual-locale- isn't provided anywhere
    #rdep = (d.getVar('RDEPENDS:%s' % pn) or "").split()
    #rdep.append('%s-locale*' % pn)
    #d.setVar('RDEPENDS:%s' % pn, ' '.join(rdep))
688
def package_debug_vars(d):
    """
    Return the debug-split layout for the configured split style.

    Reads PACKAGE_DEBUG_SPLIT_STYLE from the datastore and returns a dict
    with the path fragments used when separating debug info:
      append/staticappend -- suffix appended to split debug files
      dir/staticdir       -- per-directory subdir holding debug files
      libdir/staticlibdir -- top-level debug library directory
      srcdir              -- where debug sources are copied ('' disables)
    Unknown/unset styles fall back to the original OE-core ".debug" style.
    """
    # Query the datastore once instead of once per branch.
    style = d.getVar('PACKAGE_DEBUG_SPLIT_STYLE')
    if style == 'debug-file-directory':
        # Single debug-file-directory style debug info
        debug_vars = {
            "append": ".debug",
            "staticappend": "",
            "dir": "",
            "staticdir": "",
            "libdir": "/usr/lib/debug",
            "staticlibdir": "/usr/lib/debug-static",
            "srcdir": "/usr/src/debug",
        }
    elif style == 'debug-without-src':
        # Original OE-core, a.k.a. ".debug", style debug info, but without sources in /usr/src/debug
        debug_vars = {
            "append": "",
            "staticappend": "",
            "dir": "/.debug",
            "staticdir": "/.debug-static",
            "libdir": "",
            "staticlibdir": "",
            "srcdir": "",
        }
    elif style == 'debug-with-srcpkg':
        # ".debug" style, with the sources routed into a separate -src package
        debug_vars = {
            "append": "",
            "staticappend": "",
            "dir": "/.debug",
            "staticdir": "/.debug-static",
            "libdir": "",
            "staticlibdir": "",
            "srcdir": "/usr/src/debug",
        }
    else:
        # Original OE-core, a.k.a. ".debug", style debug info
        debug_vars = {
            "append": "",
            "staticappend": "",
            "dir": "/.debug",
            "staticdir": "/.debug-static",
            "libdir": "",
            "staticlibdir": "",
            "srcdir": "/usr/src/debug",
        }

    return debug_vars
736
737
def parse_debugsources_from_dwarfsrcfiles_output(dwarfsrcfiles_output):
    """
    Extract the referenced source files from `dwarfsrcfiles` output.

    Source entries are the tab-indented lines; the first whitespace-separated
    token on each is a path.  Paths are normalised and de-duplicated while
    preserving first-seen order (dict keys), and the key view is returned.
    """
    sources = {
        os.path.normpath(entry.split()[0]): ""
        for entry in dwarfsrcfiles_output.splitlines()
        if entry.startswith("\t")
    }
    return sources.keys()
746
def source_info(file, d, fatal=True):
    """
    Run dwarfsrcfiles on an ELF file and return its debug source list.

    file  -- path of the binary to inspect
    d     -- the datastore (not used here; kept for caller compatibility)
    fatal -- abort the build on a hard failure when True, otherwise log only

    Exit code 255 from dwarfsrcfiles means one file wasn't fully parsed,
    which is tolerated; any other non-zero code is reported.
    """
    cmd = ["dwarfsrcfiles", file]
    # subprocess.run with no check= gives us the output and return code
    # without an exception on failure, mirroring the old try/except form.
    proc = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                          universal_newlines=True)
    output = proc.stdout
    retval = proc.returncode

    # 255 means a specific file wasn't fully parsed to get the debug file list, which is not a fatal failure
    if retval != 0 and retval != 255:
        msg = "dwarfsrcfiles failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else "")
        if fatal:
            bb.fatal(msg)
        bb.note(msg)

    return list(parse_debugsources_from_dwarfsrcfiles_output(output))
766
def splitdebuginfo(file, dvar, dv, d):
    # Function to split a single file into two components, one is the stripped
    # target system binary, the other contains any debugging information. The
    # two files are linked to reference each other.
    #
    # file -- absolute path of the binary under PKGD
    # dvar -- PKGD root (used to compute the target-relative path)
    # dv   -- debug layout dict from package_debug_vars()
    # d    -- the datastore
    #
    # return a mapping of files:debugsources

    src = file[len(dvar):]
    dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
    debugfile = dvar + dest
    sources = []

    # Stripping a signed kernel module would invalidate its signature, so
    # leave such modules alone.
    if file.endswith(".ko") and file.find("/lib/modules/") != -1:
        if oe.package.is_kernel_module_signed(file):
            bb.debug(1, "Skip strip on signed module %s" % file)
            return (file, sources)

    # Split the file...
    bb.utils.mkdirhier(os.path.dirname(debugfile))
    #bb.note("Split %s -> %s" % (file, debugfile))
    # Only store off the hard link reference if we successfully split!

    # NOTE(review): dvar was passed in as a parameter and already used above;
    # re-reading PKGD here presumably yields the same value — confirm.
    dvar = d.getVar('PKGD')
    objcopy = d.getVar("OBJCOPY")

    # Make sure the file is read/write before objcopy touches it; the
    # original mode is restored at the end.
    # NOTE(review): the `or os.access(file, os.R_OK)` (readable) leg makes
    # this condition true for nearly every file; the intent may have been
    # `not os.access(file, os.R_OK)`.  Harmless as-is since the chmod only
    # adds permission bits — confirm before changing.
    newmode = None
    if not os.access(file, os.W_OK) or os.access(file, os.R_OK):
        origmode = os.stat(file)[stat.ST_MODE]
        newmode = origmode | stat.S_IWRITE | stat.S_IREAD
        os.chmod(file, newmode)

    # We need to extract the debug src information here...
    if dv["srcdir"]:
        sources = source_info(file, d)

    bb.utils.mkdirhier(os.path.dirname(debugfile))

    # Copy only the debug sections into the separate debug file.
    subprocess.check_output([objcopy, '--only-keep-debug', file, debugfile], stderr=subprocess.STDOUT)

    # Set the debuglink to have the view of the file path on the target
    subprocess.check_output([objcopy, '--add-gnu-debuglink', debugfile, file], stderr=subprocess.STDOUT)

    if newmode:
        os.chmod(file, origmode)

    return (file, sources)
813
def splitstaticdebuginfo(file, dvar, dv, d):
    # Unlike the function above, there is no way to split a static library
    # two components. So to get similar results we will copy the unmodified
    # static library (containing the debug symbols) into a new directory.
    # We will then strip (preserving symbols) the static library in the
    # typical location.
    #
    # file -- absolute path of the static library under PKGD
    # dvar -- PKGD root
    # dv   -- debug layout dict from package_debug_vars()
    # d    -- the datastore
    #
    # return a mapping of files:debugsources

    src = file[len(dvar):]
    dest = dv["staticlibdir"] + os.path.dirname(src) + dv["staticdir"] + "/" + os.path.basename(src) + dv["staticappend"]
    debugfile = dvar + dest
    sources = []

    # Copy the file...
    bb.utils.mkdirhier(os.path.dirname(debugfile))
    #bb.note("Copy %s -> %s" % (file, debugfile))

    # NOTE(review): dvar is re-read from PKGD after already being used above;
    # presumably the same value — confirm.
    dvar = d.getVar('PKGD')

    # Ensure the file is read/write before we operate on it (mode restored
    # below).  NOTE(review): same suspicious `or os.access(file, os.R_OK)`
    # condition as in splitdebuginfo — confirm intent.
    newmode = None
    if not os.access(file, os.W_OK) or os.access(file, os.R_OK):
        origmode = os.stat(file)[stat.ST_MODE]
        newmode = origmode | stat.S_IWRITE | stat.S_IREAD
        os.chmod(file, newmode)

    # We need to extract the debug src information here...
    if dv["srcdir"]:
        sources = source_info(file, d)

    bb.utils.mkdirhier(os.path.dirname(debugfile))

    # Copy the unmodified item to the debug directory
    # NOTE(review): `shutil` is not imported in the visible part of this
    # file — verify it is imported elsewhere in the module.
    shutil.copy2(file, debugfile)

    if newmode:
        os.chmod(file, origmode)

    return (file, sources)
853
def inject_minidebuginfo(file, dvar, dv, d):
    # Extract just the symbols from debuginfo into minidebuginfo,
    # compress it with xz and inject it back into the binary in a .gnu_debugdata section.
    # https://sourceware.org/gdb/onlinedocs/gdb/MiniDebugInfo.html
    #
    # file -- absolute path of the (stripped) binary under PKGD
    # dvar -- PKGD root
    # dv   -- debug layout dict from package_debug_vars()
    # d    -- the datastore

    readelf = d.getVar('READELF')
    nm = d.getVar('NM')
    objcopy = d.getVar('OBJCOPY')

    # Scratch area for the intermediate minidebug files.
    minidebuginfodir = d.expand('${WORKDIR}/minidebuginfo')

    src = file[len(dvar):]
    dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
    debugfile = dvar + dest
    minidebugfile = minidebuginfodir + src + '.minidebug'
    bb.utils.mkdirhier(os.path.dirname(minidebugfile))

    # If we didn't produce debuginfo for any reason, we can't produce minidebuginfo either
    # so skip it.
    if not os.path.exists(debugfile):
        bb.debug(1, 'ELF file {} has no debuginfo, skipping minidebuginfo injection'.format(file))
        return

    # minidebuginfo does not make sense to apply to ELF objects other than
    # executables and shared libraries, skip applying the minidebuginfo
    # generation for objects like kernel modules.
    for line in subprocess.check_output([readelf, '-h', debugfile], universal_newlines=True).splitlines():
        if not line.strip().startswith("Type:"):
            continue
        elftype = line.split(":")[1].strip()
        if not any(elftype.startswith(i) for i in ["EXEC", "DYN"]):
            bb.debug(1, 'ELF file {} is not executable/shared, skipping minidebuginfo injection'.format(file))
            return
        break

    # Find non-allocated PROGBITS, NOTE, and NOBITS sections in the debuginfo.
    # We will exclude all of these from minidebuginfo to save space.
    remove_section_names = []
    for line in subprocess.check_output([readelf, '-W', '-S', debugfile], universal_newlines=True).splitlines():
        # strip the leading "  [ 1]" section index to allow splitting on space
        if ']' not in line:
            continue
        fields = line[line.index(']') + 1:].split()
        if len(fields) < 7:
            continue
        name = fields[0]
        # NOTE(review): `type` here shadows the builtin; left as-is.
        type = fields[1]
        flags = fields[6]
        # .debug_ sections will be removed by objcopy -S so no need to explicitly remove them
        if name.startswith('.debug_'):
            continue
        # 'A' in the flags column marks an allocated section in readelf output.
        if 'A' not in flags and type in ['PROGBITS', 'NOTE', 'NOBITS']:
            remove_section_names.append(name)

    # List dynamic symbols in the binary. We can exclude these from minidebuginfo
    # because they are always present in the binary.
    dynsyms = set()
    for line in subprocess.check_output([nm, '-D', file, '--format=posix', '--defined-only'], universal_newlines=True).splitlines():
        dynsyms.add(line.split()[0])

    # Find all function symbols from debuginfo which aren't in the dynamic symbols table.
    # These are the ones we want to keep in minidebuginfo.
    keep_symbols_file = minidebugfile + '.symlist'
    found_any_symbols = False
    with open(keep_symbols_file, 'w') as f:
        for line in subprocess.check_output([nm, debugfile, '--format=sysv', '--defined-only'], universal_newlines=True).splitlines():
            # sysv format is '|'-separated: name | value | class | type | ...
            fields = line.split('|')
            if len(fields) < 7:
                continue
            name = fields[0].strip()
            type = fields[3].strip()
            if type == 'FUNC' and name not in dynsyms:
                f.write('{}\n'.format(name))
                found_any_symbols = True

    if not found_any_symbols:
        bb.debug(1, 'ELF file {} contains no symbols, skipping minidebuginfo injection'.format(file))
        return

    # Remove stale outputs from any previous run before regenerating.
    bb.utils.remove(minidebugfile)
    bb.utils.remove(minidebugfile + '.xz')

    # Build the minidebug file: strip everything (-S), drop the unwanted
    # sections and keep only the selected function symbols.
    subprocess.check_call([objcopy, '-S'] +
                          ['--remove-section={}'.format(s) for s in remove_section_names] +
                          ['--keep-symbols={}'.format(keep_symbols_file), debugfile, minidebugfile])

    subprocess.check_call(['xz', '--keep', minidebugfile])

    # Embed the xz-compressed result into the binary itself.
    subprocess.check_call([objcopy, '--add-section', '.gnu_debugdata={}.xz'.format(minidebugfile), file])
943
def copydebugsources(debugsrcdir, sources, d):
    # The debug src information written out to sourcefile is further processed
    # and copied to the destination here.
    #
    # debugsrcdir -- target path for debug sources (e.g. /usr/src/debug);
    #                falsy disables the copy entirely
    # sources     -- iterable of source file paths gathered by source_info()
    # d           -- the datastore

    cpath = oe.cachedpath.CachedPath()

    if debugsrcdir and sources:
        sourcefile = d.expand("${WORKDIR}/debugsources.list")
        bb.utils.remove(sourcefile)

        # filenames are null-separated - this is an artefact of the previous use
        # of rpm's debugedit, which was writing them out that way, and the code elsewhere
        # is still assuming that.
        debuglistoutput = '\0'.join(sources) + '\0'
        with open(sourcefile, 'a') as sf:
            sf.write(debuglistoutput)

        dvar = d.getVar('PKGD')
        # NOTE(review): strip and objcopy are read but never used in this
        # function — possibly leftovers.
        strip = d.getVar("STRIP")
        objcopy = d.getVar("OBJCOPY")
        workdir = d.getVar("WORKDIR")
        sdir = d.getVar("S")
        cflags = d.expand("${CFLAGS}")

        # Build a map of debug-prefix-map substitutions from CFLAGS, skipping
        # the sysroot mappings (those files are not ours to package).
        prefixmap = {}
        for flag in cflags.split():
            if not flag.startswith("-fdebug-prefix-map"):
                continue
            if "recipe-sysroot" in flag:
                continue
            flag = flag.split("=")
            prefixmap[flag[1]] = flag[2]

        # Create the destination path component by component, remembering
        # which directories we created so empty ones can be removed later.
        nosuchdir = []
        basepath = dvar
        for p in debugsrcdir.split("/"):
            basepath = basepath + "/" + p
            if not cpath.exists(basepath):
                nosuchdir.append(basepath)
            bb.utils.mkdirhier(basepath)
            cpath.updatecache(basepath)

        for pmap in prefixmap:
            # Ignore files from the recipe sysroots (target and native)
            cmd = "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '((<internal>|<built-in>)$|/.*recipe-sysroot.*/)' | " % sourcefile
            # We need to ignore files that are not actually ours
            # we do this by only paying attention to items from this package
            cmd += "fgrep -zw '%s' | " % prefixmap[pmap]
            # Remove prefix in the source paths
            cmd += "sed 's#%s/##g' | " % (prefixmap[pmap])
            cmd += "(cd '%s' ; cpio -pd0mlL --no-preserve-owner '%s%s' 2>/dev/null)" % (pmap, dvar, prefixmap[pmap])

            try:
                subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
            except subprocess.CalledProcessError:
                # Can "fail" if internal headers/transient sources are attempted
                pass
            # cpio seems to have a bug with -lL together and symbolic links are just copied, not dereferenced.
            # Work around this by manually finding and copying any symbolic links that made it through.
            cmd = "find %s%s -type l -print0 -delete | sed s#%s%s/##g | (cd '%s' ; cpio -pd0mL --no-preserve-owner '%s%s')" % \
                    (dvar, prefixmap[pmap], dvar, prefixmap[pmap], pmap, dvar, prefixmap[pmap])
            subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)

        # debugsources.list may be polluted from the host if we used externalsrc,
        # cpio uses copy-pass and may have just created a directory structure
        # matching the one from the host, if thats the case move those files to
        # debugsrcdir to avoid host contamination.
        # Empty dir structure will be deleted in the next step.

        # Same check as above for externalsrc
        if workdir not in sdir:
            if os.path.exists(dvar + debugsrcdir + sdir):
                cmd = "mv %s%s%s/* %s%s" % (dvar, debugsrcdir, sdir, dvar,debugsrcdir)
                subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)

        # The copy by cpio may have resulted in some empty directories! Remove these
        cmd = "find %s%s -empty -type d -delete" % (dvar, debugsrcdir)
        subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)

        # Also remove debugsrcdir if its empty
        for p in nosuchdir[::-1]:
            if os.path.exists(p) and not os.listdir(p):
                os.rmdir(p)
1027
1028
def process_split_and_strip_files(d):
    """
    Walk PKGD, split debug info out of ELF binaries and strip them.

    This is the main driver: it classifies every file under PKGD (ELF
    binaries, symlinks to them, static libraries), splits out debug info
    via splitdebuginfo()/splitstaticdebuginfo(), mirrors hardlink and
    symlink structure into the debug tree, copies referenced sources via
    copydebugsources(), then strips the binaries and optionally injects
    minidebuginfo.  Controlled by INHIBIT_PACKAGE_STRIP,
    INHIBIT_PACKAGE_DEBUG_SPLIT and related variables.
    """
    cpath = oe.cachedpath.CachedPath()

    dvar = d.getVar('PKGD')
    pn = d.getVar('PN')
    hostos = d.getVar('HOST_OS')

    oldcwd = os.getcwd()
    os.chdir(dvar)

    dv = package_debug_vars(d)

    #
    # First lets figure out all of the files we may have to process ... do this only once!
    #
    elffiles = {}
    symlinks = {}
    staticlibs = []
    inodes = {}
    libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir"))
    baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir"))
    skipfiles = (d.getVar("INHIBIT_PACKAGE_STRIP_FILES") or "").split()
    if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1' or \
            d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
        checkelf = {}
        checkelflinks = {}
        for root, dirs, files in cpath.walk(dvar):
            for f in files:
                file = os.path.join(root, f)

                # Skip debug files
                if dv["append"] and file.endswith(dv["append"]):
                    continue
                if dv["dir"] and dv["dir"] in os.path.dirname(file[len(dvar):]):
                    continue

                # Explicitly excluded via INHIBIT_PACKAGE_STRIP_FILES.
                if file in skipfiles:
                    continue

                if oe.package.is_static_lib(file):
                    staticlibs.append(file)
                    continue

                try:
                    ltarget = cpath.realpath(file, dvar, False)
                    s = cpath.lstat(ltarget)
                except OSError as e:
                    (err, strerror) = e.args
                    if err != errno.ENOENT:
                        raise
                    # Skip broken symlinks
                    continue
                if not s:
                    continue
                # Check its an executable: any execute bit set, or a library
                # under libdir/base_libdir, or a kernel image/module.
                if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) \
                        or (s[stat.ST_MODE] & stat.S_IXOTH) \
                        or ((file.startswith(libdir) or file.startswith(baselibdir)) \
                        and (".so" in f or ".node" in f)) \
                        or (f.startswith('vmlinux') or ".ko" in f):

                    if cpath.islink(file):
                        checkelflinks[file] = ltarget
                        continue
                    # Use a reference of device ID and inode number to identify files
                    file_reference = "%d_%d" % (s.st_dev, s.st_ino)
                    checkelf[file] = (file, file_reference)

        # Run the (expensive) ELF checks across multiple processes, first for
        # symlink targets, then for regular files.
        results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelflinks.values(), d)
        results_map = {}
        for (ltarget, elf_file) in results:
            results_map[ltarget] = elf_file
        for file in checkelflinks:
            ltarget = checkelflinks[file]
            # If it's a symlink, and points to an ELF file, we capture the readlink target
            if results_map[ltarget]:
                target = os.readlink(file)
                #bb.note("Sym: %s (%d)" % (ltarget, results_map[ltarget]))
                symlinks[file] = target

        results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelf.keys(), d)

        # Sort results by file path. This ensures that the files are always
        # processed in the same order, which is important to make sure builds
        # are reproducible when dealing with hardlinks
        results.sort(key=lambda x: x[0])

        for (file, elf_file) in results:
            # It's a file (or hardlink), not a link
            # ...but is it ELF, and is it already stripped?
            if elf_file & 1:
                if elf_file & 2:
                    if 'already-stripped' in (d.getVar('INSANE_SKIP:' + pn) or "").split():
                        bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn))
                    else:
                        msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn)
                        oe.qa.handle_error("already-stripped", msg, d)
                    continue

                # At this point we have an unstripped elf file. We need to:
                #  a) Make sure any file we strip is not hardlinked to anything else outside this tree
                #  b) Only strip any hardlinked file once (no races)
                #  c) Track any hardlinks between files so that we can reconstruct matching debug file hardlinks

                # Use a reference of device ID and inode number to identify files
                file_reference = checkelf[file][1]
                if file_reference in inodes:
                    # A copy of this inode was already seen: re-link this path
                    # to the first occurrence so only one file gets stripped.
                    os.unlink(file)
                    os.link(inodes[file_reference][0], file)
                    inodes[file_reference].append(file)
                else:
                    inodes[file_reference] = [file]
                    # break hardlink
                    bb.utils.break_hardlinks(file)
                    elffiles[file] = elf_file
                # Modified the file so clear the cache
                cpath.updatecache(file)

    def strip_pkgd_prefix(f):
        nonlocal dvar

        if f.startswith(dvar):
            return f[len(dvar):]

        return f

    #
    # First lets process debug splitting
    #
    if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
        results = oe.utils.multiprocess_launch(splitdebuginfo, list(elffiles), d, extraargs=(dvar, dv, d))

        if dv["srcdir"] and not hostos.startswith("mingw"):
            if (d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
                results = oe.utils.multiprocess_launch(splitstaticdebuginfo, staticlibs, d, extraargs=(dvar, dv, d))
            else:
                for file in staticlibs:
                    results.append( (file,source_info(file, d)) )

        # Record the per-file debug source lists (PKGD-relative paths).
        d.setVar("PKGDEBUGSOURCES", {strip_pkgd_prefix(f): sorted(s) for f, s in results})

        sources = set()
        for r in results:
            sources.update(r[1])

        # Hardlink our debug symbols to the other hardlink copies
        for ref in inodes:
            if len(inodes[ref]) == 1:
                continue

            target = inodes[ref][0][len(dvar):]
            for file in inodes[ref][1:]:
                src = file[len(dvar):]
                dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(target) + dv["append"]
                fpath = dvar + dest
                ftarget = dvar + dv["libdir"] + os.path.dirname(target) + dv["dir"] + "/" + os.path.basename(target) + dv["append"]
                bb.utils.mkdirhier(os.path.dirname(fpath))
                # Only one hardlink of separated debug info file in each directory
                if not os.access(fpath, os.R_OK):
                    #bb.note("Link %s -> %s" % (fpath, ftarget))
                    os.link(ftarget, fpath)

        # Create symlinks for all cases we were able to split symbols
        for file in symlinks:
            src = file[len(dvar):]
            dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
            fpath = dvar + dest
            # Skip it if the target doesn't exist
            try:
                s = os.stat(fpath)
            except OSError as e:
                (err, strerror) = e.args
                if err != errno.ENOENT:
                    raise
                continue

            # Rebuild a link target pointing at the debug file equivalent of
            # the original symlink target.
            ltarget = symlinks[file]
            lpath = os.path.dirname(ltarget)
            lbase = os.path.basename(ltarget)
            ftarget = ""
            if lpath and lpath != ".":
                ftarget += lpath + dv["dir"] + "/"
            ftarget += lbase + dv["append"]
            if lpath.startswith(".."):
                ftarget = os.path.join("..", ftarget)
            bb.utils.mkdirhier(os.path.dirname(fpath))
            #bb.note("Symlink %s -> %s" % (fpath, ftarget))
            os.symlink(ftarget, fpath)

        # Process the dv["srcdir"] if requested...
        # This copies and places the referenced sources for later debugging...
        copydebugsources(dv["srcdir"], sources, d)
    #
    # End of debug splitting
    #

    #
    # Now lets go back over things and strip them
    #
    if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1'):
        strip = d.getVar("STRIP")
        sfiles = []
        for file in elffiles:
            elf_file = int(elffiles[file])
            #bb.note("Strip %s" % file)
            sfiles.append((file, elf_file, strip))
        if (d.getVar('PACKAGE_STRIP_STATIC') == '1' or d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
            for f in staticlibs:
                # 16 marks the entry as a static library for runstrip.
                sfiles.append((f, 16, strip))

        oe.utils.multiprocess_launch(oe.package.runstrip, sfiles, d)

    # Build "minidebuginfo" and reinject it back into the stripped binaries
    if d.getVar('PACKAGE_MINIDEBUGINFO') == '1':
        oe.utils.multiprocess_launch(inject_minidebuginfo, list(elffiles), d,
                                     extraargs=(dvar, dv, d))

    #
    # End of strip
    #
    os.chdir(oldcwd)
1250
1251
def populate_packages(d):
    """
    Distribute the files installed under PKGD into per-package trees.

    Orders and de-duplicates PACKAGES (source packages first, then debug
    packages, then the rest), then for each package hardlinks/copies the
    paths matched by its FILES:<pkg> variable from PKGD into PKGDEST/<pkg>.
    Files matched by an earlier package are not re-shipped by later ones.
    Finally drops packages excluded for incompatible licenses and raises a
    QA error for installed-but-unshipped files.
    """
    cpath = oe.cachedpath.CachedPath()

    workdir = d.getVar('WORKDIR')
    outdir = d.getVar('DEPLOY_DIR')
    dvar = d.getVar('PKGD')
    packages = d.getVar('PACKAGES').split()
    pn = d.getVar('PN')

    bb.utils.mkdirhier(outdir)
    os.chdir(dvar)

    autodebug = not (d.getVar("NOAUTOPACKAGEDEBUG") or False)

    split_source_package = (d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg')

    # If debug-with-srcpkg mode is enabled then add the source package if it
    # doesn't exist and add the source file contents to the source package.
    if split_source_package:
        src_package_name = ('%s-src' % d.getVar('PN'))
        if src_package_name not in packages:
            packages.append(src_package_name)
        d.setVar('FILES:%s' % src_package_name, '/usr/src/debug')

    # Sanity check PACKAGES for duplicates
    # Sanity should be moved to sanity.bbclass once we have the infrastructure
    package_dict = {}

    for i, pkg in enumerate(packages):
        if pkg in package_dict:
            msg = "%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg
            oe.qa.handle_error("packages-list", msg, d)
        # Ensure the source package gets the chance to pick up the source files
        # before the debug package by ordering it first in PACKAGES. Whether it
        # actually picks up any source files is controlled by
        # PACKAGE_DEBUG_SPLIT_STYLE.
        elif pkg.endswith("-src"):
            package_dict[pkg] = (10, i)
        elif autodebug and pkg.endswith("-dbg"):
            package_dict[pkg] = (30, i)
        else:
            package_dict[pkg] = (50, i)
    packages = sorted(package_dict.keys(), key=package_dict.get)
    d.setVar('PACKAGES', ' '.join(packages))
    pkgdest = d.getVar('PKGDEST')

    seen = []

    # os.mkdir masks the permissions with umask so we have to unset it first
    oldumask = os.umask(0)

    # Collect all debug-split paths once; they are appended to any -dbg
    # package's file list below.
    debug = []
    for root, dirs, files in cpath.walk(dvar):
        dir = root[len(dvar):]
        if not dir:
            dir = os.sep
        for f in (files + dirs):
            path = "." + os.path.join(dir, f)
            if "/.debug/" in path or "/.debug-static/" in path or path.endswith("/.debug"):
                debug.append(path)

    for pkg in packages:
        root = os.path.join(pkgdest, pkg)
        bb.utils.mkdirhier(root)

        filesvar = d.getVar('FILES:%s' % pkg) or ""
        if "//" in filesvar:
            msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg
            oe.qa.handle_error("files-invalid", msg, d)
            # Bug fix: the replace() result was previously discarded, so the
            # '//' was never actually collapsed despite the message above.
            filesvar = filesvar.replace("//", "/")

        origfiles = filesvar.split()
        files, symlink_paths = oe.package.files_from_filevars(origfiles)

        if autodebug and pkg.endswith("-dbg"):
            files.extend(debug)

        for file in files:
            if (not cpath.islink(file)) and (not cpath.exists(file)):
                continue
            # First package to claim a path wins; later packages skip it.
            if file in seen:
                continue
            seen.append(file)

            def mkdir(src, dest, p):
                # Create one directory level, copying mode and ownership
                # from the corresponding PKGD directory.
                src = os.path.join(src, p)
                dest = os.path.join(dest, p)
                fstat = cpath.stat(src)
                os.mkdir(dest)
                os.chmod(dest, fstat.st_mode)
                os.chown(dest, fstat.st_uid, fstat.st_gid)
                if p not in seen:
                    seen.append(p)
                cpath.updatecache(dest)

            def mkdir_recurse(src, dest, paths):
                # Recreate the directory chain for `paths` under dest.
                if cpath.exists(dest + '/' + paths):
                    return
                while paths.startswith("./"):
                    paths = paths[2:]
                p = "."
                for c in paths.split("/"):
                    p = os.path.join(p, c)
                    if not cpath.exists(os.path.join(dest, p)):
                        mkdir(src, dest, p)

            if cpath.isdir(file) and not cpath.islink(file):
                mkdir_recurse(dvar, root, file)
                continue

            mkdir_recurse(dvar, root, os.path.dirname(file))
            fpath = os.path.join(root,file)
            if not cpath.islink(file):
                # Regular file: hardlink into the package tree (cheap, and
                # preserves the inode sharing with PKGD).
                os.link(file, fpath)
                continue
            # Symlink: copy it as-is.
            ret = bb.utils.copyfile(file, fpath)
            if ret is False or ret == 0:
                bb.fatal("File population failed")

        # Check if symlink paths exist
        for file in symlink_paths:
            if not os.path.exists(os.path.join(root,file)):
                bb.fatal("File '%s' cannot be packaged into '%s' because its "
                         "parent directory structure does not exist. One of "
                         "its parent directories is a symlink whose target "
                         "directory is not included in the package." %
                         (file, pkg))

    os.umask(oldumask)
    os.chdir(workdir)

    # Handle excluding packages with incompatible licenses
    package_list = []
    for pkg in packages:
        licenses = d.getVar('_exclude_incompatible-' + pkg)
        if licenses:
            msg = "Excluding %s from packaging as it has incompatible license(s): %s" % (pkg, licenses)
            oe.qa.handle_error("incompatible-license", msg, d)
        else:
            package_list.append(pkg)
    d.setVar('PACKAGES', ' '.join(package_list))

    # Anything under PKGD that no package claimed is reported below.
    unshipped = []
    for root, dirs, files in cpath.walk(dvar):
        dir = root[len(dvar):]
        if not dir:
            dir = os.sep
        for f in (files + dirs):
            path = os.path.join(dir, f)
            if ('.' + path) not in seen:
                unshipped.append(path)

    if unshipped:
        msg = pn + ": Files/directories were installed but not shipped in any package:"
        if "installed-vs-shipped" in (d.getVar('INSANE_SKIP:' + pn) or "").split():
            bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn)
        else:
            for f in unshipped:
                msg = msg + "\n  " + f
            msg = msg + "\nPlease set FILES such that these items are packaged. Alternatively if they are unneeded, avoid installing them or delete them within do_install.\n"
            msg = msg + "%s: %d installed and not shipped files." % (pn, len(unshipped))
            oe.qa.handle_error("installed-vs-shipped", msg, d)
1414
def process_fixsymlinks(pkgfiles, d):
    """
    Detect symlinks whose targets live in a different package and add
    run-time dependencies so the target package gets installed too.

    pkgfiles -- mapping of package name to the list of absolute paths it ships
    d        -- the datastore

    Symlinks whose target exists in no package at all are only logged.
    """
    cpath = oe.cachedpath.CachedPath()
    pkgdest = d.getVar('PKGDEST')
    packages = d.getVar("PACKAGES", False).split()

    # For each package, record its files (package-relative) and any symlink
    # targets that do not resolve inside the package itself.
    dangling_links = {}
    pkg_files = {}
    for pkg in packages:
        dangling_links[pkg] = []
        pkg_files[pkg] = []
        inst_root = os.path.join(pkgdest, pkg)
        for path in pkgfiles[pkg]:
            rpath = path[len(inst_root):]
            pkg_files[pkg].append(rpath)
            rtarget = cpath.realpath(path, inst_root, True, assume_dir = True)
            if not cpath.lexists(rtarget):
                dangling_links[pkg].append(os.path.normpath(rtarget[len(inst_root):]))

    # Resolve each dangling link against every package's file list.
    newrdepends = {}
    for pkg in dangling_links:
        for l in dangling_links[pkg]:
            found = False
            bb.debug(1, "%s contains dangling link %s" % (pkg, l))
            for p in packages:
                if l in pkg_files[p]:
                    found = True
                    bb.debug(1, "target found in %s" % p)
                    # Target lives in the same package: nothing to add.
                    if p == pkg:
                        break
                    # Target is in another package: record an RDEPENDS on it.
                    if pkg not in newrdepends:
                        newrdepends[pkg] = []
                    newrdepends[pkg].append(p)
                    break
            if found == False:
                bb.note("%s contains dangling symlink to %s" % (pkg, l))

    # Merge the discovered dependencies into each package's RDEPENDS,
    # preserving any existing version constraints.
    for pkg in newrdepends:
        rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS:' + pkg) or "")
        for p in newrdepends[pkg]:
            if p not in rdepends:
                rdepends[p] = []
        d.setVar('RDEPENDS:' + pkg, bb.utils.join_deps(rdepends, commasep=False))
1457
def process_filedeps(pkgfiles, d):
    """Gather per-file runtime dependency metadata via the RPMDEPS helper.

    Writes into the datastore:
      FILERPROVIDES:<file>:<pkg>  - provides recorded for a single file
      FILERPROVIDESFLIST:<pkg>    - list of all files with provides
      FILERDEPENDS:<file>:<pkg>   - requires recorded for a single file
      FILERDEPENDSFLIST:<pkg>     - list of all files with requires

    Skipped entirely when SKIP_FILEDEPS == '1'; individual packages can opt
    out via SKIP_FILEDEPS:<pkg>, and dbg/doc/locale/gconv/charmap/
    kernel-module/src packages are never scanned.
    """
    if d.getVar('SKIP_FILEDEPS') == '1':
        return

    pkgdest = d.getVar('PKGDEST')
    packages = d.getVar('PACKAGES')
    rpmdeps = d.getVar('RPMDEPS')

    # Build the work queue: 100-file batches per package, one tuple per batch.
    pkglist = []
    for pkg in packages.split():
        if d.getVar('SKIP_FILEDEPS:' + pkg) == '1':
            continue
        if pkg.endswith(('-dbg', '-doc', '-src')) \
                or pkg.startswith('kernel-module-') \
                or any(infix in pkg for infix in ('-locale-', '-localedata-', '-gconv-', '-charmap-')):
            continue
        filelist = pkgfiles[pkg]
        for start in range(0, len(filelist), 100):
            pkglist.append((pkg, filelist[start:start + 100], rpmdeps, pkgdest))

    processed = oe.utils.multiprocess_launch(oe.package.filedeprunner, pkglist, d)

    # Fold the per-batch results back into per-file and per-package variables.
    provides_files = {}
    requires_files = {}
    for pkg, provides, requires in processed:
        plist = provides_files.setdefault(pkg, [])
        rlist = requires_files.setdefault(pkg, [])

        for fname in sorted(provides):
            plist.append(fname)
            d.appendVar("FILERPROVIDES:" + fname + ":" + pkg, " " + " ".join(provides[fname]))

        for fname in sorted(requires):
            rlist.append(fname)
            d.appendVar("FILERDEPENDS:" + fname + ":" + pkg, " " + " ".join(requires[fname]))

    for pkg in requires_files:
        d.setVar("FILERDEPENDSFLIST:" + pkg, " ".join(sorted(requires_files[pkg])))
    for pkg in provides_files:
        d.setVar("FILERPROVIDESFLIST:" + pkg, " ".join(sorted(provides_files[pkg])))
1514
def process_shlibs(pkgfiles, d):
    """Generate shared-library provider and consumer metadata.

    Pass 1: for each package (PACKAGES minus EXCLUDE_PACKAGES_FROM_SHLIBS),
    scan its files for shared libraries (ELF via objdump, Mach-O via otool,
    PE via objdump depending on HOST_OS), write the sonames it provides to
    SHLIBSWORKDIR/<pkg>.list, and append an ldconfig fragment to
    pkg_postinst when a library lands in the base library directory.
    Pass 2: resolve each recorded NEEDED entry against the provider map and
    write the resulting package dependencies to PKGDEST/<pkg>.shlibdeps.
    Returns early (doing nothing) when EXCLUDE_FROM_SHLIBS is set or PKGV
    is undefined.
    """
    cpath = oe.cachedpath.CachedPath()

    exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', False)
    if exclude_shlibs:
        bb.note("not generating shlibs")
        return

    # Names containing ".so" are candidate libraries even without the exec
    # bit; libdir_re matches paths ending in the base library directory.
    lib_re = re.compile(r"^.*\.so")
    libdir_re = re.compile(r".*/%s$" % d.getVar('baselib'))

    packages = d.getVar('PACKAGES')

    shlib_pkgs = []
    exclusion_list = d.getVar("EXCLUDE_PACKAGES_FROM_SHLIBS")
    if exclusion_list:
        for pkg in packages.split():
            if pkg not in exclusion_list.split():
                shlib_pkgs.append(pkg)
            else:
                bb.note("not generating shlibs for %s" % pkg)
    else:
        shlib_pkgs = packages.split()

    hostos = d.getVar('HOST_OS')

    # NOTE(review): 'workdir' is never used in this function.
    workdir = d.getVar('WORKDIR')

    ver = d.getVar('PKGV')
    if not ver:
        msg = "PKGV not defined"
        oe.qa.handle_error("pkgv-undefined", msg, d)
        return

    pkgdest = d.getVar('PKGDEST')

    shlibswork_dir = d.getVar('SHLIBSWORKDIR')

    # Parse `objdump -p` output for one ELF file. Returns a tuple of
    # (needs_ldconfig, NEEDED entries as (name, file, rpath-tuple),
    #  provided sonames as (soname, dir, version), pending renames).
    # Runs in worker processes via multiprocess_launch below.
    def linux_so(file, pkg, pkgver, d):
        needs_ldconfig = False
        needed = set()
        sonames = set()
        renames = []
        ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
        # NOTE(review): pipes.quote has been deprecated in favour of
        # shlex.quote since Python 3.3 (pipes is removed in 3.13) —
        # consider migrating.
        cmd = d.getVar('OBJDUMP') + " -p " + pipes.quote(file) + " 2>/dev/null"
        fd = os.popen(cmd)
        lines = fd.readlines()
        fd.close()
        # RPATH is collected first so NEEDED entries can carry it.
        rpath = tuple()
        for l in lines:
            m = re.match(r"\s+RPATH\s+([^\s]*)", l)
            if m:
                rpaths = m.group(1).replace("$ORIGIN", ldir).split(":")
                rpath = tuple(map(os.path.normpath, rpaths))
        for l in lines:
            m = re.match(r"\s+NEEDED\s+([^\s]*)", l)
            if m:
                dep = m.group(1)
                # NOTE(review): 'needed' holds (name, file, rpath) tuples,
                # so this membership test on the bare name never matches;
                # harmless, since the set dedupes identical tuples anyway.
                if dep not in needed:
                    needed.add((dep, file, rpath))
            m = re.match(r"\s+SONAME\s+([^\s]*)", l)
            if m:
                this_soname = m.group(1)
                prov = (this_soname, ldir, pkgver)
                if not prov in sonames:
                    # if library is private (only used by package) then do not build shlib for it
                    if not private_libs or len([i for i in private_libs if fnmatch.fnmatch(this_soname, i)]) == 0:
                        sonames.add(prov)
                if libdir_re.match(os.path.dirname(file)):
                    needs_ldconfig = True
                if needs_ldconfig and snap_symlinks and (os.path.basename(file) != this_soname):
                    renames.append((file, os.path.join(os.path.dirname(file), this_soname)))
        return (needs_ldconfig, needed, sonames, renames)

    # Mach-O variant: derive provided names from the file name itself and
    # collect dependencies from `otool -L`. Mutates needed/sonames in place.
    def darwin_so(file, needed, sonames, renames, pkgver):
        if not os.path.exists(file):
            return
        ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')

        def get_combinations(base):
            #
            # Given a base library name, find all combinations of this split by "." and "-"
            #
            combos = []
            options = base.split(".")
            for i in range(1, len(options) + 1):
                combos.append(".".join(options[0:i]))
            options = base.split("-")
            for i in range(1, len(options) + 1):
                combos.append("-".join(options[0:i]))
            return combos

        if (file.endswith('.dylib') or file.endswith('.so')) and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.endswith('-src'):
            # Drop suffix
            name = os.path.basename(file).rsplit(".",1)[0]
            # Find all combinations
            combos = get_combinations(name)
            for combo in combos:
                if not combo in sonames:
                    prov = (combo, ldir, pkgver)
                    sonames.add(prov)
        if file.endswith('.dylib') or file.endswith('.so'):
            rpath = []
            p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-l', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            out, err = p.communicate()
            # If returned successfully, process stdout for results
            if p.returncode == 0:
                for l in out.split("\n"):
                    l = l.strip()
                    if l.startswith('path '):
                        rpath.append(l.split()[1])

        p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-L', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out, err = p.communicate()
        # If returned successfully, process stdout for results
        if p.returncode == 0:
            for l in out.split("\n"):
                l = l.strip()
                if not l or l.endswith(":"):
                    continue
                if "is not an object file" in l:
                    continue
                name = os.path.basename(l.split()[0]).rsplit(".", 1)[0]
                if name and name not in needed[pkg]:
                    needed[pkg].add((name, file, tuple()))

    # PE variant: every .dll is treated as provided; dependencies come from
    # the "DLL Name:" lines of `objdump -p`. Mutates needed/sonames in place.
    def mingw_dll(file, needed, sonames, renames, pkgver):
        if not os.path.exists(file):
            return

        if file.endswith(".dll"):
            # assume all dlls are shared objects provided by the package
            sonames.add((os.path.basename(file), os.path.dirname(file).replace(pkgdest + "/" + pkg, ''), pkgver))

        if (file.endswith(".dll") or file.endswith(".exe")):
            # use objdump to search for "DLL Name: .*\.dll"
            p = subprocess.Popen([d.expand("${HOST_PREFIX}objdump"), "-p", file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            out, err = p.communicate()
            # process the output, grabbing all .dll names
            if p.returncode == 0:
                for m in re.finditer(r"DLL Name: (.*?\.dll)$", out.decode(), re.MULTILINE | re.IGNORECASE):
                    dllname = m.group(1)
                    if dllname:
                        needed[pkg].add((dllname, file, tuple()))

    if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS') == "1":
        snap_symlinks = True
    else:
        snap_symlinks = False

    needed = {}

    shlib_provider = oe.package.read_shlib_providers(d)

    # Pass 1: record what each package provides and needs.
    for pkg in shlib_pkgs:
        private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or ""
        private_libs = private_libs.split()
        needs_ldconfig = False
        bb.debug(2, "calculating shlib provides for %s" % pkg)

        pkgver = d.getVar('PKGV:' + pkg)
        if not pkgver:
            pkgver = d.getVar('PV_' + pkg)
        if not pkgver:
            pkgver = ver

        needed[pkg] = set()
        sonames = set()
        renames = []
        linuxlist = []
        for file in pkgfiles[pkg]:
            # NOTE(review): 'soname' is assigned but never used.
            soname = None
            if cpath.islink(file):
                continue
            if hostos == "darwin" or hostos == "darwin8":
                darwin_so(file, needed, sonames, renames, pkgver)
            elif hostos.startswith("mingw"):
                mingw_dll(file, needed, sonames, renames, pkgver)
            elif os.access(file, os.X_OK) or lib_re.match(file):
                linuxlist.append(file)

        # ELF files are analysed in parallel worker processes.
        if linuxlist:
            results = oe.utils.multiprocess_launch(linux_so, linuxlist, d, extraargs=(pkg, pkgver, d))
            for r in results:
                ldconfig = r[0]
                needed[pkg] |= r[1]
                sonames |= r[2]
                renames.extend(r[3])
                needs_ldconfig = needs_ldconfig or ldconfig

        for (old, new) in renames:
            bb.note("Renaming %s to %s" % (old, new))
            bb.utils.rename(old, new)
            pkgfiles[pkg].remove(old)

        # Persist the provider list and update the in-memory map so later
        # packages in this run can already resolve against it.
        shlibs_file = os.path.join(shlibswork_dir, pkg + ".list")
        if len(sonames):
            with open(shlibs_file, 'w') as fd:
                for s in sorted(sonames):
                    if s[0] in shlib_provider and s[1] in shlib_provider[s[0]]:
                        (old_pkg, old_pkgver) = shlib_provider[s[0]][s[1]]
                        if old_pkg != pkg:
                            bb.warn('%s-%s was registered as shlib provider for %s, changing it to %s-%s because it was built later' % (old_pkg, old_pkgver, s[0], pkg, pkgver))
                    bb.debug(1, 'registering %s-%s as shlib provider for %s' % (pkg, pkgver, s[0]))
                    fd.write(s[0] + ':' + s[1] + ':' + s[2] + '\n')
                    if s[0] not in shlib_provider:
                        shlib_provider[s[0]] = {}
                    shlib_provider[s[0]][s[1]] = (pkg, pkgver)
        if needs_ldconfig:
            bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
            postinst = d.getVar('pkg_postinst:%s' % pkg)
            if not postinst:
                postinst = '#!/bin/sh\n'
            postinst += d.getVar('ldconfig_postinst_fragment')
            d.setVar('pkg_postinst:%s' % pkg, postinst)
        bb.debug(1, 'LIBNAMES: pkg %s sonames %s' % (pkg, sonames))

    # ASSUME_SHLIBS lets recipes declare external providers ("lib:pkg[_ver]").
    assumed_libs = d.getVar('ASSUME_SHLIBS')
    if assumed_libs:
        libdir = d.getVar("libdir")
        for e in assumed_libs.split():
            l, dep_pkg = e.split(":")
            lib_ver = None
            dep_pkg = dep_pkg.rsplit("_", 1)
            if len(dep_pkg) == 2:
                lib_ver = dep_pkg[1]
            dep_pkg = dep_pkg[0]
            if l not in shlib_provider:
                shlib_provider[l] = {}
            shlib_provider[l][libdir] = (dep_pkg, lib_ver)

    libsearchpath = [d.getVar('libdir'), d.getVar('base_libdir')]

    # Pass 2: resolve NEEDED entries to providing packages and emit
    # PKGDEST/<pkg>.shlibdeps.
    for pkg in shlib_pkgs:
        bb.debug(2, "calculating shlib requirements for %s" % pkg)

        private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or ""
        private_libs = private_libs.split()

        deps = list()
        for n in needed[pkg]:
            # if n is in private libraries, don't try to search provider for it
            # this could cause problem in case some abc.bb provides private
            # /opt/abc/lib/libfoo.so.1 and contains /usr/bin/abc depending on system library libfoo.so.1
            # but skipping it is still better alternative than providing own
            # version and then adding runtime dependency for the same system library
            if private_libs and len([i for i in private_libs if fnmatch.fnmatch(n[0], i)]) > 0:
                bb.debug(2, '%s: Dependency %s covered by PRIVATE_LIBS' % (pkg, n[0]))
                continue
            if n[0] in shlib_provider.keys():
                shlib_provider_map = shlib_provider[n[0]]
                matches = set()
                # Search order: the binary's own RPATH, then all recorded
                # provider dirs, then the default library search path.
                for p in itertools.chain(list(n[2]), sorted(shlib_provider_map.keys()), libsearchpath):
                    if p in shlib_provider_map:
                        matches.add(p)
                if len(matches) > 1:
                    matchpkgs = ', '.join([shlib_provider_map[match][0] for match in matches])
                    bb.error("%s: Multiple shlib providers for %s: %s (used by files: %s)" % (pkg, n[0], matchpkgs, n[1]))
                elif len(matches) == 1:
                    (dep_pkg, ver_needed) = shlib_provider_map[matches.pop()]

                    bb.debug(2, '%s: Dependency %s requires package %s (used by files: %s)' % (pkg, n[0], dep_pkg, n[1]))

                    if dep_pkg == pkg:
                        continue

                    if ver_needed:
                        dep = "%s (>= %s)" % (dep_pkg, ver_needed)
                    else:
                        dep = dep_pkg
                    if not dep in deps:
                        deps.append(dep)
                    continue
            bb.note("Couldn't find shared library provider for %s, used by files: %s" % (n[0], n[1]))

        deps_file = os.path.join(pkgdest, pkg + ".shlibdeps")
        if os.path.exists(deps_file):
            os.remove(deps_file)
        if deps:
            with open(deps_file, 'w') as fd:
                for dep in sorted(deps):
                    fd.write(dep + '\n')
1797
def process_pkgconfig(pkgfiles, d):
    """Derive inter-package dependencies from pkg-config (.pc) files.

    For each package: parse its .pc files (expanding each file's own
    variable definitions so the Requires: field resolves), record what it
    provides and needs, write the provides to SHLIBSWORKDIR/<pkg>.pclist,
    merge in pclists from SHLIBSDIRS (least to most specific, last wins),
    then write the packages satisfying each requirement to
    PKGDEST/<pkg>.pcdeps. Unsatisfiable modules are only noted, not fatal.
    """
    packages = d.getVar('PACKAGES')
    # Fix: dropped the unused 'workdir' local (WORKDIR was read but never used).
    pkgdest = d.getVar('PKGDEST')

    shlibs_dirs = d.getVar('SHLIBSDIRS').split()
    shlibswork_dir = d.getVar('SHLIBSWORKDIR')

    pc_re = re.compile(r'(.*)\.pc$')
    var_re = re.compile(r'(.*)=(.*)')
    field_re = re.compile(r'(.*): (.*)')

    pkgconfig_provided = {}
    pkgconfig_needed = {}
    for pkg in packages.split():
        pkgconfig_provided[pkg] = []
        pkgconfig_needed[pkg] = []
        for file in sorted(pkgfiles[pkg]):
            m = pc_re.match(file)
            if not m:
                continue
            # A fresh datastore per .pc file, populated with the file's own
            # "name=value" lines, is used to expand field values.
            pd = bb.data.init()
            name = m.group(1)
            pkgconfig_provided[pkg].append(os.path.basename(name))
            if not os.access(file, os.R_OK):
                continue
            with open(file, 'r') as f:
                lines = f.readlines()
            for l in lines:
                m = var_re.match(l)
                if m:
                    name = m.group(1)
                    val = m.group(2)
                    pd.setVar(name, pd.expand(val))
                    continue
                m = field_re.match(l)
                if m:
                    hdr = m.group(1)
                    exp = pd.expand(m.group(2))
                    if hdr == 'Requires':
                        # Requires entries may be comma- or space-separated.
                        pkgconfig_needed[pkg] += exp.replace(',', ' ').split()

    for pkg in packages.split():
        pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist")
        if pkgconfig_provided[pkg]:
            with open(pkgs_file, 'w') as f:
                for p in sorted(pkgconfig_provided[pkg]):
                    f.write('%s\n' % p)

    # Go from least to most specific since the last one found wins
    for dir in reversed(shlibs_dirs):
        if not os.path.exists(dir):
            continue
        for file in sorted(os.listdir(dir)):
            m = re.match(r'^(.*)\.pclist$', file)
            if m:
                pkg = m.group(1)
                with open(os.path.join(dir, file)) as fd:
                    lines = fd.readlines()
                pkgconfig_provided[pkg] = [l.rstrip() for l in lines]

    for pkg in packages.split():
        deps = []
        for n in pkgconfig_needed[pkg]:
            found = False
            for k in pkgconfig_provided.keys():
                if n in pkgconfig_provided[k]:
                    if k != pkg and k not in deps:
                        deps.append(k)
                    found = True
            if not found:
                bb.note("couldn't find pkgconfig module '%s' in any package" % n)
        deps_file = os.path.join(pkgdest, pkg + ".pcdeps")
        if deps:
            with open(deps_file, 'w') as fd:
                for dep in deps:
                    fd.write(dep + '\n')
1876
def read_libdep_files(d):
    """Read the per-package library dependency files written by the shlib,
    pkg-config and CLR processing steps (.shlibdeps, .pcdeps, .clilibdeps).

    Returns a dict mapping package name -> {dependency: version constraints},
    keeping the first constraint list seen for each dependency.
    """
    pkglibdeps = {}
    packages = d.getVar('PACKAGES').split()
    for pkg in packages:
        pkglibdeps[pkg] = {}
        for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
            depsfile = d.expand("${PKGDEST}/" + pkg + extension)
            if os.access(depsfile, os.R_OK):
                with open(depsfile) as fd:
                    lines = fd.readlines()
                for l in lines:
                    # Fix: str.rstrip() returns a new string; the original
                    # discarded the result, passing the trailing newline
                    # through to explode_dep_versions2.
                    l = l.rstrip()
                    deps = bb.utils.explode_dep_versions2(l)
                    for dep in deps:
                        if dep not in pkglibdeps[pkg]:
                            pkglibdeps[pkg][dep] = deps[dep]
    return pkglibdeps
1894
def process_depchains(pkgfiles, d):
    """
    For a given set of prefix and postfix modifiers, make those packages
    RRECOMMENDS on the corresponding packages for its RDEPENDS.

    Example: If package A depends upon package B, and A's .bb emits an
    A-dev package, this would make A-dev Recommends: B-dev.

    If only one of a given suffix is specified, it will take the RRECOMMENDS
    based on the RDEPENDS of *all* other packages. If more than one of a given
    suffix is specified, its will only use the RDEPENDS of the single parent
    package.
    """

    packages = d.getVar('PACKAGES')
    postfixes = (d.getVar('DEPCHAIN_POST') or '').split()
    prefixes = (d.getVar('DEPCHAIN_PRE') or '').split()

    def pkg_addrreclist(pkg, suffix, getname, depends, skip, d):
        # Shared worker for the two variants below: for each dependency not
        # rejected by skip(), map it to its base package (-dev/-dbg stripped)
        # and add getname(base, suffix) to RRECOMMENDS:pkg.
        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "")

        for depend in sorted(depends):
            if skip(depend):
                continue
            if depend.endswith('-dev'):
                depend = depend[:-4]
            if depend.endswith('-dbg'):
                depend = depend[:-4]
            pkgname = getname(depend, suffix)
            if pkgname not in rreclist and pkgname != pkg:
                rreclist[pkgname] = []

        d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))

    def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d):
        # Build-time (DEPENDS) variant: native/cross tools and virtual
        # providers have no corresponding runtime package, so skip them.
        pkg_addrreclist(pkg, suffix, getname, depends,
                        lambda dep: '-native' in dep or '-cross' in dep or dep.startswith('virtual/'),
                        d)

    def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):
        # Runtime (RDEPENDS) variant: skip locale metapackages.
        pkg_addrreclist(pkg, suffix, getname, rdepends,
                        lambda dep: 'virtual-locale-' in dep,
                        d)

    def add_dep(list, dep):
        # Ordered, duplicate-free append.
        if dep not in list:
            list.append(dep)

    depends = []
    for dep in bb.utils.explode_deps(d.getVar('DEPENDS') or ""):
        add_dep(depends, dep)

    rdepends = []
    for pkg in packages.split():
        for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + pkg) or ""):
            add_dep(rdepends, dep)

    def post_getname(name, suffix):
        return '%s%s' % (name, suffix)
    def pre_getname(name, suffix):
        return '%s%s' % (suffix, name)

    # Map each modifier to the packages carrying it, remembering the base
    # package name and how to reconstruct a modified name from a base.
    pkgs = {}
    for pkg in packages.split():
        for postfix in postfixes:
            if pkg.endswith(postfix):
                if postfix not in pkgs:
                    pkgs[postfix] = {}
                pkgs[postfix][pkg] = (pkg[:-len(postfix)], post_getname)

        for prefix in prefixes:
            if pkg.startswith(prefix):
                if prefix not in pkgs:
                    pkgs[prefix] = {}
                # Fix: the base of a prefixed package is the name with the
                # prefix removed from the *front*; the previous code used
                # pkg[:-len(prefix)], which stripped the tail instead.
                pkgs[prefix][pkg] = (pkg[len(prefix):], pre_getname)

    if "-dbg" in pkgs:
        pkglibdeps = read_libdep_files(d)
        pkglibdeplist = []
        for pkg in pkglibdeps:
            for k in pkglibdeps[pkg]:
                add_dep(pkglibdeplist, k)
        # -dbg packages recommend the -dbg of every library provider unless
        # DEPCHAIN_DBGDEFAULTDEPS opts out (packagegroups always opt out).
        dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS') == '1') or (bb.data.inherits_class('packagegroup', d)))

    for suffix in pkgs:
        for pkg in pkgs[suffix]:
            # Per-package escape hatch via the 'nodeprrecs' varflag.
            if d.getVarFlag('RRECOMMENDS:' + pkg, 'nodeprrecs'):
                continue
            (base, func) = pkgs[suffix][pkg]
            if suffix == "-dev":
                pkg_adddeprrecs(pkg, base, suffix, func, depends, d)
            elif suffix == "-dbg":
                if not dbgdefaultdeps:
                    pkg_addrrecs(pkg, base, suffix, func, pkglibdeplist, d)
                    continue
            if len(pkgs[suffix]) == 1:
                # Single package with this modifier: use the union of all
                # packages' RDEPENDS.
                pkg_addrrecs(pkg, base, suffix, func, rdepends, d)
            else:
                # Several packages share the modifier: use only the RDEPENDS
                # of this package's own base.
                rdeps = []
                for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + base) or ""):
                    add_dep(rdeps, dep)
                pkg_addrrecs(pkg, base, suffix, func, rdeps, d)
2015