Diffstat (limited to 'meta/classes/package.bbclass')
 -rw-r--r--  meta/classes/package.bbclass | 2558
 1 file changed, 0 insertions(+), 2558 deletions(-)
diff --git a/meta/classes/package.bbclass b/meta/classes/package.bbclass
deleted file mode 100644
index 418400da8c..0000000000
--- a/meta/classes/package.bbclass
+++ /dev/null
@@ -1,2558 +0,0 @@
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: MIT | ||
| 5 | # | ||
| 6 | |||
| 7 | # | ||
| 8 | # Packaging process | ||
| 9 | # | ||
| 10 | # Executive summary: This class iterates over the functions listed in PACKAGEFUNCS, | ||
| 11 | # taking D and splitting it up into the packages listed in PACKAGES and placing the | ||
| 12 | # resulting output in PKGDEST. | ||
| 13 | # | ||
| 14 | # There are the following default steps, but PACKAGEFUNCS can be extended: | ||
| 15 | # | ||
| 16 | # a) package_convert_pr_autoinc - convert AUTOINC in PKGV to ${PRSERV_PV_AUTOINC} | ||
| 17 | # | ||
| 18 | # b) perform_packagecopy - Copy D into PKGD | ||
| 19 | # | ||
| 20 | # c) package_do_split_locales - Split out the locale files, updates FILES and PACKAGES | ||
| 21 | # | ||
| 22 | # d) split_and_strip_files - split the files into runtime and debug and strip them. | ||
| 23 | # Debug files include debug info split, and associated sources that end up in -dbg packages | ||
| 24 | # | ||
| 25 | # e) fixup_perms - Fix up permissions in the package before we split it. | ||
| 26 | # | ||
| 27 | # f) populate_packages - Split the files in PKGD into separate packages in PKGDEST/<pkgname> | ||
| 28 | # Also triggers the binary stripping code to put files in -dbg packages. | ||
| 29 | # | ||
| 30 | # g) package_do_filedeps - Collect perfile run-time dependency metadata | ||
| 31 | # The data is stored in FILER{PROVIDES,DEPENDS}_file_pkg variables with | ||
| 32 | # a list of affected files in FILER{PROVIDES,DEPENDS}FLIST_pkg | ||
| 33 | # | ||
| 34 | # h) package_do_shlibs - Look at the shared libraries generated and automatically add any | ||
| 35 | # dependencies found. Also stores the package name so anyone else using this library | ||
| 36 | # knows which package to depend on. | ||
| 37 | # | ||
| 38 | # i) package_do_pkgconfig - Keep track of which packages need and provide which .pc files | ||
| 39 | # | ||
| 40 | # j) read_shlibdeps - Reads the stored shlibs information into the metadata | ||
| 41 | # | ||
| 42 | # k) package_depchains - Adds automatic dependencies to -dbg and -dev packages | ||
| 43 | # | ||
| 44 | # l) emit_pkgdata - saves the packaging data into PKGDATA_DIR for use in later | ||
| 45 | # packaging steps | ||
| 46 | |||
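As an illustrative aside (not part of the original file): a recipe or class inheriting package.bbclass can hook into the flow above by adding its own step to PACKAGEFUNCS. The function name my_custom_packagefunc below is hypothetical.

PACKAGEFUNCS += "my_custom_packagefunc"

python my_custom_packagefunc () {
    # Runs after the default steps; PKGDEST already holds the split packages
    bb.note("Packages produced: %s" % d.getVar('PACKAGES'))
}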
| 47 | inherit packagedata | ||
| 48 | inherit chrpath | ||
| 49 | inherit package_pkgdata | ||
| 50 | inherit insane | ||
| 51 | |||
| 52 | PKGD = "${WORKDIR}/package" | ||
| 53 | PKGDEST = "${WORKDIR}/packages-split" | ||
| 54 | |||
| 55 | LOCALE_SECTION ?= '' | ||
| 56 | |||
| 57 | ALL_MULTILIB_PACKAGE_ARCHS = "${@all_multilib_tune_values(d, 'PACKAGE_ARCHS')}" | ||
| 58 | |||
| 59 | # rpm is used for the per-file dependency identification | ||
| 60 | # dwarfsrcfiles is used to determine the list of debug source files | ||
| 61 | PACKAGE_DEPENDS += "rpm-native dwarfsrcfiles-native" | ||
| 62 | |||
| 63 | |||
| 64 | # If your postinstall can execute at rootfs creation time rather than on | ||
| 65 | # target but depends on a native/cross tool in order to execute, you need to | ||
| 66 | # list that tool in PACKAGE_WRITE_DEPS. Target package dependencies belong | ||
| 67 | # in the package dependencies as normal, this is just for native/cross support | ||
| 68 | # tools at rootfs build time. | ||
| 69 | PACKAGE_WRITE_DEPS ??= "" | ||
| 70 | |||
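A hedged example of the above (recipe-level, not from this file): a package whose postinst can run at rootfs creation time but needs a native or cross helper might declare it as follows; the dependency name is only illustrative.

PACKAGE_WRITE_DEPS += "qemuwrapper-cross"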
| 71 | def legitimize_package_name(s): | ||
| 72 | """ | ||
| 73 | Make sure package names are legitimate strings | ||
| 74 | """ | ||
| 75 | import re | ||
| 76 | |||
| 77 | def fixutf(m): | ||
| 78 | cp = m.group(1) | ||
| 79 | if cp: | ||
| 80 | return ('\\u%s' % cp).encode('latin-1').decode('unicode_escape') | ||
| 81 | |||
| 82 | # Handle unicode codepoints encoded as <U0123>, as in glibc locale files. | ||
| 83 | s = re.sub(r'<U([0-9A-Fa-f]{1,4})>', fixutf, s) | ||
| 84 | |||
| 85 | # Remaining package name validity fixes | ||
| 86 | return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-') | ||
| 87 | |||
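A rough sketch of what the helper above produces, based on the substitutions it performs (example inputs assumed, not taken from the file):

# legitimize_package_name("GConf_2.0")  -> "gconf-2.0"
# legitimize_package_name("<U0041>BC")  -> "abc"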
| 88 | def do_split_packages(d, root, file_regex, output_pattern, description, postinst=None, recursive=False, hook=None, extra_depends=None, aux_files_pattern=None, postrm=None, allow_dirs=False, prepend=False, match_path=False, aux_files_pattern_verbatim=None, allow_links=False, summary=None): | ||
| 89 | """ | ||
| 90 | Used in .bb files to split up dynamically generated subpackages of a | ||
| 91 | given package, usually plugins or modules. | ||
| 92 | |||
| 93 | Arguments: | ||
| 94 | root -- the path in which to search | ||
| 95 | file_regex -- regular expression to match searched files. Use | ||
| 96 | parentheses () to mark the part of this expression | ||
| 97 | that should be used to derive the module name (to be | ||
| 98 | substituted where %s is used in other function | ||
| 99 | arguments as noted below) | ||
| 100 | output_pattern -- pattern to use for the package names. Must include %s. | ||
| 101 | description -- description to set for each package. Must include %s. | ||
| 102 | postinst -- postinstall script to use for all packages (as a | ||
| 103 | string) | ||
| 104 | recursive -- True to perform a recursive search - default False | ||
| 105 | hook -- a hook function to be called for every match. The | ||
| 106 | function will be called with the following arguments | ||
| 107 | (in the order listed): | ||
| 108 | f: full path to the file/directory match | ||
| 109 | pkg: the package name | ||
| 110 | file_regex: as above | ||
| 111 | output_pattern: as above | ||
| 112 | modulename: the module name derived using file_regex | ||
| 113 | extra_depends -- extra runtime dependencies (RDEPENDS) to be set for | ||
| 114 | all packages. The default value of None causes a | ||
| 115 | dependency on the main package (${PN}) - if you do | ||
| 116 | not want this, pass '' for this parameter. | ||
| 117 | aux_files_pattern -- extra item(s) to be added to FILES for each | ||
| 118 | package. Can be a single string item or a list of | ||
| 119 | strings for multiple items. Must include %s. | ||
| 120 | postrm -- postrm script to use for all packages (as a string) | ||
| 121 | allow_dirs -- True allow directories to be matched - default False | ||
| 122 | prepend -- if True, prepend created packages to PACKAGES instead | ||
| 123 | of the default False which appends them | ||
| 124 | match_path -- match file_regex on the whole relative path to the | ||
| 125 | root rather than just the file name | ||
| 126 | aux_files_pattern_verbatim -- extra item(s) to be added to FILES for | ||
| 127 | each package, using the actual derived module name | ||
| 128 | rather than converting it to something legal for a | ||
| 129 | package name. Can be a single string item or a list | ||
| 130 | of strings for multiple items. Must include %s. | ||
| 131 | allow_links -- True to allow symlinks to be matched - default False | ||
| 132 | summary -- Summary to set for each package. Must include %s; | ||
| 133 | defaults to description if not set. | ||
| 134 | |||
| 135 | """ | ||
| 136 | |||
| 137 | dvar = d.getVar('PKGD') | ||
| 138 | root = d.expand(root) | ||
| 139 | output_pattern = d.expand(output_pattern) | ||
| 140 | extra_depends = d.expand(extra_depends) | ||
| 141 | |||
| 142 | # If the root directory doesn't exist, don't error out later but silently do | ||
| 143 | # no splitting. | ||
| 144 | if not os.path.exists(dvar + root): | ||
| 145 | return [] | ||
| 146 | |||
| 147 | ml = d.getVar("MLPREFIX") | ||
| 148 | if ml: | ||
| 149 | if not output_pattern.startswith(ml): | ||
| 150 | output_pattern = ml + output_pattern | ||
| 151 | |||
| 152 | newdeps = [] | ||
| 153 | for dep in (extra_depends or "").split(): | ||
| 154 | if dep.startswith(ml): | ||
| 155 | newdeps.append(dep) | ||
| 156 | else: | ||
| 157 | newdeps.append(ml + dep) | ||
| 158 | if newdeps: | ||
| 159 | extra_depends = " ".join(newdeps) | ||
| 160 | |||
| 161 | |||
| 162 | packages = d.getVar('PACKAGES').split() | ||
| 163 | split_packages = set() | ||
| 164 | |||
| 165 | if postinst: | ||
| 166 | postinst = '#!/bin/sh\n' + postinst + '\n' | ||
| 167 | if postrm: | ||
| 168 | postrm = '#!/bin/sh\n' + postrm + '\n' | ||
| 169 | if not recursive: | ||
| 170 | objs = os.listdir(dvar + root) | ||
| 171 | else: | ||
| 172 | objs = [] | ||
| 173 | for walkroot, dirs, files in os.walk(dvar + root): | ||
| 174 | for file in files: | ||
| 175 | relpath = os.path.join(walkroot, file).replace(dvar + root + '/', '', 1) | ||
| 176 | if relpath: | ||
| 177 | objs.append(relpath) | ||
| 178 | |||
| 179 | if extra_depends == None: | ||
| 180 | extra_depends = d.getVar("PN") | ||
| 181 | |||
| 182 | if not summary: | ||
| 183 | summary = description | ||
| 184 | |||
| 185 | for o in sorted(objs): | ||
| 186 | import re, stat | ||
| 187 | if match_path: | ||
| 188 | m = re.match(file_regex, o) | ||
| 189 | else: | ||
| 190 | m = re.match(file_regex, os.path.basename(o)) | ||
| 191 | |||
| 192 | if not m: | ||
| 193 | continue | ||
| 194 | f = os.path.join(dvar + root, o) | ||
| 195 | mode = os.lstat(f).st_mode | ||
| 196 | if not (stat.S_ISREG(mode) or (allow_links and stat.S_ISLNK(mode)) or (allow_dirs and stat.S_ISDIR(mode))): | ||
| 197 | continue | ||
| 198 | on = legitimize_package_name(m.group(1)) | ||
| 199 | pkg = output_pattern % on | ||
| 200 | split_packages.add(pkg) | ||
| 201 | if not pkg in packages: | ||
| 202 | if prepend: | ||
| 203 | packages = [pkg] + packages | ||
| 204 | else: | ||
| 205 | packages.append(pkg) | ||
| 206 | oldfiles = d.getVar('FILES:' + pkg) | ||
| 207 | newfile = os.path.join(root, o) | ||
| 208 | # These names will be passed through glob() so if the filename actually | ||
| 209 | # contains * or ? (rare, but possible) we need to handle that specially | ||
| 210 | newfile = newfile.replace('*', '[*]') | ||
| 211 | newfile = newfile.replace('?', '[?]') | ||
| 212 | if not oldfiles: | ||
| 213 | the_files = [newfile] | ||
| 214 | if aux_files_pattern: | ||
| 215 | if type(aux_files_pattern) is list: | ||
| 216 | for fp in aux_files_pattern: | ||
| 217 | the_files.append(fp % on) | ||
| 218 | else: | ||
| 219 | the_files.append(aux_files_pattern % on) | ||
| 220 | if aux_files_pattern_verbatim: | ||
| 221 | if type(aux_files_pattern_verbatim) is list: | ||
| 222 | for fp in aux_files_pattern_verbatim: | ||
| 223 | the_files.append(fp % m.group(1)) | ||
| 224 | else: | ||
| 225 | the_files.append(aux_files_pattern_verbatim % m.group(1)) | ||
| 226 | d.setVar('FILES:' + pkg, " ".join(the_files)) | ||
| 227 | else: | ||
| 228 | d.setVar('FILES:' + pkg, oldfiles + " " + newfile) | ||
| 229 | if extra_depends != '': | ||
| 230 | d.appendVar('RDEPENDS:' + pkg, ' ' + extra_depends) | ||
| 231 | if not d.getVar('DESCRIPTION:' + pkg): | ||
| 232 | d.setVar('DESCRIPTION:' + pkg, description % on) | ||
| 233 | if not d.getVar('SUMMARY:' + pkg): | ||
| 234 | d.setVar('SUMMARY:' + pkg, summary % on) | ||
| 235 | if postinst: | ||
| 236 | d.setVar('pkg_postinst:' + pkg, postinst) | ||
| 237 | if postrm: | ||
| 238 | d.setVar('pkg_postrm:' + pkg, postrm) | ||
| 239 | if callable(hook): | ||
| 240 | hook(f, pkg, file_regex, output_pattern, m.group(1)) | ||
| 241 | |||
| 242 | d.setVar('PACKAGES', ' '.join(packages)) | ||
| 243 | return list(split_packages) | ||
| 244 | |||
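A minimal usage sketch for do_split_packages() in a recipe (assumed context; the plugin path, regex and package names are hypothetical):

python populate_packages:prepend () {
    plugindir = d.expand('${libdir}/myapp/plugins')
    do_split_packages(d, plugindir, r'^libplugin_(.*)\.so$',
                      output_pattern='myapp-plugin-%s',
                      description='MyApp plugin for %s',
                      extra_depends='')
}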
| 245 | PACKAGE_DEPENDS += "file-native" | ||
| 246 | |||
| 247 | python () { | ||
| 248 | if d.getVar('PACKAGES') != '': | ||
| 249 | deps = "" | ||
| 250 | for dep in (d.getVar('PACKAGE_DEPENDS') or "").split(): | ||
| 251 | deps += " %s:do_populate_sysroot" % dep | ||
| 252 | if d.getVar('PACKAGE_MINIDEBUGINFO') == '1': | ||
| 253 | deps += ' xz-native:do_populate_sysroot' | ||
| 254 | d.appendVarFlag('do_package', 'depends', deps) | ||
| 255 | |||
| 256 | # shlibs requires any DEPENDS to have already been packaged for the *.list files | ||
| 257 | d.appendVarFlag('do_package', 'deptask', " do_packagedata") | ||
| 258 | } | ||
| 259 | |||
| 260 | # Get a list of files from file vars by searching files under current working directory | ||
| 261 | # The list contains symlinks, directories and normal files. | ||
| 262 | def files_from_filevars(filevars): | ||
| 263 | import os,glob | ||
| 264 | cpath = oe.cachedpath.CachedPath() | ||
| 265 | files = [] | ||
| 266 | for f in filevars: | ||
| 267 | if os.path.isabs(f): | ||
| 268 | f = '.' + f | ||
| 269 | if not f.startswith("./"): | ||
| 270 | f = './' + f | ||
| 271 | globbed = glob.glob(f) | ||
| 272 | if globbed: | ||
| 273 | if [ f ] != globbed: | ||
| 274 | files += globbed | ||
| 275 | continue | ||
| 276 | files.append(f) | ||
| 277 | |||
| 278 | symlink_paths = [] | ||
| 279 | for ind, f in enumerate(files): | ||
| 280 | # Handle directory symlinks. Truncate path to the lowest level symlink | ||
| 281 | parent = '' | ||
| 282 | for dirname in f.split('/')[:-1]: | ||
| 283 | parent = os.path.join(parent, dirname) | ||
| 284 | if dirname == '.': | ||
| 285 | continue | ||
| 286 | if cpath.islink(parent): | ||
| 287 | bb.warn("FILES contains file '%s' which resides under a " | ||
| 288 | "directory symlink. Please fix the recipe and use the " | ||
| 289 | "real path for the file." % f[1:]) | ||
| 290 | symlink_paths.append(f) | ||
| 291 | files[ind] = parent | ||
| 292 | f = parent | ||
| 293 | break | ||
| 294 | |||
| 295 | if not cpath.islink(f): | ||
| 296 | if cpath.isdir(f): | ||
| 297 | newfiles = [ os.path.join(f,x) for x in os.listdir(f) ] | ||
| 298 | if newfiles: | ||
| 299 | files += newfiles | ||
| 300 | |||
| 301 | return files, symlink_paths | ||
| 302 | |||
| 303 | # Called in package_<rpm,ipk,deb>.bbclass to get the correct list of configuration files | ||
| 304 | def get_conffiles(pkg, d): | ||
| 305 | pkgdest = d.getVar('PKGDEST') | ||
| 306 | root = os.path.join(pkgdest, pkg) | ||
| 307 | cwd = os.getcwd() | ||
| 308 | os.chdir(root) | ||
| 309 | |||
| 310 | conffiles = d.getVar('CONFFILES:%s' % pkg) | ||
| 311 | if conffiles == None: | ||
| 312 | conffiles = d.getVar('CONFFILES') | ||
| 313 | if conffiles == None: | ||
| 314 | conffiles = "" | ||
| 315 | conffiles = conffiles.split() | ||
| 316 | conf_orig_list = files_from_filevars(conffiles)[0] | ||
| 317 | |||
| 318 | # Remove links and directories from conf_orig_list to get conf_list which only contains normal files | ||
| 319 | conf_list = [] | ||
| 320 | for f in conf_orig_list: | ||
| 321 | if os.path.isdir(f): | ||
| 322 | continue | ||
| 323 | if os.path.islink(f): | ||
| 324 | continue | ||
| 325 | if not os.path.exists(f): | ||
| 326 | continue | ||
| 327 | conf_list.append(f) | ||
| 328 | |||
| 329 | # Remove the leading './' | ||
| 330 | for i in range(0, len(conf_list)): | ||
| 331 | conf_list[i] = conf_list[i][1:] | ||
| 332 | |||
| 333 | os.chdir(cwd) | ||
| 334 | return conf_list | ||
| 335 | |||
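For context (illustrative only, not from this file), the CONFFILES values consumed above are normally set per package in a recipe, e.g.:

CONFFILES:${PN} = "${sysconfdir}/myapp.conf"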
| 336 | def checkbuildpath(file, d): | ||
| 337 | tmpdir = d.getVar('TMPDIR') | ||
| 338 | with open(file) as f: | ||
| 339 | file_content = f.read() | ||
| 340 | if tmpdir in file_content: | ||
| 341 | return True | ||
| 342 | |||
| 343 | return False | ||
| 344 | |||
| 345 | def parse_debugsources_from_dwarfsrcfiles_output(dwarfsrcfiles_output): | ||
| 346 | debugfiles = {} | ||
| 347 | |||
| 348 | for line in dwarfsrcfiles_output.splitlines(): | ||
| 349 | if line.startswith("\t"): | ||
| 350 | debugfiles[os.path.normpath(line.split()[0])] = "" | ||
| 351 | |||
| 352 | return debugfiles.keys() | ||
| 353 | |||
| 354 | def source_info(file, d, fatal=True): | ||
| 355 | import subprocess | ||
| 356 | |||
| 357 | cmd = ["dwarfsrcfiles", file] | ||
| 358 | try: | ||
| 359 | output = subprocess.check_output(cmd, universal_newlines=True, stderr=subprocess.STDOUT) | ||
| 360 | retval = 0 | ||
| 361 | except subprocess.CalledProcessError as exc: | ||
| 362 | output = exc.output | ||
| 363 | retval = exc.returncode | ||
| 364 | |||
| 365 | # 255 means a specific file wasn't fully parsed to get the debug file list, which is not a fatal failure | ||
| 366 | if retval != 0 and retval != 255: | ||
| 367 | msg = "dwarfsrcfiles failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else "") | ||
| 368 | if fatal: | ||
| 369 | bb.fatal(msg) | ||
| 370 | bb.note(msg) | ||
| 371 | |||
| 372 | debugsources = parse_debugsources_from_dwarfsrcfiles_output(output) | ||
| 373 | |||
| 374 | return list(debugsources) | ||
| 375 | |||
| 376 | def splitdebuginfo(file, dvar, dv, d): | ||
| 377 | # Function to split a single file into two components, one is the stripped | ||
| 378 | # target system binary, the other contains any debugging information. The | ||
| 379 | # two files are linked to reference each other. | ||
| 380 | # | ||
| 381 | # return a mapping of files:debugsources | ||
| 382 | |||
| 383 | import stat | ||
| 384 | import subprocess | ||
| 385 | |||
| 386 | src = file[len(dvar):] | ||
| 387 | dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"] | ||
| 388 | debugfile = dvar + dest | ||
| 389 | sources = [] | ||
| 390 | |||
| 391 | if file.endswith(".ko") and file.find("/lib/modules/") != -1: | ||
| 392 | if oe.package.is_kernel_module_signed(file): | ||
| 393 | bb.debug(1, "Skip strip on signed module %s" % file) | ||
| 394 | return (file, sources) | ||
| 395 | |||
| 396 | # Split the file... | ||
| 397 | bb.utils.mkdirhier(os.path.dirname(debugfile)) | ||
| 398 | #bb.note("Split %s -> %s" % (file, debugfile)) | ||
| 399 | # Only store off the hard link reference if we successfully split! | ||
| 400 | |||
| 401 | dvar = d.getVar('PKGD') | ||
| 402 | objcopy = d.getVar("OBJCOPY") | ||
| 403 | |||
| 404 | newmode = None | ||
| 405 | if not os.access(file, os.W_OK) or os.access(file, os.R_OK): | ||
| 406 | origmode = os.stat(file)[stat.ST_MODE] | ||
| 407 | newmode = origmode | stat.S_IWRITE | stat.S_IREAD | ||
| 408 | os.chmod(file, newmode) | ||
| 409 | |||
| 410 | # We need to extract the debug src information here... | ||
| 411 | if dv["srcdir"]: | ||
| 412 | sources = source_info(file, d) | ||
| 413 | |||
| 414 | bb.utils.mkdirhier(os.path.dirname(debugfile)) | ||
| 415 | |||
| 416 | subprocess.check_output([objcopy, '--only-keep-debug', file, debugfile], stderr=subprocess.STDOUT) | ||
| 417 | |||
| 418 | # Set the debuglink to have the view of the file path on the target | ||
| 419 | subprocess.check_output([objcopy, '--add-gnu-debuglink', debugfile, file], stderr=subprocess.STDOUT) | ||
| 420 | |||
| 421 | if newmode: | ||
| 422 | os.chmod(file, origmode) | ||
| 423 | |||
| 424 | return (file, sources) | ||
| 425 | |||
| 426 | def splitstaticdebuginfo(file, dvar, dv, d): | ||
| 427 | # Unlike the function above, there is no way to split a static library | ||
| 428 | # into two components. So to get similar results we will copy the unmodified | ||
| 429 | # static library (containing the debug symbols) into a new directory. | ||
| 430 | # We will then strip (preserving symbols) the static library in the | ||
| 431 | # typical location. | ||
| 432 | # | ||
| 433 | # return a mapping of files:debugsources | ||
| 434 | |||
| 435 | import stat, shutil  # shutil is needed for copy2 below | ||
| 436 | |||
| 437 | src = file[len(dvar):] | ||
| 438 | dest = dv["staticlibdir"] + os.path.dirname(src) + dv["staticdir"] + "/" + os.path.basename(src) + dv["staticappend"] | ||
| 439 | debugfile = dvar + dest | ||
| 440 | sources = [] | ||
| 441 | |||
| 442 | # Copy the file... | ||
| 443 | bb.utils.mkdirhier(os.path.dirname(debugfile)) | ||
| 444 | #bb.note("Copy %s -> %s" % (file, debugfile)) | ||
| 445 | |||
| 446 | dvar = d.getVar('PKGD') | ||
| 447 | |||
| 448 | newmode = None | ||
| 449 | if not os.access(file, os.W_OK) or os.access(file, os.R_OK): | ||
| 450 | origmode = os.stat(file)[stat.ST_MODE] | ||
| 451 | newmode = origmode | stat.S_IWRITE | stat.S_IREAD | ||
| 452 | os.chmod(file, newmode) | ||
| 453 | |||
| 454 | # We need to extract the debug src information here... | ||
| 455 | if dv["srcdir"]: | ||
| 456 | sources = source_info(file, d) | ||
| 457 | |||
| 458 | bb.utils.mkdirhier(os.path.dirname(debugfile)) | ||
| 459 | |||
| 460 | # Copy the unmodified item to the debug directory | ||
| 461 | shutil.copy2(file, debugfile) | ||
| 462 | |||
| 463 | if newmode: | ||
| 464 | os.chmod(file, origmode) | ||
| 465 | |||
| 466 | return (file, sources) | ||
| 467 | |||
| 468 | def inject_minidebuginfo(file, dvar, dv, d): | ||
| 469 | # Extract just the symbols from debuginfo into minidebuginfo, | ||
| 470 | # compress it with xz and inject it back into the binary in a .gnu_debugdata section. | ||
| 471 | # https://sourceware.org/gdb/onlinedocs/gdb/MiniDebugInfo.html | ||
| 472 | |||
| 473 | import subprocess | ||
| 474 | |||
| 475 | readelf = d.getVar('READELF') | ||
| 476 | nm = d.getVar('NM') | ||
| 477 | objcopy = d.getVar('OBJCOPY') | ||
| 478 | |||
| 479 | minidebuginfodir = d.expand('${WORKDIR}/minidebuginfo') | ||
| 480 | |||
| 481 | src = file[len(dvar):] | ||
| 482 | dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"] | ||
| 483 | debugfile = dvar + dest | ||
| 484 | minidebugfile = minidebuginfodir + src + '.minidebug' | ||
| 485 | bb.utils.mkdirhier(os.path.dirname(minidebugfile)) | ||
| 486 | |||
| 487 | # If we didn't produce debuginfo for any reason, we can't produce minidebuginfo either | ||
| 488 | # so skip it. | ||
| 489 | if not os.path.exists(debugfile): | ||
| 490 | bb.debug(1, 'ELF file {} has no debuginfo, skipping minidebuginfo injection'.format(file)) | ||
| 491 | return | ||
| 492 | |||
| 493 | # Find non-allocated PROGBITS, NOTE, and NOBITS sections in the debuginfo. | ||
| 494 | # We will exclude all of these from minidebuginfo to save space. | ||
| 495 | remove_section_names = [] | ||
| 496 | for line in subprocess.check_output([readelf, '-W', '-S', debugfile], universal_newlines=True).splitlines(): | ||
| 497 | fields = line.split() | ||
| 498 | if len(fields) < 8: | ||
| 499 | continue | ||
| 500 | name = fields[0] | ||
| 501 | type = fields[1] | ||
| 502 | flags = fields[7] | ||
| 503 | # .debug_ sections will be removed by objcopy -S so no need to explicitly remove them | ||
| 504 | if name.startswith('.debug_'): | ||
| 505 | continue | ||
| 506 | if 'A' not in flags and type in ['PROGBITS', 'NOTE', 'NOBITS']: | ||
| 507 | remove_section_names.append(name) | ||
| 508 | |||
| 509 | # List dynamic symbols in the binary. We can exclude these from minidebuginfo | ||
| 510 | # because they are always present in the binary. | ||
| 511 | dynsyms = set() | ||
| 512 | for line in subprocess.check_output([nm, '-D', file, '--format=posix', '--defined-only'], universal_newlines=True).splitlines(): | ||
| 513 | dynsyms.add(line.split()[0]) | ||
| 514 | |||
| 515 | # Find all function symbols from debuginfo which aren't in the dynamic symbols table. | ||
| 516 | # These are the ones we want to keep in minidebuginfo. | ||
| 517 | keep_symbols_file = minidebugfile + '.symlist' | ||
| 518 | found_any_symbols = False | ||
| 519 | with open(keep_symbols_file, 'w') as f: | ||
| 520 | for line in subprocess.check_output([nm, debugfile, '--format=sysv', '--defined-only'], universal_newlines=True).splitlines(): | ||
| 521 | fields = line.split('|') | ||
| 522 | if len(fields) < 7: | ||
| 523 | continue | ||
| 524 | name = fields[0].strip() | ||
| 525 | type = fields[3].strip() | ||
| 526 | if type == 'FUNC' and name not in dynsyms: | ||
| 527 | f.write('{}\n'.format(name)) | ||
| 528 | found_any_symbols = True | ||
| 529 | |||
| 530 | if not found_any_symbols: | ||
| 531 | bb.debug(1, 'ELF file {} contains no symbols, skipping minidebuginfo injection'.format(file)) | ||
| 532 | return | ||
| 533 | |||
| 534 | bb.utils.remove(minidebugfile) | ||
| 535 | bb.utils.remove(minidebugfile + '.xz') | ||
| 536 | |||
| 537 | subprocess.check_call([objcopy, '-S'] + | ||
| 538 | ['--remove-section={}'.format(s) for s in remove_section_names] + | ||
| 539 | ['--keep-symbols={}'.format(keep_symbols_file), debugfile, minidebugfile]) | ||
| 540 | |||
| 541 | subprocess.check_call(['xz', '--keep', minidebugfile]) | ||
| 542 | |||
| 543 | subprocess.check_call([objcopy, '--add-section', '.gnu_debugdata={}.xz'.format(minidebugfile), file]) | ||
| 544 | |||
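Minidebuginfo injection is only attempted when the feature is enabled; a configuration sketch (typically distro or local configuration, shown here for illustration):

PACKAGE_MINIDEBUGINFO = "1"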
| 545 | def copydebugsources(debugsrcdir, sources, d): | ||
| 546 | # The debug src information written out to sourcefile is further processed | ||
| 547 | # and copied to the destination here. | ||
| 548 | |||
| 549 | import stat | ||
| 550 | import subprocess | ||
| 551 | |||
| 552 | if debugsrcdir and sources: | ||
| 553 | sourcefile = d.expand("${WORKDIR}/debugsources.list") | ||
| 554 | bb.utils.remove(sourcefile) | ||
| 555 | |||
| 556 | # filenames are null-separated - this is an artefact of the previous use | ||
| 557 | # of rpm's debugedit, which was writing them out that way, and the code elsewhere | ||
| 558 | # is still assuming that. | ||
| 559 | debuglistoutput = '\0'.join(sources) + '\0' | ||
| 560 | with open(sourcefile, 'a') as sf: | ||
| 561 | sf.write(debuglistoutput) | ||
| 562 | |||
| 563 | dvar = d.getVar('PKGD') | ||
| 564 | strip = d.getVar("STRIP") | ||
| 565 | objcopy = d.getVar("OBJCOPY") | ||
| 566 | workdir = d.getVar("WORKDIR") | ||
| 567 | sdir = d.getVar("S") | ||
| 568 | sparentdir = os.path.dirname(os.path.dirname(sdir)) | ||
| 569 | sbasedir = os.path.basename(os.path.dirname(sdir)) + "/" + os.path.basename(sdir) | ||
| 570 | workparentdir = os.path.dirname(os.path.dirname(workdir)) | ||
| 571 | workbasedir = os.path.basename(os.path.dirname(workdir)) + "/" + os.path.basename(workdir) | ||
| 572 | |||
| 573 | # If S isn't based on WORKDIR we can infer our sources are located elsewhere, | ||
| 574 | # e.g. using externalsrc; use S as base for our dirs | ||
| 575 | if workdir in sdir or 'work-shared' in sdir: | ||
| 576 | basedir = workbasedir | ||
| 577 | parentdir = workparentdir | ||
| 578 | else: | ||
| 579 | basedir = sbasedir | ||
| 580 | parentdir = sparentdir | ||
| 581 | |||
| 582 | # If the build path appears in sourcefile, it means the toolchain did not use | ||
| 583 | # -fdebug-prefix-map when compiling | ||
| 584 | if checkbuildpath(sourcefile, d): | ||
| 585 | localsrc_prefix = parentdir + "/" | ||
| 586 | else: | ||
| 587 | localsrc_prefix = "/usr/src/debug/" | ||
| 588 | |||
| 589 | nosuchdir = [] | ||
| 590 | basepath = dvar | ||
| 591 | for p in debugsrcdir.split("/"): | ||
| 592 | basepath = basepath + "/" + p | ||
| 593 | if not cpath.exists(basepath): | ||
| 594 | nosuchdir.append(basepath) | ||
| 595 | bb.utils.mkdirhier(basepath) | ||
| 596 | cpath.updatecache(basepath) | ||
| 597 | |||
| 598 | # Ignore files from the recipe sysroots (target and native) | ||
| 599 | processdebugsrc = "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '((<internal>|<built-in>)$|/.*recipe-sysroot.*/)' | " | ||
| 600 | # We need to ignore files that are not actually ours | ||
| 601 | # we do this by only paying attention to items from this package | ||
| 602 | processdebugsrc += "fgrep -zw '%s' | " | ||
| 603 | # Remove prefix in the source paths | ||
| 604 | processdebugsrc += "sed 's#%s##g' | " | ||
| 605 | processdebugsrc += "(cd '%s' ; cpio -pd0mlL --no-preserve-owner '%s%s' 2>/dev/null)" | ||
| 606 | |||
| 607 | cmd = processdebugsrc % (sourcefile, basedir, localsrc_prefix, parentdir, dvar, debugsrcdir) | ||
| 608 | try: | ||
| 609 | subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT) | ||
| 610 | except subprocess.CalledProcessError: | ||
| 611 | # Can "fail" if internal headers/transient sources are attempted | ||
| 612 | pass | ||
| 613 | |||
| 614 | # cpio seems to have a bug with -lL together and symbolic links are just copied, not dereferenced. | ||
| 615 | # Work around this by manually finding and copying any symbolic links that made it through. | ||
| 616 | cmd = "find %s%s -type l -print0 -delete | sed s#%s%s/##g | (cd '%s' ; cpio -pd0mL --no-preserve-owner '%s%s')" % \ | ||
| 617 | (dvar, debugsrcdir, dvar, debugsrcdir, parentdir, dvar, debugsrcdir) | ||
| 618 | subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT) | ||
| 619 | |||
| 620 | |||
| 621 | # debugsources.list may be polluted from the host if we used externalsrc, | ||
| 622 | # cpio uses copy-pass and may have just created a directory structure | ||
| 623 | # matching the one from the host; if that's the case, move those files to | ||
| 624 | # debugsrcdir to avoid host contamination. | ||
| 625 | # Empty dir structure will be deleted in the next step. | ||
| 626 | |||
| 627 | # Same check as above for externalsrc | ||
| 628 | if workdir not in sdir: | ||
| 629 | if os.path.exists(dvar + debugsrcdir + sdir): | ||
| 630 | cmd = "mv %s%s%s/* %s%s" % (dvar, debugsrcdir, sdir, dvar,debugsrcdir) | ||
| 631 | subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT) | ||
| 632 | |||
| 633 | # The copy by cpio may have resulted in some empty directories! Remove these | ||
| 634 | cmd = "find %s%s -empty -type d -delete" % (dvar, debugsrcdir) | ||
| 635 | subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT) | ||
| 636 | |||
| 637 | # Also remove debugsrcdir if it's empty | ||
| 638 | for p in nosuchdir[::-1]: | ||
| 639 | if os.path.exists(p) and not os.listdir(p): | ||
| 640 | os.rmdir(p) | ||
| 641 | |||
| 642 | # | ||
| 643 | # Package data handling routines | ||
| 644 | # | ||
| 645 | |||
| 646 | def get_package_mapping (pkg, basepkg, d, depversions=None): | ||
| 647 | import oe.packagedata | ||
| 648 | |||
| 649 | data = oe.packagedata.read_subpkgdata(pkg, d) | ||
| 650 | key = "PKG:%s" % pkg | ||
| 651 | |||
| 652 | if key in data: | ||
| 653 | if bb.data.inherits_class('allarch', d) and bb.data.inherits_class('packagegroup', d) and pkg != data[key]: | ||
| 654 | bb.error("An allarch packagegroup shouldn't depend on packages which are dynamically renamed (%s to %s)" % (pkg, data[key])) | ||
| 655 | # Have to avoid undoing the write_extra_pkgs(global_variants...) | ||
| 656 | if bb.data.inherits_class('allarch', d) and not d.getVar('MULTILIB_VARIANTS') \ | ||
| 657 | and data[key] == basepkg: | ||
| 658 | return pkg | ||
| 659 | if depversions == []: | ||
| 660 | # Avoid returning a mapping if the renamed package rprovides its original name | ||
| 661 | rprovkey = "RPROVIDES:%s" % pkg | ||
| 662 | if rprovkey in data: | ||
| 663 | if pkg in bb.utils.explode_dep_versions2(data[rprovkey]): | ||
| 664 | bb.note("%s rprovides %s, not replacing the latter" % (data[key], pkg)) | ||
| 665 | return pkg | ||
| 666 | # Do map to rewritten package name | ||
| 667 | return data[key] | ||
| 668 | |||
| 669 | return pkg | ||
| 670 | |||
| 671 | def get_package_additional_metadata (pkg_type, d): | ||
| 672 | base_key = "PACKAGE_ADD_METADATA" | ||
| 673 | for key in ("%s_%s" % (base_key, pkg_type.upper()), base_key): | ||
| 674 | if d.getVar(key, False) is None: | ||
| 675 | continue | ||
| 676 | d.setVarFlag(key, "type", "list") | ||
| 677 | if d.getVarFlag(key, "separator") is None: | ||
| 678 | d.setVarFlag(key, "separator", "\\n") | ||
| 679 | metadata_fields = [field.strip() for field in oe.data.typed_value(key, d)] | ||
| 680 | return "\n".join(metadata_fields).strip() | ||
| 681 | |||
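A hedged illustration of the metadata variables this helper reads (the value shown is hypothetical; the _DEB suffix selects the deb packaging backend):

PACKAGE_ADD_METADATA_DEB = "Vcs-Git: https://example.com/myapp.git"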
| 682 | def runtime_mapping_rename (varname, pkg, d): | ||
| 683 | #bb.note("%s before: %s" % (varname, d.getVar(varname))) | ||
| 684 | |||
| 685 | new_depends = {} | ||
| 686 | deps = bb.utils.explode_dep_versions2(d.getVar(varname) or "") | ||
| 687 | for depend, depversions in deps.items(): | ||
| 688 | new_depend = get_package_mapping(depend, pkg, d, depversions) | ||
| 689 | if depend != new_depend: | ||
| 690 | bb.note("package name mapping done: %s -> %s" % (depend, new_depend)) | ||
| 691 | new_depends[new_depend] = deps[depend] | ||
| 692 | |||
| 693 | d.setVar(varname, bb.utils.join_deps(new_depends, commasep=False)) | ||
| 694 | |||
| 695 | #bb.note("%s after: %s" % (varname, d.getVar(varname))) | ||
| 696 | |||
| 697 | # | ||
| 698 | # Used by do_packagedata (and possibly other routines post do_package) | ||
| 699 | # | ||
| 700 | |||
| 701 | PRSERV_ACTIVE = "${@bool(d.getVar("PRSERV_HOST"))}" | ||
| 702 | PRSERV_ACTIVE[vardepvalue] = "${PRSERV_ACTIVE}" | ||
| 703 | package_get_auto_pr[vardepsexclude] = "BB_TASKDEPDATA" | ||
| 704 | package_get_auto_pr[vardeps] += "PRSERV_ACTIVE" | ||
| 705 | python package_get_auto_pr() { | ||
| 706 | import oe.prservice | ||
| 707 | |||
| 708 | def get_do_package_hash(pn): | ||
| 709 | if d.getVar("BB_RUNTASK") != "do_package": | ||
| 710 | taskdepdata = d.getVar("BB_TASKDEPDATA", False) | ||
| 711 | for dep in taskdepdata: | ||
| 712 | if taskdepdata[dep][1] == "do_package" and taskdepdata[dep][0] == pn: | ||
| 713 | return taskdepdata[dep][6] | ||
| 714 | return None | ||
| 715 | |||
| 716 | # Support per recipe PRSERV_HOST | ||
| 717 | pn = d.getVar('PN') | ||
| 718 | host = d.getVar("PRSERV_HOST_" + pn) | ||
| 719 | if not (host is None): | ||
| 720 | d.setVar("PRSERV_HOST", host) | ||
| 721 | |||
| 722 | pkgv = d.getVar("PKGV") | ||
| 723 | |||
| 724 | # PR Server not active, handle AUTOINC | ||
| 725 | if not d.getVar('PRSERV_HOST'): | ||
| 726 | d.setVar("PRSERV_PV_AUTOINC", "0") | ||
| 727 | return | ||
| 728 | |||
| 729 | auto_pr = None | ||
| 730 | pv = d.getVar("PV") | ||
| 731 | version = d.getVar("PRAUTOINX") | ||
| 732 | pkgarch = d.getVar("PACKAGE_ARCH") | ||
| 733 | checksum = get_do_package_hash(pn) | ||
| 734 | |||
| 735 | # If do_package isn't in the dependencies, we can't get the checksum... | ||
| 736 | if not checksum: | ||
| 737 | bb.warn('Task %s requested do_package unihash, but it was not available.' % d.getVar('BB_RUNTASK')) | ||
| 738 | #taskdepdata = d.getVar("BB_TASKDEPDATA", False) | ||
| 739 | #for dep in taskdepdata: | ||
| 740 | # bb.warn('%s:%s = %s' % (taskdepdata[dep][0], taskdepdata[dep][1], taskdepdata[dep][6])) | ||
| 741 | return | ||
| 742 | |||
| 743 | if d.getVar('PRSERV_LOCKDOWN'): | ||
| 744 | auto_pr = d.getVar('PRAUTO_' + version + '_' + pkgarch) or d.getVar('PRAUTO_' + version) or None | ||
| 745 | if auto_pr is None: | ||
| 746 | bb.fatal("Can NOT get PRAUTO from lockdown exported file") | ||
| 747 | d.setVar('PRAUTO',str(auto_pr)) | ||
| 748 | return | ||
| 749 | |||
| 750 | try: | ||
| 751 | conn = oe.prservice.prserv_make_conn(d) | ||
| 752 | if conn is not None: | ||
| 753 | if "AUTOINC" in pkgv: | ||
| 754 | srcpv = bb.fetch2.get_srcrev(d) | ||
| 755 | base_ver = "AUTOINC-%s" % version[:version.find(srcpv)] | ||
| 756 | value = conn.getPR(base_ver, pkgarch, srcpv) | ||
| 757 | d.setVar("PRSERV_PV_AUTOINC", str(value)) | ||
| 758 | |||
| 759 | auto_pr = conn.getPR(version, pkgarch, checksum) | ||
| 760 | conn.close() | ||
| 761 | except Exception as e: | ||
| 762 | bb.fatal("Can NOT get PRAUTO, exception %s" % str(e)) | ||
| 763 | if auto_pr is None: | ||
| 764 | bb.fatal("Can NOT get PRAUTO from remote PR service") | ||
| 765 | d.setVar('PRAUTO',str(auto_pr)) | ||
| 766 | } | ||
| 767 | |||
| 768 | # | ||
| 769 | # Package functions suitable for inclusion in PACKAGEFUNCS | ||
| 770 | # | ||
| 771 | |||
| 772 | python package_convert_pr_autoinc() { | ||
| 773 | pkgv = d.getVar("PKGV") | ||
| 774 | |||
| 775 | # Adjust pkgv as necessary... | ||
| 776 | if 'AUTOINC' in pkgv: | ||
| 777 | d.setVar("PKGV", pkgv.replace("AUTOINC", "${PRSERV_PV_AUTOINC}")) | ||
| 778 | |||
| 779 | # Change PRSERV_PV_AUTOINC and EXTENDPRAUTO usage to special values | ||
| 780 | d.setVar('PRSERV_PV_AUTOINC', '@PRSERV_PV_AUTOINC@') | ||
| 781 | d.setVar('EXTENDPRAUTO', '@EXTENDPRAUTO@') | ||
| 782 | } | ||
| 783 | |||
| 784 | LOCALEBASEPN ??= "${PN}" | ||
| 785 | |||
| 786 | python package_do_split_locales() { | ||
| 787 | if (d.getVar('PACKAGE_NO_LOCALE') == '1'): | ||
| 788 | bb.debug(1, "package requested not splitting locales") | ||
| 789 | return | ||
| 790 | |||
| 791 | packages = (d.getVar('PACKAGES') or "").split() | ||
| 792 | |||
| 793 | datadir = d.getVar('datadir') | ||
| 794 | if not datadir: | ||
| 795 | bb.note("datadir not defined") | ||
| 796 | return | ||
| 797 | |||
| 798 | dvar = d.getVar('PKGD') | ||
| 799 | pn = d.getVar('LOCALEBASEPN') | ||
| 800 | |||
| 801 | if pn + '-locale' in packages: | ||
| 802 | packages.remove(pn + '-locale') | ||
| 803 | |||
| 804 | localedir = os.path.join(dvar + datadir, 'locale') | ||
| 805 | |||
| 806 | if not cpath.isdir(localedir): | ||
| 807 | bb.debug(1, "No locale files in this package") | ||
| 808 | return | ||
| 809 | |||
| 810 | locales = os.listdir(localedir) | ||
| 811 | |||
| 812 | summary = d.getVar('SUMMARY') or pn | ||
| 813 | description = d.getVar('DESCRIPTION') or "" | ||
| 814 | locale_section = d.getVar('LOCALE_SECTION') | ||
| 815 | mlprefix = d.getVar('MLPREFIX') or "" | ||
| 816 | for l in sorted(locales): | ||
| 817 | ln = legitimize_package_name(l) | ||
| 818 | pkg = pn + '-locale-' + ln | ||
| 819 | packages.append(pkg) | ||
| 820 | d.setVar('FILES:' + pkg, os.path.join(datadir, 'locale', l)) | ||
| 821 | d.setVar('RRECOMMENDS:' + pkg, '%svirtual-locale-%s' % (mlprefix, ln)) | ||
| 822 | d.setVar('RPROVIDES:' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln)) | ||
| 823 | d.setVar('SUMMARY:' + pkg, '%s - %s translations' % (summary, l)) | ||
| 824 | d.setVar('DESCRIPTION:' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l)) | ||
| 825 | if locale_section: | ||
| 826 | d.setVar('SECTION:' + pkg, locale_section) | ||
| 827 | |||
| 828 | d.setVar('PACKAGES', ' '.join(packages)) | ||
| 829 | |||
| 830 | # Disabled by RP 18/06/07 | ||
| 831 | # Wildcards aren't supported in debian | ||
| 832 | # They break with ipkg since glibc-locale* will mean that | ||
| 833 | # glibc-localedata-translit* won't install as a dependency | ||
| 834 | # for some other package which breaks meta-toolchain | ||
| 835 | # Probably breaks since virtual-locale- isn't provided anywhere | ||
| 836 | #rdep = (d.getVar('RDEPENDS:%s' % pn) or "").split() | ||
| 837 | #rdep.append('%s-locale*' % pn) | ||
| 838 | #d.setVar('RDEPENDS:%s' % pn, ' '.join(rdep)) | ||
| 839 | } | ||
| 840 | |||
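Recipes can steer the locale splitting above; an illustrative sketch (values assumed):

PACKAGE_NO_LOCALE = "1"     # skip locale splitting entirely
LOCALE_SECTION = "locale"   # section assigned to the generated -locale-* packages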
| 841 | python perform_packagecopy () { | ||
| 842 | import subprocess | ||
| 843 | import shutil | ||
| 844 | |||
| 845 | dest = d.getVar('D') | ||
| 846 | dvar = d.getVar('PKGD') | ||
| 847 | |||
| 848 | # Start package population by taking a copy of the installed | ||
| 849 | # files to operate on | ||
| 850 | # Preserve sparse files and hard links | ||
| 851 | cmd = 'tar --exclude=./sysroot-only -cf - -C %s -p -S . | tar -xf - -C %s' % (dest, dvar) | ||
| 852 | subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT) | ||
| 853 | |||
| 854 | # replace RPATHs for the nativesdk binaries, to make them relocatable | ||
| 855 | if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d): | ||
| 856 | rpath_replace (dvar, d) | ||
| 857 | } | ||
| 858 | perform_packagecopy[cleandirs] = "${PKGD}" | ||
| 859 | perform_packagecopy[dirs] = "${PKGD}" | ||
| 860 | |||
| 861 | # We generate a master list of directories to process; we start by | ||
| 862 | # seeding this list with reasonable defaults, then load from | ||
| 863 | # the fs-perms.txt files | ||
| 864 | python fixup_perms () { | ||
| 865 | import pwd, grp | ||
| 866 | |||
| 867 | # init using a string with the same format as a line as documented in | ||
| 868 | # the fs-perms.txt file | ||
| 869 | # <path> <mode> <uid> <gid> <walk> <fmode> <fuid> <fgid> | ||
| 870 | # <path> link <link target> | ||
| 871 | # | ||
| 872 | # __str__ can be used to print out an entry in the input format | ||
| 873 | # | ||
| 874 | # if fs_perms_entry.path is None: | ||
| 875 | # an error occurred | ||
| 876 | # if fs_perms_entry.link, you can retrieve: | ||
| 877 | # fs_perms_entry.path = path | ||
| 878 | # fs_perms_entry.link = target of link | ||
| 879 | # if not fs_perms_entry.link, you can retrieve: | ||
| 880 | # fs_perms_entry.path = path | ||
| 881 | # fs_perms_entry.mode = expected dir mode or None | ||
| 882 | # fs_perms_entry.uid = expected uid or -1 | ||
| 883 | # fs_perms_entry.gid = expected gid or -1 | ||
| 884 | # fs_perms_entry.walk = 'true' or something else | ||
| 885 | # fs_perms_entry.fmode = expected file mode or None | ||
| 886 | # fs_perms_entry.fuid = expected file uid or -1 | ||
| 887 | # fs_perms_entry.fgid = expected file gid or -1 | ||
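# Illustrative entries in the documented format (values assumed, not the shipped defaults):
#   /usr/src    0755 root root false - - -
#   /var/cache  link volatile/cache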
| 888 | class fs_perms_entry(): | ||
| 889 | def __init__(self, line): | ||
| 890 | lsplit = line.split() | ||
| 891 | if len(lsplit) == 3 and lsplit[1].lower() == "link": | ||
| 892 | self._setlink(lsplit[0], lsplit[2]) | ||
| 893 | elif len(lsplit) == 8: | ||
| 894 | self._setdir(lsplit[0], lsplit[1], lsplit[2], lsplit[3], lsplit[4], lsplit[5], lsplit[6], lsplit[7]) | ||
| 895 | else: | ||
| 896 | msg = "Fixup Perms: invalid config line %s" % line | ||
| 897 | oe.qa.handle_error("perm-config", msg, d) | ||
| 898 | self.path = None | ||
| 899 | self.link = None | ||
| 900 | |||
| 901 | def _setdir(self, path, mode, uid, gid, walk, fmode, fuid, fgid): | ||
| 902 | self.path = os.path.normpath(path) | ||
| 903 | self.link = None | ||
| 904 | self.mode = self._procmode(mode) | ||
| 905 | self.uid = self._procuid(uid) | ||
| 906 | self.gid = self._procgid(gid) | ||
| 907 | self.walk = walk.lower() | ||
| 908 | self.fmode = self._procmode(fmode) | ||
| 909 | self.fuid = self._procuid(fuid) | ||
| 910 | self.fgid = self._procgid(fgid) | ||
| 911 | |||
| 912 | def _setlink(self, path, link): | ||
| 913 | self.path = os.path.normpath(path) | ||
| 914 | self.link = link | ||
| 915 | |||
| 916 | def _procmode(self, mode): | ||
| 917 | if not mode or (mode and mode == "-"): | ||
| 918 | return None | ||
| 919 | else: | ||
| 920 | return int(mode,8) | ||
| 921 | |||
| 922 | # Note uid/gid -1 has special significance in os.lchown | ||
| 923 | def _procuid(self, uid): | ||
| 924 | if uid is None or uid == "-": | ||
| 925 | return -1 | ||
| 926 | elif uid.isdigit(): | ||
| 927 | return int(uid) | ||
| 928 | else: | ||
| 929 | return pwd.getpwnam(uid).pw_uid | ||
| 930 | |||
| 931 | def _procgid(self, gid): | ||
| 932 | if gid is None or gid == "-": | ||
| 933 | return -1 | ||
| 934 | elif gid.isdigit(): | ||
| 935 | return int(gid) | ||
| 936 | else: | ||
| 937 | return grp.getgrnam(gid).gr_gid | ||
| 938 | |||
| 939 | # Use for debugging the entries | ||
| 940 | def __str__(self): | ||
| 941 | if self.link: | ||
| 942 | return "%s link %s" % (self.path, self.link) | ||
| 943 | else: | ||
| 944 | mode = "-" | ||
| 945 | if self.mode: | ||
| 946 | mode = "0%o" % self.mode | ||
| 947 | fmode = "-" | ||
| 948 | if self.fmode: | ||
| 949 | fmode = "0%o" % self.fmode | ||
| 950 | uid = self._mapugid(self.uid) | ||
| 951 | gid = self._mapugid(self.gid) | ||
| 952 | fuid = self._mapugid(self.fuid) | ||
| 953 | fgid = self._mapugid(self.fgid) | ||
| 954 | return "%s %s %s %s %s %s %s %s" % (self.path, mode, uid, gid, self.walk, fmode, fuid, fgid) | ||
| 955 | |||
| 956 | def _mapugid(self, id): | ||
| 957 | if id is None or id == -1: | ||
| 958 | return "-" | ||
| 959 | else: | ||
| 960 | return "%d" % id | ||
| 961 | |||
| 962 | # Fix the permission, owner and group of path | ||
| 963 | def fix_perms(path, mode, uid, gid, dir): | ||
| 964 | if mode and not os.path.islink(path): | ||
| 965 | #bb.note("Fixup Perms: chmod 0%o %s" % (mode, dir)) | ||
| 966 | os.chmod(path, mode) | ||
| 967 | # -1 is a special value that means don't change the uid/gid | ||
| 968 | # if they are BOTH -1, don't bother to lchown | ||
| 969 | if not (uid == -1 and gid == -1): | ||
| 970 | #bb.note("Fixup Perms: lchown %d:%d %s" % (uid, gid, dir)) | ||
| 971 | os.lchown(path, uid, gid) | ||
| 972 | |||
| 973 | # Return a list of configuration files based on either the default | ||
| 974 | # files/fs-perms.txt or the contents of FILESYSTEM_PERMS_TABLES | ||
| 975 | # paths are resolved via BBPATH | ||
| 976 | def get_fs_perms_list(d): | ||
| 977 | str = "" | ||
| 978 | bbpath = d.getVar('BBPATH') | ||
| 979 | fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES') or "" | ||
| 980 | for conf_file in fs_perms_tables.split(): | ||
| 981 | confpath = bb.utils.which(bbpath, conf_file) | ||
| 982 | if confpath: | ||
| 983 | str += " %s" % bb.utils.which(bbpath, conf_file) | ||
| 984 | else: | ||
| 985 | bb.warn("cannot find %s specified in FILESYSTEM_PERMS_TABLES" % conf_file) | ||
| 986 | return str | ||
| 987 | |||
| 988 | |||
| 989 | |||
| 990 | dvar = d.getVar('PKGD') | ||
| 991 | |||
| 992 | fs_perms_table = {} | ||
| 993 | fs_link_table = {} | ||
| 994 | |||
| 995 | # By default all of the standard directories specified in | ||
| 996 | # bitbake.conf will get 0755 root:root. | ||
| 997 | target_path_vars = [ 'base_prefix', | ||
| 998 | 'prefix', | ||
| 999 | 'exec_prefix', | ||
| 1000 | 'base_bindir', | ||
| 1001 | 'base_sbindir', | ||
| 1002 | 'base_libdir', | ||
| 1003 | 'datadir', | ||
| 1004 | 'sysconfdir', | ||
| 1005 | 'servicedir', | ||
| 1006 | 'sharedstatedir', | ||
| 1007 | 'localstatedir', | ||
| 1008 | 'infodir', | ||
| 1009 | 'mandir', | ||
| 1010 | 'docdir', | ||
| 1011 | 'bindir', | ||
| 1012 | 'sbindir', | ||
| 1013 | 'libexecdir', | ||
| 1014 | 'libdir', | ||
| 1015 | 'includedir', | ||
| 1016 | 'oldincludedir' ] | ||
| 1017 | |||
| 1018 | for path in target_path_vars: | ||
| 1019 | dir = d.getVar(path) or "" | ||
| 1020 | if dir == "": | ||
| 1021 | continue | ||
| 1022 | fs_perms_table[dir] = fs_perms_entry(d.expand("%s 0755 root root false - - -" % (dir))) | ||
| 1023 | |||
| 1024 | # Now we actually load from the configuration files | ||
| 1025 | for conf in get_fs_perms_list(d).split(): | ||
| 1026 | if not os.path.exists(conf): | ||
| 1027 | continue | ||
| 1028 | with open(conf) as f: | ||
| 1029 | for line in f: | ||
| 1030 | if line.startswith('#'): | ||
| 1031 | continue | ||
| 1032 | lsplit = line.split() | ||
| 1033 | if len(lsplit) == 0: | ||
| 1034 | continue | ||
| 1035 | if len(lsplit) != 8 and not (len(lsplit) == 3 and lsplit[1].lower() == "link"): | ||
| 1036 | msg = "Fixup perms: %s invalid line: %s" % (conf, line) | ||
| 1037 | oe.qa.handle_error("perm-line", msg, d) | ||
| 1038 | continue | ||
| 1039 | entry = fs_perms_entry(d.expand(line)) | ||
| 1040 | if entry and entry.path: | ||
| 1041 | if entry.link: | ||
| 1042 | fs_link_table[entry.path] = entry | ||
| 1043 | if entry.path in fs_perms_table: | ||
| 1044 | fs_perms_table.pop(entry.path) | ||
| 1045 | else: | ||
| 1046 | fs_perms_table[entry.path] = entry | ||
| 1047 | if entry.path in fs_link_table: | ||
| 1048 | fs_link_table.pop(entry.path) | ||
| 1049 | |||
| 1050 | # Debug -- list out in-memory table | ||
| 1051 | #for dir in fs_perms_table: | ||
| 1052 | # bb.note("Fixup Perms: %s: %s" % (dir, str(fs_perms_table[dir]))) | ||
| 1053 | #for link in fs_link_table: | ||
| 1054 | # bb.note("Fixup Perms: %s: %s" % (link, str(fs_link_table[link]))) | ||
| 1055 | |||
| 1056 | # We process links first, so we can go back and fixup directory ownership | ||
| 1057 | # for any newly created directories | ||
| 1058 | # Process in sorted order so /run gets created before /run/lock, etc. | ||
| 1059 | for entry in sorted(fs_link_table.values(), key=lambda x: x.link): | ||
| 1060 | link = entry.link | ||
| 1061 | dir = entry.path | ||
| 1062 | origin = dvar + dir | ||
| 1063 | if not (cpath.exists(origin) and cpath.isdir(origin) and not cpath.islink(origin)): | ||
| 1064 | continue | ||
| 1065 | |||
| 1066 | if link[0] == "/": | ||
| 1067 | target = dvar + link | ||
| 1068 | ptarget = link | ||
| 1069 | else: | ||
| 1070 | target = os.path.join(os.path.dirname(origin), link) | ||
| 1071 | ptarget = os.path.join(os.path.dirname(dir), link) | ||
| 1072 | if os.path.exists(target): | ||
| 1073 | msg = "Fixup Perms: Unable to correct directory link, target already exists: %s -> %s" % (dir, ptarget) | ||
| 1074 | oe.qa.handle_error("perm-link", msg, d) | ||
| 1075 | continue | ||
| 1076 | |||
| 1077 | # Create path to move directory to, move it, and then setup the symlink | ||
| 1078 | bb.utils.mkdirhier(os.path.dirname(target)) | ||
| 1079 | #bb.note("Fixup Perms: Rename %s -> %s" % (dir, ptarget)) | ||
| 1080 | bb.utils.rename(origin, target) | ||
| 1081 | #bb.note("Fixup Perms: Link %s -> %s" % (dir, link)) | ||
| 1082 | os.symlink(link, origin) | ||
| 1083 | |||
| 1084 | for dir in fs_perms_table: | ||
| 1085 | origin = dvar + dir | ||
| 1086 | if not (cpath.exists(origin) and cpath.isdir(origin)): | ||
| 1087 | continue | ||
| 1088 | |||
| 1089 | fix_perms(origin, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir) | ||
| 1090 | |||
| 1091 | if fs_perms_table[dir].walk == 'true': | ||
| 1092 | for root, dirs, files in os.walk(origin): | ||
| 1093 | for dr in dirs: | ||
| 1094 | each_dir = os.path.join(root, dr) | ||
| 1095 | fix_perms(each_dir, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir) | ||
| 1096 | for f in files: | ||
| 1097 | each_file = os.path.join(root, f) | ||
| 1098 | fix_perms(each_file, fs_perms_table[dir].fmode, fs_perms_table[dir].fuid, fs_perms_table[dir].fgid, dir) | ||
| 1099 | } | ||
| 1100 | |||
| 1101 | def package_debug_vars(d): | ||
| 1102 | # We default to '.debug' style | ||
| 1103 | if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-file-directory': | ||
| 1104 | # Single debug-file-directory style debug info | ||
| 1105 | debug_vars = { | ||
| 1106 | "append": ".debug", | ||
| 1107 | "staticappend": "", | ||
| 1108 | "dir": "", | ||
| 1109 | "staticdir": "", | ||
| 1110 | "libdir": "/usr/lib/debug", | ||
| 1111 | "staticlibdir": "/usr/lib/debug-static", | ||
| 1112 | "srcdir": "/usr/src/debug", | ||
| 1113 | } | ||
| 1114 | elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-without-src': | ||
| 1115 | # Original OE-core, a.k.a. ".debug", style debug info, but without sources in /usr/src/debug | ||
| 1116 | debug_vars = { | ||
| 1117 | "append": "", | ||
| 1118 | "staticappend": "", | ||
| 1119 | "dir": "/.debug", | ||
| 1120 | "staticdir": "/.debug-static", | ||
| 1121 | "libdir": "", | ||
| 1122 | "staticlibdir": "", | ||
| 1123 | "srcdir": "", | ||
| 1124 | } | ||
| 1125 | elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg': | ||
| 1126 | debug_vars = { | ||
| 1127 | "append": "", | ||
| 1128 | "staticappend": "", | ||
| 1129 | "dir": "/.debug", | ||
| 1130 | "staticdir": "/.debug-static", | ||
| 1131 | "libdir": "", | ||
| 1132 | "staticlibdir": "", | ||
| 1133 | "srcdir": "/usr/src/debug", | ||
| 1134 | } | ||
| 1135 | else: | ||
| 1136 | # Original OE-core, a.k.a. ".debug", style debug info | ||
| 1137 | debug_vars = { | ||
| 1138 | "append": "", | ||
| 1139 | "staticappend": "", | ||
| 1140 | "dir": "/.debug", | ||
| 1141 | "staticdir": "/.debug-static", | ||
| 1142 | "libdir": "", | ||
| 1143 | "staticlibdir": "", | ||
| 1144 | "srcdir": "/usr/src/debug", | ||
| 1145 | } | ||
| 1146 | |||
| 1147 | return debug_vars | ||
| 1148 | |||
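The split style consulted by this helper is normally selected in distro or local configuration; a sketch choosing the single debug directory layout handled by the first branch above:

PACKAGE_DEBUG_SPLIT_STYLE = "debug-file-directory"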
| 1149 | python split_and_strip_files () { | ||
| 1150 | import stat, errno | ||
| 1151 | import subprocess | ||
| 1152 | |||
| 1153 | dvar = d.getVar('PKGD') | ||
| 1154 | pn = d.getVar('PN') | ||
| 1155 | hostos = d.getVar('HOST_OS') | ||
| 1156 | |||
| 1157 | oldcwd = os.getcwd() | ||
| 1158 | os.chdir(dvar) | ||
| 1159 | |||
| 1160 | dv = package_debug_vars(d) | ||
| 1161 | |||
| 1162 | # | ||
| 1163 | # First let's figure out all of the files we may have to process ... do this only once! | ||
| 1164 | # | ||
| 1165 | elffiles = {} | ||
| 1166 | symlinks = {} | ||
| 1167 | staticlibs = [] | ||
| 1168 | inodes = {} | ||
| 1169 | libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir")) | ||
| 1170 | baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir")) | ||
| 1171 | skipfiles = (d.getVar("INHIBIT_PACKAGE_STRIP_FILES") or "").split() | ||
| 1172 | if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1' or \ | ||
| 1173 | d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'): | ||
| 1174 | checkelf = {} | ||
| 1175 | checkelflinks = {} | ||
| 1176 | for root, dirs, files in cpath.walk(dvar): | ||
| 1177 | for f in files: | ||
| 1178 | file = os.path.join(root, f) | ||
| 1179 | |||
| 1180 | # Skip debug files | ||
| 1181 | if dv["append"] and file.endswith(dv["append"]): | ||
| 1182 | continue | ||
| 1183 | if dv["dir"] and dv["dir"] in os.path.dirname(file[len(dvar):]): | ||
| 1184 | continue | ||
| 1185 | |||
| 1186 | if file in skipfiles: | ||
| 1187 | continue | ||
| 1188 | |||
| 1189 | if oe.package.is_static_lib(file): | ||
| 1190 | staticlibs.append(file) | ||
| 1191 | continue | ||
| 1192 | |||
| 1193 | try: | ||
| 1194 | ltarget = cpath.realpath(file, dvar, False) | ||
| 1195 | s = cpath.lstat(ltarget) | ||
| 1196 | except OSError as e: | ||
| 1197 | (err, strerror) = e.args | ||
| 1198 | if err != errno.ENOENT: | ||
| 1199 | raise | ||
| 1200 | # Skip broken symlinks | ||
| 1201 | continue | ||
| 1202 | if not s: | ||
| 1203 | continue | ||
| 1204 | # Check it's an executable | ||
| 1205 | if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) \ | ||
| 1206 | or (s[stat.ST_MODE] & stat.S_IXOTH) \ | ||
| 1207 | or ((file.startswith(libdir) or file.startswith(baselibdir)) \ | ||
| 1208 | and (".so" in f or ".node" in f)) \ | ||
| 1209 | or (f.startswith('vmlinux') or ".ko" in f): | ||
| 1210 | |||
| 1211 | if cpath.islink(file): | ||
| 1212 | checkelflinks[file] = ltarget | ||
| 1213 | continue | ||
| 1214 | # Use a reference of device ID and inode number to identify files | ||
| 1215 | file_reference = "%d_%d" % (s.st_dev, s.st_ino) | ||
| 1216 | checkelf[file] = (file, file_reference) | ||
| 1217 | |||
| 1218 | results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelflinks.values(), d) | ||
| 1219 | results_map = {} | ||
| 1220 | for (ltarget, elf_file) in results: | ||
| 1221 | results_map[ltarget] = elf_file | ||
| 1222 | for file in checkelflinks: | ||
| 1223 | ltarget = checkelflinks[file] | ||
| 1224 | # If it's a symlink, and points to an ELF file, we capture the readlink target | ||
| 1225 | if results_map[ltarget]: | ||
| 1226 | target = os.readlink(file) | ||
| 1227 | #bb.note("Sym: %s (%d)" % (ltarget, results_map[ltarget])) | ||
| 1228 | symlinks[file] = target | ||
| 1229 | |||
| 1230 | results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelf.keys(), d) | ||
| 1231 | |||
| 1232 | # Sort results by file path. This ensures that the files are always | ||
| 1233 | # processed in the same order, which is important to make sure builds | ||
| 1234 | # are reproducible when dealing with hardlinks | ||
| 1235 | results.sort(key=lambda x: x[0]) | ||
| 1236 | |||
| 1237 | for (file, elf_file) in results: | ||
| 1238 | # It's a file (or hardlink), not a link | ||
| 1239 | # ...but is it ELF, and is it already stripped? | ||
| 1240 | if elf_file & 1: | ||
| 1241 | if elf_file & 2: | ||
| 1242 | if 'already-stripped' in (d.getVar('INSANE_SKIP:' + pn) or "").split(): | ||
| 1243 | bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn)) | ||
| 1244 | else: | ||
| 1245 | msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn) | ||
| 1246 | oe.qa.handle_error("already-stripped", msg, d) | ||
| 1247 | continue | ||
| 1248 | |||
| 1249 | # At this point we have an unstripped elf file. We need to: | ||
| 1250 | # a) Make sure any file we strip is not hardlinked to anything else outside this tree | ||
| 1251 | # b) Only strip any hardlinked file once (no races) | ||
| 1252 | # c) Track any hardlinks between files so that we can reconstruct matching debug file hardlinks | ||
| 1253 | |||
| 1254 | # Use a reference of device ID and inode number to identify files | ||
| 1255 | file_reference = checkelf[file][1] | ||
| 1256 | if file_reference in inodes: | ||
| 1257 | os.unlink(file) | ||
| 1258 | os.link(inodes[file_reference][0], file) | ||
| 1259 | inodes[file_reference].append(file) | ||
| 1260 | else: | ||
| 1261 | inodes[file_reference] = [file] | ||
| 1262 | # break hardlink | ||
| 1263 | bb.utils.break_hardlinks(file) | ||
| 1264 | elffiles[file] = elf_file | ||
| 1265 | # Modified the file so clear the cache | ||
| 1266 | cpath.updatecache(file) | ||
| 1267 | |||
| 1268 | def strip_pkgd_prefix(f): | ||
| 1269 | nonlocal dvar | ||
| 1270 | |||
| 1271 | if f.startswith(dvar): | ||
| 1272 | return f[len(dvar):] | ||
| 1273 | |||
| 1274 | return f | ||
| 1275 | |||
| 1276 | # | ||
| 1277 | # First let's process debug splitting | ||
| 1278 | # | ||
| 1279 | if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'): | ||
| 1280 | results = oe.utils.multiprocess_launch(splitdebuginfo, list(elffiles), d, extraargs=(dvar, dv, d)) | ||
| 1281 | |||
| 1282 | if dv["srcdir"] and not hostos.startswith("mingw"): | ||
| 1283 | if (d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'): | ||
| 1284 | results = oe.utils.multiprocess_launch(splitstaticdebuginfo, staticlibs, d, extraargs=(dvar, dv, d)) | ||
| 1285 | else: | ||
| 1286 | for file in staticlibs: | ||
| 1287 | results.append( (file,source_info(file, d)) ) | ||
| 1288 | |||
| 1289 | d.setVar("PKGDEBUGSOURCES", {strip_pkgd_prefix(f): sorted(s) for f, s in results}) | ||
| 1290 | |||
| 1291 | sources = set() | ||
| 1292 | for r in results: | ||
| 1293 | sources.update(r[1]) | ||
| 1294 | |||
| 1295 | # Hardlink our debug symbols to the other hardlink copies | ||
| 1296 | for ref in inodes: | ||
| 1297 | if len(inodes[ref]) == 1: | ||
| 1298 | continue | ||
| 1299 | |||
| 1300 | target = inodes[ref][0][len(dvar):] | ||
| 1301 | for file in inodes[ref][1:]: | ||
| 1302 | src = file[len(dvar):] | ||
| 1303 | dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(target) + dv["append"] | ||
| 1304 | fpath = dvar + dest | ||
| 1305 | ftarget = dvar + dv["libdir"] + os.path.dirname(target) + dv["dir"] + "/" + os.path.basename(target) + dv["append"] | ||
| 1306 | bb.utils.mkdirhier(os.path.dirname(fpath)) | ||
| 1307 | # Only create one hardlink to the separated debug info file in each directory | ||
| 1308 | if not os.access(fpath, os.R_OK): | ||
| 1309 | #bb.note("Link %s -> %s" % (fpath, ftarget)) | ||
| 1310 | os.link(ftarget, fpath) | ||
| 1311 | |||
| 1312 | # Create symlinks for all cases where we were able to split symbols | ||
| 1313 | for file in symlinks: | ||
| 1314 | src = file[len(dvar):] | ||
| 1315 | dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"] | ||
| 1316 | fpath = dvar + dest | ||
| 1317 | # Skip it if the target doesn't exist | ||
| 1318 | try: | ||
| 1319 | s = os.stat(fpath) | ||
| 1320 | except OSError as e: | ||
| 1321 | (err, strerror) = e.args | ||
| 1322 | if err != errno.ENOENT: | ||
| 1323 | raise | ||
| 1324 | continue | ||
| 1325 | |||
| 1326 | ltarget = symlinks[file] | ||
| 1327 | lpath = os.path.dirname(ltarget) | ||
| 1328 | lbase = os.path.basename(ltarget) | ||
| 1329 | ftarget = "" | ||
| 1330 | if lpath and lpath != ".": | ||
| 1331 | ftarget += lpath + dv["dir"] + "/" | ||
| 1332 | ftarget += lbase + dv["append"] | ||
| 1333 | if lpath.startswith(".."): | ||
| 1334 | ftarget = os.path.join("..", ftarget) | ||
| 1335 | bb.utils.mkdirhier(os.path.dirname(fpath)) | ||
| 1336 | #bb.note("Symlink %s -> %s" % (fpath, ftarget)) | ||
| 1337 | os.symlink(ftarget, fpath) | ||
| 1338 | |||
| 1339 | # Process the dv["srcdir"] if requested... | ||
| 1340 | # This copies and places the referenced sources for later debugging... | ||
| 1341 | copydebugsources(dv["srcdir"], sources, d) | ||
| 1342 | # | ||
| 1343 | # End of debug splitting | ||
| 1344 | # | ||
| 1345 | |||
| 1346 | # | ||
| 1347 | # Now let's go back over things and strip them | ||
| 1348 | # | ||
| 1349 | if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1'): | ||
| 1350 | strip = d.getVar("STRIP") | ||
| 1351 | sfiles = [] | ||
| 1352 | for file in elffiles: | ||
| 1353 | elf_file = int(elffiles[file]) | ||
| 1354 | #bb.note("Strip %s" % file) | ||
| 1355 | sfiles.append((file, elf_file, strip)) | ||
| 1356 | if (d.getVar('PACKAGE_STRIP_STATIC') == '1' or d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'): | ||
| 1357 | for f in staticlibs: | ||
| 1358 | sfiles.append((f, 16, strip)) | ||
| 1359 | |||
| 1360 | oe.utils.multiprocess_launch(oe.package.runstrip, sfiles, d) | ||
| 1361 | |||
| 1362 | # Build "minidebuginfo" and reinject it back into the stripped binaries | ||
| 1363 | if d.getVar('PACKAGE_MINIDEBUGINFO') == '1': | ||
| 1364 | oe.utils.multiprocess_launch(inject_minidebuginfo, list(elffiles), d, | ||
| 1365 | extraargs=(dvar, dv, d)) | ||
| 1366 | |||
| 1367 | # | ||
| 1368 | # End of strip | ||
| 1369 | # | ||
| 1370 | os.chdir(oldcwd) | ||
| 1371 | } | ||
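# Illustrative sketch (not part of the class): how the separated debug file path is
# derived above for hardlinks and symlinks. The dv values below are an assumption
# matching the common ".debug" subdirectory layout; the real values are computed
# earlier in the class from PACKAGE_DEBUG_SPLIT_STYLE.
import os

def debug_dest(src, dv):
    # Mirrors: dv["libdir"] + dirname(src) + dv["dir"] + "/" + basename(src) + dv["append"]
    return dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + \
           os.path.basename(src) + dv["append"]

example_dv = {"libdir": "", "dir": "/.debug", "append": ""}
print(debug_dest("/usr/bin/foo", example_dv))  # -> /usr/bin/.debug/foo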
| 1372 | |||
| 1373 | python populate_packages () { | ||
| 1374 | import glob, re | ||
| 1375 | |||
| 1376 | workdir = d.getVar('WORKDIR') | ||
| 1377 | outdir = d.getVar('DEPLOY_DIR') | ||
| 1378 | dvar = d.getVar('PKGD') | ||
| 1379 | packages = d.getVar('PACKAGES').split() | ||
| 1380 | pn = d.getVar('PN') | ||
| 1381 | |||
| 1382 | bb.utils.mkdirhier(outdir) | ||
| 1383 | os.chdir(dvar) | ||
| 1384 | |||
| 1385 | autodebug = not (d.getVar("NOAUTOPACKAGEDEBUG") or False) | ||
| 1386 | |||
| 1387 | split_source_package = (d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg') | ||
| 1388 | |||
| 1389 | # If debug-with-srcpkg mode is enabled then add the source package if it | ||
| 1390 | # doesn't exist and add the source file contents to the source package. | ||
| 1391 | if split_source_package: | ||
| 1392 | src_package_name = ('%s-src' % d.getVar('PN')) | ||
| 1393 | if not src_package_name in packages: | ||
| 1394 | packages.append(src_package_name) | ||
| 1395 | d.setVar('FILES:%s' % src_package_name, '/usr/src/debug') | ||
| 1396 | |||
| 1397 | # Sanity check PACKAGES for duplicates | ||
| 1398 | # This check should be moved to sanity.bbclass once we have the infrastructure | ||
| 1399 | package_dict = {} | ||
| 1400 | |||
| 1401 | for i, pkg in enumerate(packages): | ||
| 1402 | if pkg in package_dict: | ||
| 1403 | msg = "%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg | ||
| 1404 | oe.qa.handle_error("packages-list", msg, d) | ||
| 1405 | # Ensure the source package gets the chance to pick up the source files | ||
| 1406 | # before the debug package by ordering it first in PACKAGES. Whether it | ||
| 1407 | # actually picks up any source files is controlled by | ||
| 1408 | # PACKAGE_DEBUG_SPLIT_STYLE. | ||
| 1409 | elif pkg.endswith("-src"): | ||
| 1410 | package_dict[pkg] = (10, i) | ||
| 1411 | elif autodebug and pkg.endswith("-dbg"): | ||
| 1412 | package_dict[pkg] = (30, i) | ||
| 1413 | else: | ||
| 1414 | package_dict[pkg] = (50, i) | ||
| 1415 | packages = sorted(package_dict.keys(), key=package_dict.get) | ||
| 1416 | d.setVar('PACKAGES', ' '.join(packages)) | ||
| 1417 | pkgdest = d.getVar('PKGDEST') | ||
| 1418 | |||
| 1419 | seen = [] | ||
| 1420 | |||
| 1421 | # os.mkdir masks the permissions with umask so we have to unset it first | ||
| 1422 | oldumask = os.umask(0) | ||
| 1423 | |||
| 1424 | debug = [] | ||
| 1425 | for root, dirs, files in cpath.walk(dvar): | ||
| 1426 | dir = root[len(dvar):] | ||
| 1427 | if not dir: | ||
| 1428 | dir = os.sep | ||
| 1429 | for f in (files + dirs): | ||
| 1430 | path = "." + os.path.join(dir, f) | ||
| 1431 | if "/.debug/" in path or "/.debug-static/" in path or path.endswith("/.debug"): | ||
| 1432 | debug.append(path) | ||
| 1433 | |||
| 1434 | for pkg in packages: | ||
| 1435 | root = os.path.join(pkgdest, pkg) | ||
| 1436 | bb.utils.mkdirhier(root) | ||
| 1437 | |||
| 1438 | filesvar = d.getVar('FILES:%s' % pkg) or "" | ||
| 1439 | if "//" in filesvar: | ||
| 1440 | msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg | ||
| 1441 | oe.qa.handle_error("files-invalid", msg, d) | ||
| 1442 | filesvar = filesvar.replace("//", "/") | ||
| 1443 | |||
| 1444 | origfiles = filesvar.split() | ||
| 1445 | files, symlink_paths = files_from_filevars(origfiles) | ||
| 1446 | |||
| 1447 | if autodebug and pkg.endswith("-dbg"): | ||
| 1448 | files.extend(debug) | ||
| 1449 | |||
| 1450 | for file in files: | ||
| 1451 | if (not cpath.islink(file)) and (not cpath.exists(file)): | ||
| 1452 | continue | ||
| 1453 | if file in seen: | ||
| 1454 | continue | ||
| 1455 | seen.append(file) | ||
| 1456 | |||
| 1457 | def mkdir(src, dest, p): | ||
| 1458 | src = os.path.join(src, p) | ||
| 1459 | dest = os.path.join(dest, p) | ||
| 1460 | fstat = cpath.stat(src) | ||
| 1461 | os.mkdir(dest) | ||
| 1462 | os.chmod(dest, fstat.st_mode) | ||
| 1463 | os.chown(dest, fstat.st_uid, fstat.st_gid) | ||
| 1464 | if p not in seen: | ||
| 1465 | seen.append(p) | ||
| 1466 | cpath.updatecache(dest) | ||
| 1467 | |||
| 1468 | def mkdir_recurse(src, dest, paths): | ||
| 1469 | if cpath.exists(dest + '/' + paths): | ||
| 1470 | return | ||
| 1471 | while paths.startswith("./"): | ||
| 1472 | paths = paths[2:] | ||
| 1473 | p = "." | ||
| 1474 | for c in paths.split("/"): | ||
| 1475 | p = os.path.join(p, c) | ||
| 1476 | if not cpath.exists(os.path.join(dest, p)): | ||
| 1477 | mkdir(src, dest, p) | ||
| 1478 | |||
| 1479 | if cpath.isdir(file) and not cpath.islink(file): | ||
| 1480 | mkdir_recurse(dvar, root, file) | ||
| 1481 | continue | ||
| 1482 | |||
| 1483 | mkdir_recurse(dvar, root, os.path.dirname(file)) | ||
| 1484 | fpath = os.path.join(root,file) | ||
| 1485 | if not cpath.islink(file): | ||
| 1486 | os.link(file, fpath) | ||
| 1487 | continue | ||
| 1488 | ret = bb.utils.copyfile(file, fpath) | ||
| 1489 | if ret is False or ret == 0: | ||
| 1490 | bb.fatal("File population failed") | ||
| 1491 | |||
| 1492 | # Check if symlink paths exist | ||
| 1493 | for file in symlink_paths: | ||
| 1494 | if not os.path.exists(os.path.join(root,file)): | ||
| 1495 | bb.fatal("File '%s' cannot be packaged into '%s' because its " | ||
| 1496 | "parent directory structure does not exist. One of " | ||
| 1497 | "its parent directories is a symlink whose target " | ||
| 1498 | "directory is not included in the package." % | ||
| 1499 | (file, pkg)) | ||
| 1500 | |||
| 1501 | os.umask(oldumask) | ||
| 1502 | os.chdir(workdir) | ||
| 1503 | |||
| 1504 | # Handle excluding packages with incompatible licenses | ||
| 1505 | package_list = [] | ||
| 1506 | for pkg in packages: | ||
| 1507 | licenses = d.getVar('_exclude_incompatible-' + pkg) | ||
| 1508 | if licenses: | ||
| 1509 | msg = "Excluding %s from packaging as it has incompatible license(s): %s" % (pkg, licenses) | ||
| 1510 | oe.qa.handle_error("incompatible-license", msg, d) | ||
| 1511 | else: | ||
| 1512 | package_list.append(pkg) | ||
| 1513 | d.setVar('PACKAGES', ' '.join(package_list)) | ||
| 1514 | |||
| 1515 | unshipped = [] | ||
| 1516 | for root, dirs, files in cpath.walk(dvar): | ||
| 1517 | dir = root[len(dvar):] | ||
| 1518 | if not dir: | ||
| 1519 | dir = os.sep | ||
| 1520 | for f in (files + dirs): | ||
| 1521 | path = os.path.join(dir, f) | ||
| 1522 | if ('.' + path) not in seen: | ||
| 1523 | unshipped.append(path) | ||
| 1524 | |||
| 1525 | if unshipped != []: | ||
| 1526 | msg = pn + ": Files/directories were installed but not shipped in any package:" | ||
| 1527 | if "installed-vs-shipped" in (d.getVar('INSANE_SKIP:' + pn) or "").split(): | ||
| 1528 | bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn) | ||
| 1529 | else: | ||
| 1530 | for f in unshipped: | ||
| 1531 | msg = msg + "\n " + f | ||
| 1532 | msg = msg + "\nPlease set FILES such that these items are packaged. Alternatively if they are unneeded, avoid installing them or delete them within do_install.\n" | ||
| 1533 | msg = msg + "%s: %d installed and not shipped files." % (pn, len(unshipped)) | ||
| 1534 | oe.qa.handle_error("installed-vs-shipped", msg, d) | ||
| 1535 | } | ||
| 1536 | populate_packages[dirs] = "${D}" | ||
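# A minimal standalone sketch (not part of the class) of the PACKAGES ordering
# applied in populate_packages above: -src packages first, then -dbg when
# automatic debug packaging is enabled, then everything else, with the original
# order preserved within each group. The package names are hypothetical.
def order_packages(packages, autodebug=True):
    package_dict = {}
    for i, pkg in enumerate(packages):
        if pkg.endswith("-src"):
            package_dict[pkg] = (10, i)
        elif autodebug and pkg.endswith("-dbg"):
            package_dict[pkg] = (30, i)
        else:
            package_dict[pkg] = (50, i)
    return sorted(package_dict.keys(), key=package_dict.get)

# order_packages(["foo", "foo-dbg", "foo-dev", "foo-src"])
# -> ['foo-src', 'foo-dbg', 'foo', 'foo-dev']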
| 1537 | |||
| 1538 | python package_fixsymlinks () { | ||
| 1539 | import errno | ||
| 1540 | pkgdest = d.getVar('PKGDEST') | ||
| 1541 | packages = d.getVar("PACKAGES", False).split() | ||
| 1542 | |||
| 1543 | dangling_links = {} | ||
| 1544 | pkg_files = {} | ||
| 1545 | for pkg in packages: | ||
| 1546 | dangling_links[pkg] = [] | ||
| 1547 | pkg_files[pkg] = [] | ||
| 1548 | inst_root = os.path.join(pkgdest, pkg) | ||
| 1549 | for path in pkgfiles[pkg]: | ||
| 1550 | rpath = path[len(inst_root):] | ||
| 1551 | pkg_files[pkg].append(rpath) | ||
| 1552 | rtarget = cpath.realpath(path, inst_root, True, assume_dir = True) | ||
| 1553 | if not cpath.lexists(rtarget): | ||
| 1554 | dangling_links[pkg].append(os.path.normpath(rtarget[len(inst_root):])) | ||
| 1555 | |||
| 1556 | newrdepends = {} | ||
| 1557 | for pkg in dangling_links: | ||
| 1558 | for l in dangling_links[pkg]: | ||
| 1559 | found = False | ||
| 1560 | bb.debug(1, "%s contains dangling link %s" % (pkg, l)) | ||
| 1561 | for p in packages: | ||
| 1562 | if l in pkg_files[p]: | ||
| 1563 | found = True | ||
| 1564 | bb.debug(1, "target found in %s" % p) | ||
| 1565 | if p == pkg: | ||
| 1566 | break | ||
| 1567 | if pkg not in newrdepends: | ||
| 1568 | newrdepends[pkg] = [] | ||
| 1569 | newrdepends[pkg].append(p) | ||
| 1570 | break | ||
| 1571 | if found == False: | ||
| 1572 | bb.note("%s contains dangling symlink to %s" % (pkg, l)) | ||
| 1573 | |||
| 1574 | for pkg in newrdepends: | ||
| 1575 | rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS:' + pkg) or "") | ||
| 1576 | for p in newrdepends[pkg]: | ||
| 1577 | if p not in rdepends: | ||
| 1578 | rdepends[p] = [] | ||
| 1579 | d.setVar('RDEPENDS:' + pkg, bb.utils.join_deps(rdepends, commasep=False)) | ||
| 1580 | } | ||
| 1581 | |||
| 1582 | |||
| 1583 | python package_package_name_hook() { | ||
| 1584 | """ | ||
| 1585 | A package_name_hook function can be used to rewrite the package names by | ||
| 1586 | changing PKG. For an example, see debian.bbclass. | ||
| 1587 | """ | ||
| 1588 | pass | ||
| 1589 | } | ||
| 1590 | |||
| 1591 | EXPORT_FUNCTIONS package_name_hook | ||
| 1592 | |||
| 1593 | |||
| 1594 | PKGDESTWORK = "${WORKDIR}/pkgdata" | ||
| 1595 | |||
| 1596 | PKGDATA_VARS = "PN PE PV PR PKGE PKGV PKGR LICENSE DESCRIPTION SUMMARY RDEPENDS RPROVIDES RRECOMMENDS RSUGGESTS RREPLACES RCONFLICTS SECTION PKG ALLOW_EMPTY FILES CONFFILES FILES_INFO PACKAGE_ADD_METADATA pkg_postinst pkg_postrm pkg_preinst pkg_prerm" | ||
| 1597 | |||
| 1598 | python emit_pkgdata() { | ||
| 1599 | from glob import glob | ||
| 1600 | import json | ||
| 1601 | import bb.compress.zstd | ||
| 1602 | |||
| 1603 | def process_postinst_on_target(pkg, mlprefix): | ||
| 1604 | pkgval = d.getVar('PKG:%s' % pkg) | ||
| 1605 | if pkgval is None: | ||
| 1606 | pkgval = pkg | ||
| 1607 | |||
| 1608 | defer_fragment = """ | ||
| 1609 | if [ -n "$D" ]; then | ||
| 1610 | $INTERCEPT_DIR/postinst_intercept delay_to_first_boot %s mlprefix=%s | ||
| 1611 | exit 0 | ||
| 1612 | fi | ||
| 1613 | """ % (pkgval, mlprefix) | ||
| 1614 | |||
| 1615 | postinst = d.getVar('pkg_postinst:%s' % pkg) | ||
| 1616 | postinst_ontarget = d.getVar('pkg_postinst_ontarget:%s' % pkg) | ||
| 1617 | |||
| 1618 | if postinst_ontarget: | ||
| 1619 | bb.debug(1, 'adding deferred pkg_postinst_ontarget() to pkg_postinst() for %s' % pkg) | ||
| 1620 | if not postinst: | ||
| 1621 | postinst = '#!/bin/sh\n' | ||
| 1622 | postinst += defer_fragment | ||
| 1623 | postinst += postinst_ontarget | ||
| 1624 | d.setVar('pkg_postinst:%s' % pkg, postinst) | ||
| 1625 | |||
| 1626 | def add_set_e_to_scriptlets(pkg): | ||
| 1627 | for scriptlet_name in ('pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm'): | ||
| 1628 | scriptlet = d.getVar('%s:%s' % (scriptlet_name, pkg)) | ||
| 1629 | if scriptlet: | ||
| 1630 | scriptlet_split = scriptlet.split('\n') | ||
| 1631 | if scriptlet_split[0].startswith("#!"): | ||
| 1632 | scriptlet = scriptlet_split[0] + "\nset -e\n" + "\n".join(scriptlet_split[1:]) | ||
| 1633 | else: | ||
| 1634 | scriptlet = "set -e\n" + "\n".join(scriptlet_split[0:]) | ||
| 1635 | d.setVar('%s:%s' % (scriptlet_name, pkg), scriptlet) | ||
| 1636 | |||
| 1637 | def write_if_exists(f, pkg, var): | ||
| 1638 | def encode(str): | ||
| 1639 | import codecs | ||
| 1640 | c = codecs.getencoder("unicode_escape") | ||
| 1641 | return c(str)[0].decode("latin1") | ||
| 1642 | |||
| 1643 | val = d.getVar('%s:%s' % (var, pkg)) | ||
| 1644 | if val: | ||
| 1645 | f.write('%s:%s: %s\n' % (var, pkg, encode(val))) | ||
| 1646 | return val | ||
| 1647 | val = d.getVar('%s' % (var)) | ||
| 1648 | if val: | ||
| 1649 | f.write('%s: %s\n' % (var, encode(val))) | ||
| 1650 | return val | ||
| 1651 | |||
| 1652 | def write_extra_pkgs(variants, pn, packages, pkgdatadir): | ||
| 1653 | for variant in variants: | ||
| 1654 | with open("%s/%s-%s" % (pkgdatadir, variant, pn), 'w') as fd: | ||
| 1655 | fd.write("PACKAGES: %s\n" % ' '.join( | ||
| 1656 | map(lambda pkg: '%s-%s' % (variant, pkg), packages.split()))) | ||
| 1657 | |||
| 1658 | def write_extra_runtime_pkgs(variants, packages, pkgdatadir): | ||
| 1659 | for variant in variants: | ||
| 1660 | for pkg in packages.split(): | ||
| 1661 | ml_pkg = "%s-%s" % (variant, pkg) | ||
| 1662 | subdata_file = "%s/runtime/%s" % (pkgdatadir, ml_pkg) | ||
| 1663 | with open(subdata_file, 'w') as fd: | ||
| 1664 | fd.write("PKG:%s: %s" % (ml_pkg, pkg)) | ||
| 1665 | |||
| 1666 | packages = d.getVar('PACKAGES') | ||
| 1667 | pkgdest = d.getVar('PKGDEST') | ||
| 1668 | pkgdatadir = d.getVar('PKGDESTWORK') | ||
| 1669 | |||
| 1670 | data_file = pkgdatadir + d.expand("/${PN}") | ||
| 1671 | with open(data_file, 'w') as fd: | ||
| 1672 | fd.write("PACKAGES: %s\n" % packages) | ||
| 1673 | |||
| 1674 | pkgdebugsource = d.getVar("PKGDEBUGSOURCES") or [] | ||
| 1675 | |||
| 1676 | pn = d.getVar('PN') | ||
| 1677 | global_variants = (d.getVar('MULTILIB_GLOBAL_VARIANTS') or "").split() | ||
| 1678 | variants = (d.getVar('MULTILIB_VARIANTS') or "").split() | ||
| 1679 | |||
| 1680 | if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d): | ||
| 1681 | write_extra_pkgs(variants, pn, packages, pkgdatadir) | ||
| 1682 | |||
| 1683 | if bb.data.inherits_class('allarch', d) and not variants \ | ||
| 1684 | and not bb.data.inherits_class('packagegroup', d): | ||
| 1685 | write_extra_pkgs(global_variants, pn, packages, pkgdatadir) | ||
| 1686 | |||
| 1687 | workdir = d.getVar('WORKDIR') | ||
| 1688 | |||
| 1689 | for pkg in packages.split(): | ||
| 1690 | pkgval = d.getVar('PKG:%s' % pkg) | ||
| 1691 | if pkgval is None: | ||
| 1692 | pkgval = pkg | ||
| 1693 | d.setVar('PKG:%s' % pkg, pkg) | ||
| 1694 | |||
| 1695 | extended_data = { | ||
| 1696 | "files_info": {} | ||
| 1697 | } | ||
| 1698 | |||
| 1699 | pkgdestpkg = os.path.join(pkgdest, pkg) | ||
| 1700 | files = {} | ||
| 1701 | files_extra = {} | ||
| 1702 | total_size = 0 | ||
| 1703 | seen = set() | ||
| 1704 | for f in pkgfiles[pkg]: | ||
| 1705 | fpath = os.sep + os.path.relpath(f, pkgdestpkg) | ||
| 1706 | |||
| 1707 | fstat = os.lstat(f) | ||
| 1708 | files[fpath] = fstat.st_size | ||
| 1709 | |||
| 1710 | extended_data["files_info"].setdefault(fpath, {}) | ||
| 1711 | extended_data["files_info"][fpath]['size'] = fstat.st_size | ||
| 1712 | |||
| 1713 | if fstat.st_ino not in seen: | ||
| 1714 | seen.add(fstat.st_ino) | ||
| 1715 | total_size += fstat.st_size | ||
| 1716 | |||
| 1717 | if fpath in pkgdebugsource: | ||
| 1718 | extended_data["files_info"][fpath]['debugsrc'] = pkgdebugsource[fpath] | ||
| 1719 | del pkgdebugsource[fpath] | ||
| 1720 | |||
| 1721 | d.setVar('FILES_INFO:' + pkg , json.dumps(files, sort_keys=True)) | ||
| 1722 | |||
| 1723 | process_postinst_on_target(pkg, d.getVar("MLPREFIX")) | ||
| 1724 | add_set_e_to_scriptlets(pkg) | ||
| 1725 | |||
| 1726 | subdata_file = pkgdatadir + "/runtime/%s" % pkg | ||
| 1727 | with open(subdata_file, 'w') as sf: | ||
| 1728 | for var in (d.getVar('PKGDATA_VARS') or "").split(): | ||
| 1729 | val = write_if_exists(sf, pkg, var) | ||
| 1730 | |||
| 1731 | write_if_exists(sf, pkg, 'FILERPROVIDESFLIST') | ||
| 1732 | for dfile in sorted((d.getVar('FILERPROVIDESFLIST:' + pkg) or "").split()): | ||
| 1733 | write_if_exists(sf, pkg, 'FILERPROVIDES:' + dfile) | ||
| 1734 | |||
| 1735 | write_if_exists(sf, pkg, 'FILERDEPENDSFLIST') | ||
| 1736 | for dfile in sorted((d.getVar('FILERDEPENDSFLIST:' + pkg) or "").split()): | ||
| 1737 | write_if_exists(sf, pkg, 'FILERDEPENDS:' + dfile) | ||
| 1738 | |||
| 1739 | sf.write('%s:%s: %d\n' % ('PKGSIZE', pkg, total_size)) | ||
| 1740 | |||
| 1741 | subdata_extended_file = pkgdatadir + "/extended/%s.json.zstd" % pkg | ||
| 1742 | num_threads = int(d.getVar("BB_NUMBER_THREADS")) | ||
| 1743 | with bb.compress.zstd.open(subdata_extended_file, "wt", encoding="utf-8", num_threads=num_threads) as f: | ||
| 1744 | json.dump(extended_data, f, sort_keys=True, separators=(",", ":")) | ||
| 1745 | |||
| 1746 | # Symlinks needed for rprovides lookup | ||
| 1747 | rprov = d.getVar('RPROVIDES:%s' % pkg) or d.getVar('RPROVIDES') | ||
| 1748 | if rprov: | ||
| 1749 | for p in bb.utils.explode_deps(rprov): | ||
| 1750 | subdata_sym = pkgdatadir + "/runtime-rprovides/%s/%s" % (p, pkg) | ||
| 1751 | bb.utils.mkdirhier(os.path.dirname(subdata_sym)) | ||
| 1752 | oe.path.symlink("../../runtime/%s" % pkg, subdata_sym, True) | ||
| 1753 | |||
| 1754 | allow_empty = d.getVar('ALLOW_EMPTY:%s' % pkg) | ||
| 1755 | if not allow_empty: | ||
| 1756 | allow_empty = d.getVar('ALLOW_EMPTY') | ||
| 1757 | root = "%s/%s" % (pkgdest, pkg) | ||
| 1758 | os.chdir(root) | ||
| 1759 | g = glob('*') | ||
| 1760 | if g or allow_empty == "1": | ||
| 1761 | # Symlinks needed for reverse lookups (from the final package name) | ||
| 1762 | subdata_sym = pkgdatadir + "/runtime-reverse/%s" % pkgval | ||
| 1763 | oe.path.symlink("../runtime/%s" % pkg, subdata_sym, True) | ||
| 1764 | |||
| 1765 | packagedfile = pkgdatadir + '/runtime/%s.packaged' % pkg | ||
| 1766 | open(packagedfile, 'w').close() | ||
| 1767 | |||
| 1768 | if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d): | ||
| 1769 | write_extra_runtime_pkgs(variants, packages, pkgdatadir) | ||
| 1770 | |||
| 1771 | if bb.data.inherits_class('allarch', d) and not variants \ | ||
| 1772 | and not bb.data.inherits_class('packagegroup', d): | ||
| 1773 | write_extra_runtime_pkgs(global_variants, packages, pkgdatadir) | ||
| 1774 | |||
| 1775 | } | ||
| 1776 | emit_pkgdata[dirs] = "${PKGDESTWORK}/runtime ${PKGDESTWORK}/runtime-reverse ${PKGDESTWORK}/runtime-rprovides ${PKGDESTWORK}/extended" | ||
| 1777 | emit_pkgdata[vardepsexclude] = "BB_NUMBER_THREADS" | ||
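# Illustrative sketch (not part of the class): the per-package runtime files
# emitted above are plain "VARIABLE[:pkg]: value" lines. A minimal reader, with
# a hypothetical path and package name:
def read_runtime_pkgdata(path):
    values = {}
    with open(path) as f:
        for line in f:
            key, sep, value = line.rstrip("\n").partition(": ")
            if sep:
                values[key] = value
    return values

# data = read_runtime_pkgdata("pkgdata/runtime/foo")
# data.get("PKGSIZE:foo")   # e.g. "40960"
# data.get("RDEPENDS:foo")  # e.g. "glibc (>= 2.37)"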
| 1778 | |||
| 1779 | ldconfig_postinst_fragment() { | ||
| 1780 | if [ x"$D" = "x" ]; then | ||
| 1781 | if [ -x /sbin/ldconfig ]; then /sbin/ldconfig ; fi | ||
| 1782 | fi | ||
| 1783 | } | ||
| 1784 | |||
| 1785 | RPMDEPS = "${STAGING_LIBDIR_NATIVE}/rpm/rpmdeps --alldeps --define '__font_provides %{nil}'" | ||
| 1786 | |||
| 1787 | # Collect perfile run-time dependency metadata | ||
| 1788 | # Output: | ||
| 1789 | # FILERPROVIDESFLIST:pkg - list of all files w/ provides | ||
| 1790 | # FILERPROVIDES:filepath:pkg - per file dep | ||
| 1791 | # | ||
| 1792 | # FILERDEPENDSFLIST:pkg - list of all files w/ deps | ||
| 1793 | # FILERDEPENDS:filepath:pkg - per file dep | ||
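#
# For example (values are illustrative only), a hypothetical package "foo"
# containing /usr/bin/foo might end up with entries along the lines of:
#   FILERDEPENDSFLIST:foo = "/usr/bin/foo"
#   FILERDEPENDS:/usr/bin/foo:foo = "libc.so.6()(64bit)"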
| 1794 | |||
| 1795 | python package_do_filedeps() { | ||
| 1796 | if d.getVar('SKIP_FILEDEPS') == '1': | ||
| 1797 | return | ||
| 1798 | |||
| 1799 | pkgdest = d.getVar('PKGDEST') | ||
| 1800 | packages = d.getVar('PACKAGES') | ||
| 1801 | rpmdeps = d.getVar('RPMDEPS') | ||
| 1802 | |||
| 1803 | def chunks(files, n): | ||
| 1804 | return [files[i:i+n] for i in range(0, len(files), n)] | ||
| 1805 | |||
| 1806 | pkglist = [] | ||
| 1807 | for pkg in packages.split(): | ||
| 1808 | if d.getVar('SKIP_FILEDEPS:' + pkg) == '1': | ||
| 1809 | continue | ||
| 1810 | if pkg.endswith('-dbg') or pkg.endswith('-doc') or pkg.find('-locale-') != -1 or pkg.find('-localedata-') != -1 or pkg.find('-gconv-') != -1 or pkg.find('-charmap-') != -1 or pkg.startswith('kernel-module-') or pkg.endswith('-src'): | ||
| 1811 | continue | ||
| 1812 | for files in chunks(pkgfiles[pkg], 100): | ||
| 1813 | pkglist.append((pkg, files, rpmdeps, pkgdest)) | ||
| 1814 | |||
| 1815 | processed = oe.utils.multiprocess_launch(oe.package.filedeprunner, pkglist, d) | ||
| 1816 | |||
| 1817 | provides_files = {} | ||
| 1818 | requires_files = {} | ||
| 1819 | |||
| 1820 | for result in processed: | ||
| 1821 | (pkg, provides, requires) = result | ||
| 1822 | |||
| 1823 | if pkg not in provides_files: | ||
| 1824 | provides_files[pkg] = [] | ||
| 1825 | if pkg not in requires_files: | ||
| 1826 | requires_files[pkg] = [] | ||
| 1827 | |||
| 1828 | for file in sorted(provides): | ||
| 1829 | provides_files[pkg].append(file) | ||
| 1830 | key = "FILERPROVIDES:" + file + ":" + pkg | ||
| 1831 | d.appendVar(key, " " + " ".join(provides[file])) | ||
| 1832 | |||
| 1833 | for file in sorted(requires): | ||
| 1834 | requires_files[pkg].append(file) | ||
| 1835 | key = "FILERDEPENDS:" + file + ":" + pkg | ||
| 1836 | d.appendVar(key, " " + " ".join(requires[file])) | ||
| 1837 | |||
| 1838 | for pkg in requires_files: | ||
| 1839 | d.setVar("FILERDEPENDSFLIST:" + pkg, " ".join(sorted(requires_files[pkg]))) | ||
| 1840 | for pkg in provides_files: | ||
| 1841 | d.setVar("FILERPROVIDESFLIST:" + pkg, " ".join(sorted(provides_files[pkg]))) | ||
| 1842 | } | ||
| 1843 | |||
| 1844 | SHLIBSDIRS = "${WORKDIR_PKGDATA}/${MLPREFIX}shlibs2" | ||
| 1845 | SHLIBSWORKDIR = "${PKGDESTWORK}/${MLPREFIX}shlibs2" | ||
| 1846 | |||
| 1847 | python package_do_shlibs() { | ||
| 1848 | import itertools | ||
| 1849 | import re, pipes | ||
| 1850 | import subprocess | ||
| 1851 | |||
| 1852 | exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', False) | ||
| 1853 | if exclude_shlibs: | ||
| 1854 | bb.note("not generating shlibs") | ||
| 1855 | return | ||
| 1856 | |||
| 1857 | lib_re = re.compile(r"^.*\.so") | ||
| 1858 | libdir_re = re.compile(r".*/%s$" % d.getVar('baselib')) | ||
| 1859 | |||
| 1860 | packages = d.getVar('PACKAGES') | ||
| 1861 | |||
| 1862 | shlib_pkgs = [] | ||
| 1863 | exclusion_list = d.getVar("EXCLUDE_PACKAGES_FROM_SHLIBS") | ||
| 1864 | if exclusion_list: | ||
| 1865 | for pkg in packages.split(): | ||
| 1866 | if pkg not in exclusion_list.split(): | ||
| 1867 | shlib_pkgs.append(pkg) | ||
| 1868 | else: | ||
| 1869 | bb.note("not generating shlibs for %s" % pkg) | ||
| 1870 | else: | ||
| 1871 | shlib_pkgs = packages.split() | ||
| 1872 | |||
| 1873 | hostos = d.getVar('HOST_OS') | ||
| 1874 | |||
| 1875 | workdir = d.getVar('WORKDIR') | ||
| 1876 | |||
| 1877 | ver = d.getVar('PKGV') | ||
| 1878 | if not ver: | ||
| 1879 | msg = "PKGV not defined" | ||
| 1880 | oe.qa.handle_error("pkgv-undefined", msg, d) | ||
| 1881 | return | ||
| 1882 | |||
| 1883 | pkgdest = d.getVar('PKGDEST') | ||
| 1884 | |||
| 1885 | shlibswork_dir = d.getVar('SHLIBSWORKDIR') | ||
| 1886 | |||
| 1887 | def linux_so(file, pkg, pkgver, d): | ||
| 1888 | needs_ldconfig = False | ||
| 1889 | needed = set() | ||
| 1890 | sonames = set() | ||
| 1891 | renames = [] | ||
| 1892 | ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '') | ||
| 1893 | cmd = d.getVar('OBJDUMP') + " -p " + pipes.quote(file) + " 2>/dev/null" | ||
| 1894 | fd = os.popen(cmd) | ||
| 1895 | lines = fd.readlines() | ||
| 1896 | fd.close() | ||
| 1897 | rpath = tuple() | ||
| 1898 | for l in lines: | ||
| 1899 | m = re.match(r"\s+RPATH\s+([^\s]*)", l) | ||
| 1900 | if m: | ||
| 1901 | rpaths = m.group(1).replace("$ORIGIN", ldir).split(":") | ||
| 1902 | rpath = tuple(map(os.path.normpath, rpaths)) | ||
| 1903 | for l in lines: | ||
| 1904 | m = re.match(r"\s+NEEDED\s+([^\s]*)", l) | ||
| 1905 | if m: | ||
| 1906 | dep = m.group(1) | ||
| 1907 | if dep not in needed: | ||
| 1908 | needed.add((dep, file, rpath)) | ||
| 1909 | m = re.match(r"\s+SONAME\s+([^\s]*)", l) | ||
| 1910 | if m: | ||
| 1911 | this_soname = m.group(1) | ||
| 1912 | prov = (this_soname, ldir, pkgver) | ||
| 1913 | if not prov in sonames: | ||
| 1914 | # if the library is private (only used by this package) then do not register it as a shlib provider | ||
| 1915 | import fnmatch | ||
| 1916 | if not private_libs or len([i for i in private_libs if fnmatch.fnmatch(this_soname, i)]) == 0: | ||
| 1917 | sonames.add(prov) | ||
| 1918 | if libdir_re.match(os.path.dirname(file)): | ||
| 1919 | needs_ldconfig = True | ||
| 1920 | if needs_ldconfig and snap_symlinks and (os.path.basename(file) != this_soname): | ||
| 1921 | renames.append((file, os.path.join(os.path.dirname(file), this_soname))) | ||
| 1922 | return (needs_ldconfig, needed, sonames, renames) | ||
| 1923 | |||
| 1924 | def darwin_so(file, needed, sonames, renames, pkgver): | ||
| 1925 | if not os.path.exists(file): | ||
| 1926 | return | ||
| 1927 | ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '') | ||
| 1928 | |||
| 1929 | def get_combinations(base): | ||
| 1930 | # | ||
| 1931 | # Given a base library name, find all combinations of this split by "." and "-" | ||
| 1932 | # | ||
| 1933 | combos = [] | ||
| 1934 | options = base.split(".") | ||
| 1935 | for i in range(1, len(options) + 1): | ||
| 1936 | combos.append(".".join(options[0:i])) | ||
| 1937 | options = base.split("-") | ||
| 1938 | for i in range(1, len(options) + 1): | ||
| 1939 | combos.append("-".join(options[0:i])) | ||
| 1940 | return combos | ||
| 1941 | |||
| 1942 | if (file.endswith('.dylib') or file.endswith('.so')) and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.endswith('-src'): | ||
| 1943 | # Drop suffix | ||
| 1944 | name = os.path.basename(file).rsplit(".",1)[0] | ||
| 1945 | # Find all combinations | ||
| 1946 | combos = get_combinations(name) | ||
| 1947 | for combo in combos: | ||
| 1948 | if not combo in sonames: | ||
| 1949 | prov = (combo, ldir, pkgver) | ||
| 1950 | sonames.add(prov) | ||
| 1951 | if file.endswith('.dylib') or file.endswith('.so'): | ||
| 1952 | rpath = [] | ||
| 1953 | p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-l', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE) | ||
| 1954 | out, err = p.communicate() | ||
| 1955 | # If returned successfully, process stdout for results | ||
| 1956 | if p.returncode == 0: | ||
| 1957 | for l in out.split("\n"): | ||
| 1958 | l = l.strip() | ||
| 1959 | if l.startswith('path '): | ||
| 1960 | rpath.append(l.split()[1]) | ||
| 1961 | |||
| 1962 | p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-L', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE) | ||
| 1963 | out, err = p.communicate() | ||
| 1964 | # If returned successfully, process stdout for results | ||
| 1965 | if p.returncode == 0: | ||
| 1966 | for l in out.split("\n"): | ||
| 1967 | l = l.strip() | ||
| 1968 | if not l or l.endswith(":"): | ||
| 1969 | continue | ||
| 1970 | if "is not an object file" in l: | ||
| 1971 | continue | ||
| 1972 | name = os.path.basename(l.split()[0]).rsplit(".", 1)[0] | ||
| 1973 | if name and name not in needed[pkg]: | ||
| 1974 | needed[pkg].add((name, file, tuple())) | ||
| 1975 | |||
| 1976 | def mingw_dll(file, needed, sonames, renames, pkgver): | ||
| 1977 | if not os.path.exists(file): | ||
| 1978 | return | ||
| 1979 | |||
| 1980 | if file.endswith(".dll"): | ||
| 1981 | # assume all dlls are shared objects provided by the package | ||
| 1982 | sonames.add((os.path.basename(file), os.path.dirname(file).replace(pkgdest + "/" + pkg, ''), pkgver)) | ||
| 1983 | |||
| 1984 | if (file.endswith(".dll") or file.endswith(".exe")): | ||
| 1985 | # use objdump to search for "DLL Name: .*\.dll" | ||
| 1986 | p = subprocess.Popen([d.expand("${HOST_PREFIX}objdump"), "-p", file], stdout=subprocess.PIPE, stderr=subprocess.PIPE) | ||
| 1987 | out, err = p.communicate() | ||
| 1988 | # process the output, grabbing all .dll names | ||
| 1989 | if p.returncode == 0: | ||
| 1990 | for m in re.finditer(r"DLL Name: (.*?\.dll)$", out.decode(), re.MULTILINE | re.IGNORECASE): | ||
| 1991 | dllname = m.group(1) | ||
| 1992 | if dllname: | ||
| 1993 | needed[pkg].add((dllname, file, tuple())) | ||
| 1994 | |||
| 1995 | if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS') == "1": | ||
| 1996 | snap_symlinks = True | ||
| 1997 | else: | ||
| 1998 | snap_symlinks = False | ||
| 1999 | |||
| 2000 | needed = {} | ||
| 2001 | |||
| 2002 | shlib_provider = oe.package.read_shlib_providers(d) | ||
| 2003 | |||
| 2004 | for pkg in shlib_pkgs: | ||
| 2005 | private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or "" | ||
| 2006 | private_libs = private_libs.split() | ||
| 2007 | needs_ldconfig = False | ||
| 2008 | bb.debug(2, "calculating shlib provides for %s" % pkg) | ||
| 2009 | |||
| 2010 | pkgver = d.getVar('PKGV:' + pkg) | ||
| 2011 | if not pkgver: | ||
| 2012 | pkgver = d.getVar('PV_' + pkg) | ||
| 2013 | if not pkgver: | ||
| 2014 | pkgver = ver | ||
| 2015 | |||
| 2016 | needed[pkg] = set() | ||
| 2017 | sonames = set() | ||
| 2018 | renames = [] | ||
| 2019 | linuxlist = [] | ||
| 2020 | for file in pkgfiles[pkg]: | ||
| 2021 | soname = None | ||
| 2022 | if cpath.islink(file): | ||
| 2023 | continue | ||
| 2024 | if hostos == "darwin" or hostos == "darwin8": | ||
| 2025 | darwin_so(file, needed, sonames, renames, pkgver) | ||
| 2026 | elif hostos.startswith("mingw"): | ||
| 2027 | mingw_dll(file, needed, sonames, renames, pkgver) | ||
| 2028 | elif os.access(file, os.X_OK) or lib_re.match(file): | ||
| 2029 | linuxlist.append(file) | ||
| 2030 | |||
| 2031 | if linuxlist: | ||
| 2032 | results = oe.utils.multiprocess_launch(linux_so, linuxlist, d, extraargs=(pkg, pkgver, d)) | ||
| 2033 | for r in results: | ||
| 2034 | ldconfig = r[0] | ||
| 2035 | needed[pkg] |= r[1] | ||
| 2036 | sonames |= r[2] | ||
| 2037 | renames.extend(r[3]) | ||
| 2038 | needs_ldconfig = needs_ldconfig or ldconfig | ||
| 2039 | |||
| 2040 | for (old, new) in renames: | ||
| 2041 | bb.note("Renaming %s to %s" % (old, new)) | ||
| 2042 | bb.utils.rename(old, new) | ||
| 2043 | pkgfiles[pkg].remove(old) | ||
| 2044 | |||
| 2045 | shlibs_file = os.path.join(shlibswork_dir, pkg + ".list") | ||
| 2046 | if len(sonames): | ||
| 2047 | with open(shlibs_file, 'w') as fd: | ||
| 2048 | for s in sorted(sonames): | ||
| 2049 | if s[0] in shlib_provider and s[1] in shlib_provider[s[0]]: | ||
| 2050 | (old_pkg, old_pkgver) = shlib_provider[s[0]][s[1]] | ||
| 2051 | if old_pkg != pkg: | ||
| 2052 | bb.warn('%s-%s was registered as shlib provider for %s, changing it to %s-%s because it was built later' % (old_pkg, old_pkgver, s[0], pkg, pkgver)) | ||
| 2053 | bb.debug(1, 'registering %s-%s as shlib provider for %s' % (pkg, pkgver, s[0])) | ||
| 2054 | fd.write(s[0] + ':' + s[1] + ':' + s[2] + '\n') | ||
| 2055 | if s[0] not in shlib_provider: | ||
| 2056 | shlib_provider[s[0]] = {} | ||
| 2057 | shlib_provider[s[0]][s[1]] = (pkg, pkgver) | ||
| 2058 | if needs_ldconfig: | ||
| 2059 | bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg) | ||
| 2060 | postinst = d.getVar('pkg_postinst:%s' % pkg) | ||
| 2061 | if not postinst: | ||
| 2062 | postinst = '#!/bin/sh\n' | ||
| 2063 | postinst += d.getVar('ldconfig_postinst_fragment') | ||
| 2064 | d.setVar('pkg_postinst:%s' % pkg, postinst) | ||
| 2065 | bb.debug(1, 'LIBNAMES: pkg %s sonames %s' % (pkg, sonames)) | ||
| 2066 | |||
| 2067 | assumed_libs = d.getVar('ASSUME_SHLIBS') | ||
| 2068 | if assumed_libs: | ||
| 2069 | libdir = d.getVar("libdir") | ||
| 2070 | for e in assumed_libs.split(): | ||
| 2071 | l, dep_pkg = e.split(":") | ||
| 2072 | lib_ver = None | ||
| 2073 | dep_pkg = dep_pkg.rsplit("_", 1) | ||
| 2074 | if len(dep_pkg) == 2: | ||
| 2075 | lib_ver = dep_pkg[1] | ||
| 2076 | dep_pkg = dep_pkg[0] | ||
| 2077 | if l not in shlib_provider: | ||
| 2078 | shlib_provider[l] = {} | ||
| 2079 | shlib_provider[l][libdir] = (dep_pkg, lib_ver) | ||
| 2080 | |||
| 2081 | libsearchpath = [d.getVar('libdir'), d.getVar('base_libdir')] | ||
| 2082 | |||
| 2083 | for pkg in shlib_pkgs: | ||
| 2084 | bb.debug(2, "calculating shlib requirements for %s" % pkg) | ||
| 2085 | |||
| 2086 | private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or "" | ||
| 2087 | private_libs = private_libs.split() | ||
| 2088 | |||
| 2089 | deps = list() | ||
| 2090 | for n in needed[pkg]: | ||
| 2091 | # If n is in the private libraries, don't try to search for a provider for it. | ||
| 2092 | # This could cause a problem if some abc.bb provides a private | ||
| 2093 | # /opt/abc/lib/libfoo.so.1 and also contains /usr/bin/abc, which depends on the system library libfoo.so.1, | ||
| 2094 | # but skipping it is still a better alternative than providing our own | ||
| 2095 | # version and then adding a runtime dependency on the same system library. | ||
| 2096 | import fnmatch | ||
| 2097 | if private_libs and len([i for i in private_libs if fnmatch.fnmatch(n[0], i)]) > 0: | ||
| 2098 | bb.debug(2, '%s: Dependency %s covered by PRIVATE_LIBS' % (pkg, n[0])) | ||
| 2099 | continue | ||
| 2100 | if n[0] in shlib_provider.keys(): | ||
| 2101 | shlib_provider_map = shlib_provider[n[0]] | ||
| 2102 | matches = set() | ||
| 2103 | for p in itertools.chain(list(n[2]), sorted(shlib_provider_map.keys()), libsearchpath): | ||
| 2104 | if p in shlib_provider_map: | ||
| 2105 | matches.add(p) | ||
| 2106 | if len(matches) > 1: | ||
| 2107 | matchpkgs = ', '.join([shlib_provider_map[match][0] for match in matches]) | ||
| 2108 | bb.error("%s: Multiple shlib providers for %s: %s (used by files: %s)" % (pkg, n[0], matchpkgs, n[1])) | ||
| 2109 | elif len(matches) == 1: | ||
| 2110 | (dep_pkg, ver_needed) = shlib_provider_map[matches.pop()] | ||
| 2111 | |||
| 2112 | bb.debug(2, '%s: Dependency %s requires package %s (used by files: %s)' % (pkg, n[0], dep_pkg, n[1])) | ||
| 2113 | |||
| 2114 | if dep_pkg == pkg: | ||
| 2115 | continue | ||
| 2116 | |||
| 2117 | if ver_needed: | ||
| 2118 | dep = "%s (>= %s)" % (dep_pkg, ver_needed) | ||
| 2119 | else: | ||
| 2120 | dep = dep_pkg | ||
| 2121 | if not dep in deps: | ||
| 2122 | deps.append(dep) | ||
| 2123 | continue | ||
| 2124 | bb.note("Couldn't find shared library provider for %s, used by files: %s" % (n[0], n[1])) | ||
| 2125 | |||
| 2126 | deps_file = os.path.join(pkgdest, pkg + ".shlibdeps") | ||
| 2127 | if os.path.exists(deps_file): | ||
| 2128 | os.remove(deps_file) | ||
| 2129 | if deps: | ||
| 2130 | with open(deps_file, 'w') as fd: | ||
| 2131 | for dep in sorted(deps): | ||
| 2132 | fd.write(dep + '\n') | ||
| 2133 | } | ||
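# Illustrative sketch (not part of the class): each <pkg>.list file written into
# SHLIBSWORKDIR above holds one "soname:path:version" entry per line, e.g.
# "libfoo.so.1:/usr/lib:1.2.3" (values are hypothetical). A minimal reader:
def read_shlib_list(path):
    entries = []
    with open(path) as f:
        for line in f:
            soname, libpath, version = line.rstrip("\n").split(":", 2)
            entries.append((soname, libpath, version))
    return entries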
| 2134 | |||
| 2135 | python package_do_pkgconfig () { | ||
| 2136 | import re | ||
| 2137 | |||
| 2138 | packages = d.getVar('PACKAGES') | ||
| 2139 | workdir = d.getVar('WORKDIR') | ||
| 2140 | pkgdest = d.getVar('PKGDEST') | ||
| 2141 | |||
| 2142 | shlibs_dirs = d.getVar('SHLIBSDIRS').split() | ||
| 2143 | shlibswork_dir = d.getVar('SHLIBSWORKDIR') | ||
| 2144 | |||
| 2145 | pc_re = re.compile(r'(.*)\.pc$') | ||
| 2146 | var_re = re.compile(r'(.*)=(.*)') | ||
| 2147 | field_re = re.compile(r'(.*): (.*)') | ||
| 2148 | |||
| 2149 | pkgconfig_provided = {} | ||
| 2150 | pkgconfig_needed = {} | ||
| 2151 | for pkg in packages.split(): | ||
| 2152 | pkgconfig_provided[pkg] = [] | ||
| 2153 | pkgconfig_needed[pkg] = [] | ||
| 2154 | for file in sorted(pkgfiles[pkg]): | ||
| 2155 | m = pc_re.match(file) | ||
| 2156 | if m: | ||
| 2157 | pd = bb.data.init() | ||
| 2158 | name = m.group(1) | ||
| 2159 | pkgconfig_provided[pkg].append(os.path.basename(name)) | ||
| 2160 | if not os.access(file, os.R_OK): | ||
| 2161 | continue | ||
| 2162 | with open(file, 'r') as f: | ||
| 2163 | lines = f.readlines() | ||
| 2164 | for l in lines: | ||
| 2165 | m = var_re.match(l) | ||
| 2166 | if m: | ||
| 2167 | name = m.group(1) | ||
| 2168 | val = m.group(2) | ||
| 2169 | pd.setVar(name, pd.expand(val)) | ||
| 2170 | continue | ||
| 2171 | m = field_re.match(l) | ||
| 2172 | if m: | ||
| 2173 | hdr = m.group(1) | ||
| 2174 | exp = pd.expand(m.group(2)) | ||
| 2175 | if hdr == 'Requires': | ||
| 2176 | pkgconfig_needed[pkg] += exp.replace(',', ' ').split() | ||
| 2177 | |||
| 2178 | for pkg in packages.split(): | ||
| 2179 | pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist") | ||
| 2180 | if pkgconfig_provided[pkg] != []: | ||
| 2181 | with open(pkgs_file, 'w') as f: | ||
| 2182 | for p in sorted(pkgconfig_provided[pkg]): | ||
| 2183 | f.write('%s\n' % p) | ||
| 2184 | |||
| 2185 | # Go from least to most specific since the last one found wins | ||
| 2186 | for dir in reversed(shlibs_dirs): | ||
| 2187 | if not os.path.exists(dir): | ||
| 2188 | continue | ||
| 2189 | for file in sorted(os.listdir(dir)): | ||
| 2190 | m = re.match(r'^(.*)\.pclist$', file) | ||
| 2191 | if m: | ||
| 2192 | pkg = m.group(1) | ||
| 2193 | with open(os.path.join(dir, file)) as fd: | ||
| 2194 | lines = fd.readlines() | ||
| 2195 | pkgconfig_provided[pkg] = [] | ||
| 2196 | for l in lines: | ||
| 2197 | pkgconfig_provided[pkg].append(l.rstrip()) | ||
| 2198 | |||
| 2199 | for pkg in packages.split(): | ||
| 2200 | deps = [] | ||
| 2201 | for n in pkgconfig_needed[pkg]: | ||
| 2202 | found = False | ||
| 2203 | for k in pkgconfig_provided.keys(): | ||
| 2204 | if n in pkgconfig_provided[k]: | ||
| 2205 | if k != pkg and not (k in deps): | ||
| 2206 | deps.append(k) | ||
| 2207 | found = True | ||
| 2208 | if found == False: | ||
| 2209 | bb.note("couldn't find pkgconfig module '%s' in any package" % n) | ||
| 2210 | deps_file = os.path.join(pkgdest, pkg + ".pcdeps") | ||
| 2211 | if len(deps): | ||
| 2212 | with open(deps_file, 'w') as fd: | ||
| 2213 | for dep in deps: | ||
| 2214 | fd.write(dep + '\n') | ||
| 2215 | } | ||
| 2216 | |||
| 2217 | def read_libdep_files(d): | ||
| 2218 | pkglibdeps = {} | ||
| 2219 | packages = d.getVar('PACKAGES').split() | ||
| 2220 | for pkg in packages: | ||
| 2221 | pkglibdeps[pkg] = {} | ||
| 2222 | for extension in ".shlibdeps", ".pcdeps", ".clilibdeps": | ||
| 2223 | depsfile = d.expand("${PKGDEST}/" + pkg + extension) | ||
| 2224 | if os.access(depsfile, os.R_OK): | ||
| 2225 | with open(depsfile) as fd: | ||
| 2226 | lines = fd.readlines() | ||
| 2227 | for l in lines: | ||
| 2228 | l = l.rstrip() | ||
| 2229 | deps = bb.utils.explode_dep_versions2(l) | ||
| 2230 | for dep in deps: | ||
| 2231 | if not dep in pkglibdeps[pkg]: | ||
| 2232 | pkglibdeps[pkg][dep] = deps[dep] | ||
| 2233 | return pkglibdeps | ||
| 2234 | |||
| 2235 | python read_shlibdeps () { | ||
| 2236 | pkglibdeps = read_libdep_files(d) | ||
| 2237 | |||
| 2238 | packages = d.getVar('PACKAGES').split() | ||
| 2239 | for pkg in packages: | ||
| 2240 | rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS:' + pkg) or "") | ||
| 2241 | for dep in sorted(pkglibdeps[pkg]): | ||
| 2242 | # Add the dep if it's not already there, or if no comparison is set | ||
| 2243 | if dep not in rdepends: | ||
| 2244 | rdepends[dep] = [] | ||
| 2245 | for v in pkglibdeps[pkg][dep]: | ||
| 2246 | if v not in rdepends[dep]: | ||
| 2247 | rdepends[dep].append(v) | ||
| 2248 | d.setVar('RDEPENDS:' + pkg, bb.utils.join_deps(rdepends, commasep=False)) | ||
| 2249 | } | ||
| 2250 | |||
| 2251 | python package_depchains() { | ||
| 2252 | """ | ||
| 2253 | For a given set of prefix and postfix modifiers, make those packages | ||
| 2254 | RRECOMMENDS on the corresponding packages for its RDEPENDS. | ||
| 2255 | |||
| 2256 | Example: If package A depends upon package B, and A's .bb emits an | ||
| 2257 | A-dev package, this would make A-dev Recommends: B-dev. | ||
| 2258 | |||
| 2259 | If only one package with a given suffix is specified, its RRECOMMENDS are | ||
| 2260 | based on the RDEPENDS of *all* other packages. If more than one package with | ||
| 2261 | a given suffix is specified, each one only uses the RDEPENDS of its own | ||
| 2262 | parent package. | ||
| 2263 | """ | ||
| 2264 | |||
| 2265 | packages = d.getVar('PACKAGES') | ||
| 2266 | postfixes = (d.getVar('DEPCHAIN_POST') or '').split() | ||
| 2267 | prefixes = (d.getVar('DEPCHAIN_PRE') or '').split() | ||
| 2268 | |||
| 2269 | def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d): | ||
| 2270 | |||
| 2271 | #bb.note('depends for %s is %s' % (base, depends)) | ||
| 2272 | rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "") | ||
| 2273 | |||
| 2274 | for depend in sorted(depends): | ||
| 2275 | if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'): | ||
| 2276 | #bb.note("Skipping %s" % depend) | ||
| 2277 | continue | ||
| 2278 | if depend.endswith('-dev'): | ||
| 2279 | depend = depend[:-4] | ||
| 2280 | if depend.endswith('-dbg'): | ||
| 2281 | depend = depend[:-4] | ||
| 2282 | pkgname = getname(depend, suffix) | ||
| 2283 | #bb.note("Adding %s for %s" % (pkgname, depend)) | ||
| 2284 | if pkgname not in rreclist and pkgname != pkg: | ||
| 2285 | rreclist[pkgname] = [] | ||
| 2286 | |||
| 2287 | #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist))) | ||
| 2288 | d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False)) | ||
| 2289 | |||
| 2290 | def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d): | ||
| 2291 | |||
| 2292 | #bb.note('rdepends for %s is %s' % (base, rdepends)) | ||
| 2293 | rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "") | ||
| 2294 | |||
| 2295 | for depend in sorted(rdepends): | ||
| 2296 | if depend.find('virtual-locale-') != -1: | ||
| 2297 | #bb.note("Skipping %s" % depend) | ||
| 2298 | continue | ||
| 2299 | if depend.endswith('-dev'): | ||
| 2300 | depend = depend[:-4] | ||
| 2301 | if depend.endswith('-dbg'): | ||
| 2302 | depend = depend[:-4] | ||
| 2303 | pkgname = getname(depend, suffix) | ||
| 2304 | #bb.note("Adding %s for %s" % (pkgname, depend)) | ||
| 2305 | if pkgname not in rreclist and pkgname != pkg: | ||
| 2306 | rreclist[pkgname] = [] | ||
| 2307 | |||
| 2308 | #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist))) | ||
| 2309 | d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False)) | ||
| 2310 | |||
| 2311 | def add_dep(list, dep): | ||
| 2312 | if dep not in list: | ||
| 2313 | list.append(dep) | ||
| 2314 | |||
| 2315 | depends = [] | ||
| 2316 | for dep in bb.utils.explode_deps(d.getVar('DEPENDS') or ""): | ||
| 2317 | add_dep(depends, dep) | ||
| 2318 | |||
| 2319 | rdepends = [] | ||
| 2320 | for pkg in packages.split(): | ||
| 2321 | for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + pkg) or ""): | ||
| 2322 | add_dep(rdepends, dep) | ||
| 2323 | |||
| 2324 | #bb.note('rdepends is %s' % rdepends) | ||
| 2325 | |||
| 2326 | def post_getname(name, suffix): | ||
| 2327 | return '%s%s' % (name, suffix) | ||
| 2328 | def pre_getname(name, suffix): | ||
| 2329 | return '%s%s' % (suffix, name) | ||
| 2330 | |||
| 2331 | pkgs = {} | ||
| 2332 | for pkg in packages.split(): | ||
| 2333 | for postfix in postfixes: | ||
| 2334 | if pkg.endswith(postfix): | ||
| 2335 | if not postfix in pkgs: | ||
| 2336 | pkgs[postfix] = {} | ||
| 2337 | pkgs[postfix][pkg] = (pkg[:-len(postfix)], post_getname) | ||
| 2338 | |||
| 2339 | for prefix in prefixes: | ||
| 2340 | if pkg.startswith(prefix): | ||
| 2341 | if not prefix in pkgs: | ||
| 2342 | pkgs[prefix] = {} | ||
| 2343 | pkgs[prefix][pkg] = (pkg[:-len(prefix)], pre_getname) | ||
| 2344 | |||
| 2345 | if "-dbg" in pkgs: | ||
| 2346 | pkglibdeps = read_libdep_files(d) | ||
| 2347 | pkglibdeplist = [] | ||
| 2348 | for pkg in pkglibdeps: | ||
| 2349 | for k in pkglibdeps[pkg]: | ||
| 2350 | add_dep(pkglibdeplist, k) | ||
| 2351 | dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS') == '1') or (bb.data.inherits_class('packagegroup', d))) | ||
| 2352 | |||
| 2353 | for suffix in pkgs: | ||
| 2354 | for pkg in pkgs[suffix]: | ||
| 2355 | if d.getVarFlag('RRECOMMENDS:' + pkg, 'nodeprrecs'): | ||
| 2356 | continue | ||
| 2357 | (base, func) = pkgs[suffix][pkg] | ||
| 2358 | if suffix == "-dev": | ||
| 2359 | pkg_adddeprrecs(pkg, base, suffix, func, depends, d) | ||
| 2360 | elif suffix == "-dbg": | ||
| 2361 | if not dbgdefaultdeps: | ||
| 2362 | pkg_addrrecs(pkg, base, suffix, func, pkglibdeplist, d) | ||
| 2363 | continue | ||
| 2364 | if len(pkgs[suffix]) == 1: | ||
| 2365 | pkg_addrrecs(pkg, base, suffix, func, rdepends, d) | ||
| 2366 | else: | ||
| 2367 | rdeps = [] | ||
| 2368 | for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + base) or ""): | ||
| 2369 | add_dep(rdeps, dep) | ||
| 2370 | pkg_addrrecs(pkg, base, suffix, func, rdeps, d) | ||
| 2371 | } | ||
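# For example (assumed, illustrative configuration): if a distro sets
# DEPCHAIN_POST to contain "-dev", a recipe "foo" with DEPENDS = "bar" would
# gain an RRECOMMENDS:foo-dev entry pointing at "bar-dev", as described in the
# docstring above.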
| 2372 | |||
| 2373 | # Since bitbake can't determine which variables are accessed during package | ||
| 2374 | # iteration, we need to list them here: | ||
| 2375 | PACKAGEVARS = "FILES RDEPENDS RRECOMMENDS SUMMARY DESCRIPTION RSUGGESTS RPROVIDES RCONFLICTS PKG ALLOW_EMPTY pkg_postinst pkg_postrm pkg_postinst_ontarget INITSCRIPT_NAME INITSCRIPT_PARAMS DEBIAN_NOAUTONAME ALTERNATIVE PKGE PKGV PKGR USERADD_PARAM GROUPADD_PARAM CONFFILES SYSTEMD_SERVICE LICENSE SECTION pkg_preinst pkg_prerm RREPLACES GROUPMEMS_PARAM SYSTEMD_AUTO_ENABLE SKIP_FILEDEPS PRIVATE_LIBS PACKAGE_ADD_METADATA" | ||
| 2376 | |||
| 2377 | def gen_packagevar(d, pkgvars="PACKAGEVARS"): | ||
| 2378 | ret = [] | ||
| 2379 | pkgs = (d.getVar("PACKAGES") or "").split() | ||
| 2380 | vars = (d.getVar(pkgvars) or "").split() | ||
| 2381 | for v in vars: | ||
| 2382 | ret.append(v) | ||
| 2383 | for p in pkgs: | ||
| 2384 | for v in vars: | ||
| 2385 | ret.append(v + ":" + p) | ||
| 2386 | |||
| 2387 | # Ensure that changes to INCOMPATIBLE_LICENSE cause do_package to re-run for | ||
| 2388 | # the affected recipes. | ||
| 2389 | ret.append('_exclude_incompatible-%s' % p) | ||
| 2390 | return " ".join(ret) | ||
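# For example (hypothetical recipe), with PACKAGES = "foo foo-dev" the result
# contains every PACKAGEVARS entry both bare and per package, e.g.:
#   "FILES RDEPENDS ... FILES:foo RDEPENDS:foo ... _exclude_incompatible-foo
#    FILES:foo-dev RDEPENDS:foo-dev ... _exclude_incompatible-foo-dev"
# which is what feeds do_package[vardeps] below.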
| 2391 | |||
| 2392 | PACKAGE_PREPROCESS_FUNCS ?= "" | ||
| 2393 | # Functions for setting up PKGD | ||
| 2394 | PACKAGEBUILDPKGD ?= " \ | ||
| 2395 | package_prepare_pkgdata \ | ||
| 2396 | perform_packagecopy \ | ||
| 2397 | ${PACKAGE_PREPROCESS_FUNCS} \ | ||
| 2398 | split_and_strip_files \ | ||
| 2399 | fixup_perms \ | ||
| 2400 | " | ||
| 2401 | # Functions which split PKGD up into separate packages | ||
| 2402 | PACKAGESPLITFUNCS ?= " \ | ||
| 2403 | package_do_split_locales \ | ||
| 2404 | populate_packages" | ||
| 2405 | # Functions which process metadata based on split packages | ||
| 2406 | PACKAGEFUNCS += " \ | ||
| 2407 | package_fixsymlinks \ | ||
| 2408 | package_name_hook \ | ||
| 2409 | package_do_filedeps \ | ||
| 2410 | package_do_shlibs \ | ||
| 2411 | package_do_pkgconfig \ | ||
| 2412 | read_shlibdeps \ | ||
| 2413 | package_depchains \ | ||
| 2414 | emit_pkgdata" | ||
| 2415 | |||
| 2416 | python do_package () { | ||
| 2417 | # Change the following version to cause sstate to invalidate the package | ||
| 2418 | # cache. This is useful if an item this class depends on changes in a | ||
| 2419 | # way that the output of this class changes. rpmdeps is a good example | ||
| 2420 | # as any change to rpmdeps requires this to be rerun. | ||
| 2421 | # PACKAGE_BBCLASS_VERSION = "4" | ||
| 2422 | |||
| 2423 | # Init cachedpath | ||
| 2424 | global cpath | ||
| 2425 | cpath = oe.cachedpath.CachedPath() | ||
| 2426 | |||
| 2427 | ########################################################################### | ||
| 2428 | # Sanity test the setup | ||
| 2429 | ########################################################################### | ||
| 2430 | |||
| 2431 | packages = (d.getVar('PACKAGES') or "").split() | ||
| 2432 | if len(packages) < 1: | ||
| 2433 | bb.debug(1, "No packages to build, skipping do_package") | ||
| 2434 | return | ||
| 2435 | |||
| 2436 | workdir = d.getVar('WORKDIR') | ||
| 2437 | outdir = d.getVar('DEPLOY_DIR') | ||
| 2438 | dest = d.getVar('D') | ||
| 2439 | dvar = d.getVar('PKGD') | ||
| 2440 | pn = d.getVar('PN') | ||
| 2441 | |||
| 2442 | if not workdir or not outdir or not dest or not dvar or not pn: | ||
| 2443 | msg = "WORKDIR, DEPLOY_DIR, D, PN and PKGD all must be defined, unable to package" | ||
| 2444 | oe.qa.handle_error("var-undefined", msg, d) | ||
| 2445 | return | ||
| 2446 | |||
| 2447 | bb.build.exec_func("package_convert_pr_autoinc", d) | ||
| 2448 | |||
| 2449 | ########################################################################### | ||
| 2450 | # Optimisations | ||
| 2451 | ########################################################################### | ||
| 2452 | |||
| 2453 | # Continually expanding complex expressions is inefficient, particularly | ||
| 2454 | # when we write to the datastore and invalidate the expansion cache. This | ||
| 2455 | # code pre-expands some frequently used variables | ||
| 2456 | |||
| 2457 | def expandVar(x, d): | ||
| 2458 | d.setVar(x, d.getVar(x)) | ||
| 2459 | |||
| 2460 | for x in 'PN', 'PV', 'BPN', 'TARGET_SYS', 'EXTENDPRAUTO': | ||
| 2461 | expandVar(x, d) | ||
| 2462 | |||
| 2463 | ########################################################################### | ||
| 2464 | # Setup PKGD (from D) | ||
| 2465 | ########################################################################### | ||
| 2466 | |||
| 2467 | for f in (d.getVar('PACKAGEBUILDPKGD') or '').split(): | ||
| 2468 | bb.build.exec_func(f, d) | ||
| 2469 | |||
| 2470 | ########################################################################### | ||
| 2471 | # Split up PKGD into PKGDEST | ||
| 2472 | ########################################################################### | ||
| 2473 | |||
| 2474 | cpath = oe.cachedpath.CachedPath() | ||
| 2475 | |||
| 2476 | for f in (d.getVar('PACKAGESPLITFUNCS') or '').split(): | ||
| 2477 | bb.build.exec_func(f, d) | ||
| 2478 | |||
| 2479 | ########################################################################### | ||
| 2480 | # Process PKGDEST | ||
| 2481 | ########################################################################### | ||
| 2482 | |||
| 2483 | # Build global list of files in each split package | ||
| 2484 | global pkgfiles | ||
| 2485 | pkgfiles = {} | ||
| 2486 | packages = d.getVar('PACKAGES').split() | ||
| 2487 | pkgdest = d.getVar('PKGDEST') | ||
| 2488 | for pkg in packages: | ||
| 2489 | pkgfiles[pkg] = [] | ||
| 2490 | for walkroot, dirs, files in cpath.walk(pkgdest + "/" + pkg): | ||
| 2491 | for file in files: | ||
| 2492 | pkgfiles[pkg].append(walkroot + os.sep + file) | ||
| 2493 | |||
| 2494 | for f in (d.getVar('PACKAGEFUNCS') or '').split(): | ||
| 2495 | bb.build.exec_func(f, d) | ||
| 2496 | |||
| 2497 | oe.qa.exit_if_errors(d) | ||
| 2498 | } | ||
| 2499 | |||
| 2500 | do_package[dirs] = "${SHLIBSWORKDIR} ${D}" | ||
| 2501 | do_package[vardeps] += "${PACKAGEBUILDPKGD} ${PACKAGESPLITFUNCS} ${PACKAGEFUNCS} ${@gen_packagevar(d)}" | ||
| 2502 | addtask package after do_install | ||
| 2503 | |||
| 2504 | SSTATETASKS += "do_package" | ||
| 2505 | do_package[cleandirs] = "${PKGDEST} ${PKGDESTWORK}" | ||
| 2506 | do_package[sstate-plaindirs] = "${PKGD} ${PKGDEST} ${PKGDESTWORK}" | ||
| 2507 | do_package_setscene[dirs] = "${STAGING_DIR}" | ||
| 2508 | |||
| 2509 | python do_package_setscene () { | ||
| 2510 | sstate_setscene(d) | ||
| 2511 | } | ||
| 2512 | addtask do_package_setscene | ||
| 2513 | |||
| 2514 | # Copy from PKGDESTWORK to a temporary directory, as PKGDESTWORK can be cleaned by both | ||
| 2515 | # do_package_setscene and do_packagedata_setscene, leading to races | ||
| 2516 | python do_packagedata () { | ||
| 2517 | bb.build.exec_func("package_get_auto_pr", d) | ||
| 2518 | |||
| 2519 | src = d.expand("${PKGDESTWORK}") | ||
| 2520 | dest = d.expand("${WORKDIR}/pkgdata-pdata-input") | ||
| 2521 | oe.path.copyhardlinktree(src, dest) | ||
| 2522 | |||
| 2523 | bb.build.exec_func("packagedata_translate_pr_autoinc", d) | ||
| 2524 | } | ||
| 2525 | do_packagedata[cleandirs] += "${WORKDIR}/pkgdata-pdata-input" | ||
| 2526 | |||
| 2527 | # Translate the EXTENDPRAUTO and AUTOINC to the final values | ||
| 2528 | packagedata_translate_pr_autoinc() { | ||
| 2529 | find ${WORKDIR}/pkgdata-pdata-input -type f | xargs --no-run-if-empty \ | ||
| 2530 | sed -e 's,@PRSERV_PV_AUTOINC@,${PRSERV_PV_AUTOINC},g' \ | ||
| 2531 | -e 's,@EXTENDPRAUTO@,${EXTENDPRAUTO},g' -i | ||
| 2532 | } | ||
| 2533 | |||
| 2534 | addtask packagedata before do_build after do_package | ||
| 2535 | |||
| 2536 | SSTATETASKS += "do_packagedata" | ||
| 2537 | do_packagedata[sstate-inputdirs] = "${WORKDIR}/pkgdata-pdata-input" | ||
| 2538 | do_packagedata[sstate-outputdirs] = "${PKGDATA_DIR}" | ||
| 2539 | do_packagedata[stamp-extra-info] = "${MACHINE_ARCH}" | ||
| 2540 | |||
| 2541 | python do_packagedata_setscene () { | ||
| 2542 | sstate_setscene(d) | ||
| 2543 | } | ||
| 2544 | addtask do_packagedata_setscene | ||
| 2545 | |||
| 2546 | # | ||
| 2547 | # Helper functions for the package writing classes | ||
| 2548 | # | ||
| 2549 | |||
| 2550 | def mapping_rename_hook(d): | ||
| 2551 | """ | ||
| 2552 | Rewrite variables to account for package renaming in things | ||
| 2553 | like debian.bbclass or manual PKG variable name changes | ||
| 2554 | """ | ||
| 2555 | pkg = d.getVar("PKG") | ||
| 2556 | runtime_mapping_rename("RDEPENDS", pkg, d) | ||
| 2557 | runtime_mapping_rename("RRECOMMENDS", pkg, d) | ||
| 2558 | runtime_mapping_rename("RSUGGESTS", pkg, d) | ||
