Diffstat (limited to 'meta/classes/package.bbclass')
-rw-r--r-- | meta/classes/package.bbclass | 2519 |
1 file changed, 0 insertions, 2519 deletions
diff --git a/meta/classes/package.bbclass b/meta/classes/package.bbclass
deleted file mode 100644
index 84eafbd529..0000000000
--- a/meta/classes/package.bbclass
+++ /dev/null
@@ -1,2519 +0,0 @@
1 | # | ||
2 | # Packaging process | ||
3 | # | ||
4 | # Executive summary: This class iterates over the functions listed in PACKAGEFUNCS, | ||
5 | # taking D and splitting it up into the packages listed in PACKAGES and placing the | ||
6 | # resulting output in PKGDEST. | ||
7 | # | ||
8 | # The default steps are listed below, but PACKAGEFUNCS can be extended (see the example after this list): | ||
9 | # | ||
10 | # a) package_convert_pr_autoinc - convert AUTOINC in PKGV to ${PRSERV_PV_AUTOINC} | ||
11 | # | ||
12 | # b) perform_packagecopy - Copy D into PKGD | ||
13 | # | ||
14 | # c) package_do_split_locales - Split out the locale files, updates FILES and PACKAGES | ||
15 | # | ||
16 | # d) split_and_strip_files - split the files into runtime and debug and strip them. | ||
17 | # Debug files include the split-out debug info and associated sources, which end up in -dbg packages | ||
18 | # | ||
19 | # e) fixup_perms - Fix up permissions in the package before we split it. | ||
20 | # | ||
21 | # f) populate_packages - Split the files in PKGD into separate packages in PKGDEST/<pkgname> | ||
22 | # Also triggers the binary stripping code to put files in -dbg packages. | ||
23 | # | ||
24 | # g) package_do_filedeps - Collect per-file run-time dependency metadata | ||
25 | # The data is stored in FILER{PROVIDES,DEPENDS}_file_pkg variables with | ||
26 | # a list of affected files in FILER{PROVIDES,DEPENDS}FLIST_pkg | ||
27 | # | ||
28 | # h) package_do_shlibs - Look at the shared libraries generated and automatically add any | ||
29 | # dependencies found. Also stores the package name so anyone else using this library | ||
30 | # knows which package to depend on. | ||
31 | # | ||
32 | # i) package_do_pkgconfig - Keep track of which packages need and provide which .pc files | ||
33 | # | ||
34 | # j) read_shlibdeps - Reads the stored shlibs information into the metadata | ||
35 | # | ||
36 | # k) package_depchains - Adds automatic dependencies to -dbg and -dev packages | ||
37 | # | ||
38 | # l) emit_pkgdata - saves the packaging data into PKGDATA_DIR for use in later | ||
39 | # packaging steps | ||
40 | |||
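As a minimal illustrative sketch (the function name and body here are hypothetical, not part of this class), a class or recipe can hook an extra step into this pipeline by adding to PACKAGEFUNCS and defining a matching python function; where it is placed in the list determines when it runs relative to the default steps:

    PACKAGEFUNCS += "my_extra_packagefunc"

    python my_extra_packagefunc () {
        # Hypothetical extra step; appended like this it runs after the
        # default functions, so D has already been copied into PKGD.
        bb.note("extra packaging step for %s" % d.getVar('PN'))
    }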
41 | inherit packagedata | ||
42 | inherit chrpath | ||
43 | inherit package_pkgdata | ||
44 | inherit insane | ||
45 | |||
46 | PKGD = "${WORKDIR}/package" | ||
47 | PKGDEST = "${WORKDIR}/packages-split" | ||
48 | |||
49 | LOCALE_SECTION ?= '' | ||
50 | |||
51 | ALL_MULTILIB_PACKAGE_ARCHS = "${@all_multilib_tune_values(d, 'PACKAGE_ARCHS')}" | ||
52 | |||
53 | # rpm is used for the per-file dependency identification | ||
54 | # dwarfsrcfiles is used to determine the list of debug source files | ||
55 | PACKAGE_DEPENDS += "rpm-native dwarfsrcfiles-native" | ||
56 | |||
57 | |||
58 | # If your postinstall can execute at rootfs creation time rather than on | ||
59 | # target but depends on a native/cross tool in order to execute, you need to | ||
60 | # list that tool in PACKAGE_WRITE_DEPS. Target package dependencies belong | ||
61 | # in the package dependencies as normal; this is just for native/cross support | ||
62 | # tools at rootfs build time. | ||
63 | PACKAGE_WRITE_DEPS ??= "" | ||
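For example, a recipe whose postinstall runs at rootfs creation time via a native helper might add something like the following (the tool name is a placeholder, not a real recipe):

    PACKAGE_WRITE_DEPS += "my-postinst-helper-native"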
64 | |||
65 | def legitimize_package_name(s): | ||
66 | """ | ||
67 | Make sure package names are legitimate strings | ||
68 | """ | ||
69 | import re | ||
70 | |||
71 | def fixutf(m): | ||
72 | cp = m.group(1) | ||
73 | if cp: | ||
74 | return ('\\u%s' % cp).encode('latin-1').decode('unicode_escape') | ||
75 | |||
76 | # Handle unicode codepoints encoded as <U0123>, as in glibc locale files. | ||
77 | s = re.sub(r'<U([0-9A-Fa-f]{1,4})>', fixutf, s) | ||
78 | |||
79 | # Remaining package name validity fixes | ||
80 | return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-') | ||
81 | |||
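A couple of worked examples of the transformations above (derived directly from the replacements in the function):

    legitimize_package_name("en_GB@euro")   # -> "en-gb+euro"
    legitimize_package_name("libfoo_2,3")   # -> "libfoo-2+3"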
82 | def do_split_packages(d, root, file_regex, output_pattern, description, postinst=None, recursive=False, hook=None, extra_depends=None, aux_files_pattern=None, postrm=None, allow_dirs=False, prepend=False, match_path=False, aux_files_pattern_verbatim=None, allow_links=False, summary=None): | ||
83 | """ | ||
84 | Used in .bb files to split up dynamically generated subpackages of a | ||
85 | given package, usually plugins or modules. | ||
86 | |||
87 | Arguments: | ||
88 | root -- the path in which to search | ||
89 | file_regex -- regular expression to match searched files. Use | ||
90 | parentheses () to mark the part of this expression | ||
91 | that should be used to derive the module name (to be | ||
92 | substituted where %s is used in other function | ||
93 | arguments as noted below) | ||
94 | output_pattern -- pattern to use for the package names. Must include %s. | ||
95 | description -- description to set for each package. Must include %s. | ||
96 | postinst -- postinstall script to use for all packages (as a | ||
97 | string) | ||
98 | recursive -- True to perform a recursive search - default False | ||
99 | hook -- a hook function to be called for every match. The | ||
100 | function will be called with the following arguments | ||
101 | (in the order listed): | ||
102 | f: full path to the file/directory match | ||
103 | pkg: the package name | ||
104 | file_regex: as above | ||
105 | output_pattern: as above | ||
106 | modulename: the module name derived using file_regex | ||
107 | extra_depends -- extra runtime dependencies (RDEPENDS) to be set for | ||
108 | all packages. The default value of None causes a | ||
109 | dependency on the main package (${PN}) - if you do | ||
110 | not want this, pass '' for this parameter. | ||
111 | aux_files_pattern -- extra item(s) to be added to FILES for each | ||
112 | package. Can be a single string item or a list of | ||
113 | strings for multiple items. Must include %s. | ||
114 | postrm -- postrm script to use for all packages (as a string) | ||
115 | allow_dirs -- True allow directories to be matched - default False | ||
116 | prepend -- if True, prepend created packages to PACKAGES instead | ||
117 | of the default False which appends them | ||
118 | match_path -- match file_regex on the whole relative path to the | ||
119 | root rather than just the file name | ||
120 | aux_files_pattern_verbatim -- extra item(s) to be added to FILES for | ||
121 | each package, using the actual derived module name | ||
122 | rather than converting it to something legal for a | ||
123 | package name. Can be a single string item or a list | ||
124 | of strings for multiple items. Must include %s. | ||
125 | allow_links -- True to allow symlinks to be matched - default False | ||
126 | summary -- Summary to set for each package. Must include %s; | ||
127 | defaults to description if not set. | ||
128 | |||
129 | """ | ||
130 | |||
131 | dvar = d.getVar('PKGD') | ||
132 | root = d.expand(root) | ||
133 | output_pattern = d.expand(output_pattern) | ||
134 | extra_depends = d.expand(extra_depends) | ||
135 | |||
136 | # If the root directory doesn't exist, don't error out later but silently do | ||
137 | # no splitting. | ||
138 | if not os.path.exists(dvar + root): | ||
139 | return [] | ||
140 | |||
141 | ml = d.getVar("MLPREFIX") | ||
142 | if ml: | ||
143 | if not output_pattern.startswith(ml): | ||
144 | output_pattern = ml + output_pattern | ||
145 | |||
146 | newdeps = [] | ||
147 | for dep in (extra_depends or "").split(): | ||
148 | if dep.startswith(ml): | ||
149 | newdeps.append(dep) | ||
150 | else: | ||
151 | newdeps.append(ml + dep) | ||
152 | if newdeps: | ||
153 | extra_depends = " ".join(newdeps) | ||
154 | |||
155 | |||
156 | packages = d.getVar('PACKAGES').split() | ||
157 | split_packages = set() | ||
158 | |||
159 | if postinst: | ||
160 | postinst = '#!/bin/sh\n' + postinst + '\n' | ||
161 | if postrm: | ||
162 | postrm = '#!/bin/sh\n' + postrm + '\n' | ||
163 | if not recursive: | ||
164 | objs = os.listdir(dvar + root) | ||
165 | else: | ||
166 | objs = [] | ||
167 | for walkroot, dirs, files in os.walk(dvar + root): | ||
168 | for file in files: | ||
169 | relpath = os.path.join(walkroot, file).replace(dvar + root + '/', '', 1) | ||
170 | if relpath: | ||
171 | objs.append(relpath) | ||
172 | |||
173 | if extra_depends == None: | ||
174 | extra_depends = d.getVar("PN") | ||
175 | |||
176 | if not summary: | ||
177 | summary = description | ||
178 | |||
179 | for o in sorted(objs): | ||
180 | import re, stat | ||
181 | if match_path: | ||
182 | m = re.match(file_regex, o) | ||
183 | else: | ||
184 | m = re.match(file_regex, os.path.basename(o)) | ||
185 | |||
186 | if not m: | ||
187 | continue | ||
188 | f = os.path.join(dvar + root, o) | ||
189 | mode = os.lstat(f).st_mode | ||
190 | if not (stat.S_ISREG(mode) or (allow_links and stat.S_ISLNK(mode)) or (allow_dirs and stat.S_ISDIR(mode))): | ||
191 | continue | ||
192 | on = legitimize_package_name(m.group(1)) | ||
193 | pkg = output_pattern % on | ||
194 | split_packages.add(pkg) | ||
195 | if not pkg in packages: | ||
196 | if prepend: | ||
197 | packages = [pkg] + packages | ||
198 | else: | ||
199 | packages.append(pkg) | ||
200 | oldfiles = d.getVar('FILES:' + pkg) | ||
201 | newfile = os.path.join(root, o) | ||
202 | # These names will be passed through glob() so if the filename actually | ||
203 | # contains * or ? (rare, but possible) we need to handle that specially | ||
204 | newfile = newfile.replace('*', '[*]') | ||
205 | newfile = newfile.replace('?', '[?]') | ||
206 | if not oldfiles: | ||
207 | the_files = [newfile] | ||
208 | if aux_files_pattern: | ||
209 | if type(aux_files_pattern) is list: | ||
210 | for fp in aux_files_pattern: | ||
211 | the_files.append(fp % on) | ||
212 | else: | ||
213 | the_files.append(aux_files_pattern % on) | ||
214 | if aux_files_pattern_verbatim: | ||
215 | if type(aux_files_pattern_verbatim) is list: | ||
216 | for fp in aux_files_pattern_verbatim: | ||
217 | the_files.append(fp % m.group(1)) | ||
218 | else: | ||
219 | the_files.append(aux_files_pattern_verbatim % m.group(1)) | ||
220 | d.setVar('FILES:' + pkg, " ".join(the_files)) | ||
221 | else: | ||
222 | d.setVar('FILES:' + pkg, oldfiles + " " + newfile) | ||
223 | if extra_depends != '': | ||
224 | d.appendVar('RDEPENDS:' + pkg, ' ' + extra_depends) | ||
225 | if not d.getVar('DESCRIPTION:' + pkg): | ||
226 | d.setVar('DESCRIPTION:' + pkg, description % on) | ||
227 | if not d.getVar('SUMMARY:' + pkg): | ||
228 | d.setVar('SUMMARY:' + pkg, summary % on) | ||
229 | if postinst: | ||
230 | d.setVar('pkg_postinst:' + pkg, postinst) | ||
231 | if postrm: | ||
232 | d.setVar('pkg_postrm:' + pkg, postrm) | ||
233 | if callable(hook): | ||
234 | hook(f, pkg, file_regex, output_pattern, m.group(1)) | ||
235 | |||
236 | d.setVar('PACKAGES', ' '.join(packages)) | ||
237 | return list(split_packages) | ||
238 | |||
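A typical use of do_split_packages from a recipe is sketched below; the 'myapp' names, plugin directory and regex are placeholders for illustration only:

    python populate_packages:prepend () {
        plugindir = d.expand('${libdir}/myapp/plugins')
        do_split_packages(d, plugindir, r'^libmyappplugin-(.*)\.so$',
                          'myapp-plugin-%s',
                          'MyApp plugin for %s',
                          extra_depends='')   # '' avoids the default RDEPENDS on ${PN}
    }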
239 | PACKAGE_DEPENDS += "file-native" | ||
240 | |||
241 | python () { | ||
242 | if d.getVar('PACKAGES') != '': | ||
243 | deps = "" | ||
244 | for dep in (d.getVar('PACKAGE_DEPENDS') or "").split(): | ||
245 | deps += " %s:do_populate_sysroot" % dep | ||
246 | if d.getVar('PACKAGE_MINIDEBUGINFO') == '1': | ||
247 | deps += ' xz-native:do_populate_sysroot' | ||
248 | d.appendVarFlag('do_package', 'depends', deps) | ||
249 | |||
250 | # shlibs requires any DEPENDS to have already been packaged for the *.list files | ||
251 | d.appendVarFlag('do_package', 'deptask', " do_packagedata") | ||
252 | } | ||
253 | |||
254 | # Get a list of files from file vars by searching for files under the current working directory | ||
255 | # The list contains symlinks, directories and normal files. | ||
256 | def files_from_filevars(filevars): | ||
257 | import os,glob | ||
258 | cpath = oe.cachedpath.CachedPath() | ||
259 | files = [] | ||
260 | for f in filevars: | ||
261 | if os.path.isabs(f): | ||
262 | f = '.' + f | ||
263 | if not f.startswith("./"): | ||
264 | f = './' + f | ||
265 | globbed = glob.glob(f) | ||
266 | if globbed: | ||
267 | if [ f ] != globbed: | ||
268 | files += globbed | ||
269 | continue | ||
270 | files.append(f) | ||
271 | |||
272 | symlink_paths = [] | ||
273 | for ind, f in enumerate(files): | ||
274 | # Handle directory symlinks. Truncate path to the lowest level symlink | ||
275 | parent = '' | ||
276 | for dirname in f.split('/')[:-1]: | ||
277 | parent = os.path.join(parent, dirname) | ||
278 | if dirname == '.': | ||
279 | continue | ||
280 | if cpath.islink(parent): | ||
281 | bb.warn("FILES contains file '%s' which resides under a " | ||
282 | "directory symlink. Please fix the recipe and use the " | ||
283 | "real path for the file." % f[1:]) | ||
284 | symlink_paths.append(f) | ||
285 | files[ind] = parent | ||
286 | f = parent | ||
287 | break | ||
288 | |||
289 | if not cpath.islink(f): | ||
290 | if cpath.isdir(f): | ||
291 | newfiles = [ os.path.join(f,x) for x in os.listdir(f) ] | ||
292 | if newfiles: | ||
293 | files += newfiles | ||
294 | |||
295 | return files, symlink_paths | ||
296 | |||
297 | # Called in package_<rpm,ipk,deb>.bbclass to get the correct list of configuration files | ||
298 | def get_conffiles(pkg, d): | ||
299 | pkgdest = d.getVar('PKGDEST') | ||
300 | root = os.path.join(pkgdest, pkg) | ||
301 | cwd = os.getcwd() | ||
302 | os.chdir(root) | ||
303 | |||
304 | conffiles = d.getVar('CONFFILES:%s' % pkg); | ||
305 | if conffiles == None: | ||
306 | conffiles = d.getVar('CONFFILES') | ||
307 | if conffiles == None: | ||
308 | conffiles = "" | ||
309 | conffiles = conffiles.split() | ||
310 | conf_orig_list = files_from_filevars(conffiles)[0] | ||
311 | |||
312 | # Remove links and directories from conf_orig_list to get conf_list which only contains normal files | ||
313 | conf_list = [] | ||
314 | for f in conf_orig_list: | ||
315 | if os.path.isdir(f): | ||
316 | continue | ||
317 | if os.path.islink(f): | ||
318 | continue | ||
319 | if not os.path.exists(f): | ||
320 | continue | ||
321 | conf_list.append(f) | ||
322 | |||
323 | # Remove the leading './' | ||
324 | for i in range(0, len(conf_list)): | ||
325 | conf_list[i] = conf_list[i][1:] | ||
326 | |||
327 | os.chdir(cwd) | ||
328 | return conf_list | ||
329 | |||
330 | def checkbuildpath(file, d): | ||
331 | tmpdir = d.getVar('TMPDIR') | ||
332 | with open(file) as f: | ||
333 | file_content = f.read() | ||
334 | if tmpdir in file_content: | ||
335 | return True | ||
336 | |||
337 | return False | ||
338 | |||
339 | def parse_debugsources_from_dwarfsrcfiles_output(dwarfsrcfiles_output): | ||
340 | debugfiles = {} | ||
341 | |||
342 | for line in dwarfsrcfiles_output.splitlines(): | ||
343 | if line.startswith("\t"): | ||
344 | debugfiles[os.path.normpath(line.split()[0])] = "" | ||
345 | |||
346 | return debugfiles.keys() | ||
347 | |||
348 | def source_info(file, d, fatal=True): | ||
349 | import subprocess | ||
350 | |||
351 | cmd = ["dwarfsrcfiles", file] | ||
352 | try: | ||
353 | output = subprocess.check_output(cmd, universal_newlines=True, stderr=subprocess.STDOUT) | ||
354 | retval = 0 | ||
355 | except subprocess.CalledProcessError as exc: | ||
356 | output = exc.output | ||
357 | retval = exc.returncode | ||
358 | |||
359 | # 255 means a specific file wasn't fully parsed to get the debug file list, which is not a fatal failure | ||
360 | if retval != 0 and retval != 255: | ||
361 | msg = "dwarfsrcfiles failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else "") | ||
362 | if fatal: | ||
363 | bb.fatal(msg) | ||
364 | bb.note(msg) | ||
365 | |||
366 | debugsources = parse_debugsources_from_dwarfsrcfiles_output(output) | ||
367 | |||
368 | return list(debugsources) | ||
369 | |||
370 | def splitdebuginfo(file, dvar, debugdir, debuglibdir, debugappend, debugsrcdir, d): | ||
371 | # Function to split a single file into two components: one is the stripped | ||
372 | # target system binary, the other contains any debugging information. The | ||
373 | # two files are linked to reference each other. | ||
374 | # | ||
375 | # return a mapping of files:debugsources | ||
376 | |||
377 | import stat | ||
378 | import subprocess | ||
379 | |||
380 | src = file[len(dvar):] | ||
381 | dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend | ||
382 | debugfile = dvar + dest | ||
383 | sources = [] | ||
384 | |||
385 | # Split the file... | ||
386 | bb.utils.mkdirhier(os.path.dirname(debugfile)) | ||
387 | #bb.note("Split %s -> %s" % (file, debugfile)) | ||
388 | # Only store off the hard link reference if we successfully split! | ||
389 | |||
390 | dvar = d.getVar('PKGD') | ||
391 | objcopy = d.getVar("OBJCOPY") | ||
392 | |||
393 | # We ignore kernel modules; we don't generate debug info files for them. | ||
394 | if file.find("/lib/modules/") != -1 and file.endswith(".ko"): | ||
395 | return (file, sources) | ||
396 | |||
397 | newmode = None | ||
398 | if not os.access(file, os.W_OK) or os.access(file, os.R_OK): | ||
399 | origmode = os.stat(file)[stat.ST_MODE] | ||
400 | newmode = origmode | stat.S_IWRITE | stat.S_IREAD | ||
401 | os.chmod(file, newmode) | ||
402 | |||
403 | # We need to extract the debug src information here... | ||
404 | if debugsrcdir: | ||
405 | sources = source_info(file, d) | ||
406 | |||
407 | bb.utils.mkdirhier(os.path.dirname(debugfile)) | ||
408 | |||
409 | subprocess.check_output([objcopy, '--only-keep-debug', file, debugfile], stderr=subprocess.STDOUT) | ||
410 | |||
411 | # Set the debuglink to have the view of the file path on the target | ||
412 | subprocess.check_output([objcopy, '--add-gnu-debuglink', debugfile, file], stderr=subprocess.STDOUT) | ||
413 | |||
414 | if newmode: | ||
415 | os.chmod(file, origmode) | ||
416 | |||
417 | return (file, sources) | ||
418 | |||
419 | def splitstaticdebuginfo(file, dvar, debugstaticdir, debugstaticlibdir, debugstaticappend, debugsrcdir, d): | ||
420 | # Unlike the function above, there is no way to split a static library | ||
421 | # into two components. So to get similar results we will copy the unmodified | ||
422 | # static library (containing the debug symbols) into a new directory. | ||
423 | # We will then strip (preserving symbols) the static library in the | ||
424 | # typical location. | ||
425 | # | ||
426 | # return a mapping of files:debugsources | ||
427 | |||
428 | import stat | ||
429 | import shutil | ||
430 | |||
431 | src = file[len(dvar):] | ||
432 | dest = debugstaticlibdir + os.path.dirname(src) + debugstaticdir + "/" + os.path.basename(src) + debugstaticappend | ||
433 | debugfile = dvar + dest | ||
434 | sources = [] | ||
435 | |||
436 | # Copy the file... | ||
437 | bb.utils.mkdirhier(os.path.dirname(debugfile)) | ||
438 | #bb.note("Copy %s -> %s" % (file, debugfile)) | ||
439 | |||
440 | dvar = d.getVar('PKGD') | ||
441 | |||
442 | newmode = None | ||
443 | if not os.access(file, os.W_OK) or os.access(file, os.R_OK): | ||
444 | origmode = os.stat(file)[stat.ST_MODE] | ||
445 | newmode = origmode | stat.S_IWRITE | stat.S_IREAD | ||
446 | os.chmod(file, newmode) | ||
447 | |||
448 | # We need to extract the debug src information here... | ||
449 | if debugsrcdir: | ||
450 | sources = source_info(file, d) | ||
451 | |||
452 | bb.utils.mkdirhier(os.path.dirname(debugfile)) | ||
453 | |||
454 | # Copy the unmodified item to the debug directory | ||
455 | shutil.copy2(file, debugfile) | ||
456 | |||
457 | if newmode: | ||
458 | os.chmod(file, origmode) | ||
459 | |||
460 | return (file, sources) | ||
461 | |||
462 | def inject_minidebuginfo(file, dvar, debugdir, debuglibdir, debugappend, debugsrcdir, d): | ||
463 | # Extract just the symbols from debuginfo into minidebuginfo, | ||
464 | # compress it with xz and inject it back into the binary in a .gnu_debugdata section. | ||
465 | # https://sourceware.org/gdb/onlinedocs/gdb/MiniDebugInfo.html | ||
466 | |||
467 | import subprocess | ||
468 | |||
469 | readelf = d.getVar('READELF') | ||
470 | nm = d.getVar('NM') | ||
471 | objcopy = d.getVar('OBJCOPY') | ||
472 | |||
473 | minidebuginfodir = d.expand('${WORKDIR}/minidebuginfo') | ||
474 | |||
475 | src = file[len(dvar):] | ||
476 | dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend | ||
477 | debugfile = dvar + dest | ||
478 | minidebugfile = minidebuginfodir + src + '.minidebug' | ||
479 | bb.utils.mkdirhier(os.path.dirname(minidebugfile)) | ||
480 | |||
481 | # If we didn't produce debuginfo for any reason, we can't produce minidebuginfo either | ||
482 | # so skip it. | ||
483 | if not os.path.exists(debugfile): | ||
484 | bb.debug(1, 'ELF file {} has no debuginfo, skipping minidebuginfo injection'.format(file)) | ||
485 | return | ||
486 | |||
487 | # Find non-allocated PROGBITS, NOTE, and NOBITS sections in the debuginfo. | ||
488 | # We will exclude all of these from minidebuginfo to save space. | ||
489 | remove_section_names = [] | ||
490 | for line in subprocess.check_output([readelf, '-W', '-S', debugfile], universal_newlines=True).splitlines(): | ||
491 | fields = line.split() | ||
492 | if len(fields) < 8: | ||
493 | continue | ||
494 | name = fields[0] | ||
495 | type = fields[1] | ||
496 | flags = fields[7] | ||
497 | # .debug_ sections will be removed by objcopy -S so no need to explicitly remove them | ||
498 | if name.startswith('.debug_'): | ||
499 | continue | ||
500 | if 'A' not in flags and type in ['PROGBITS', 'NOTE', 'NOBITS']: | ||
501 | remove_section_names.append(name) | ||
502 | |||
503 | # List dynamic symbols in the binary. We can exclude these from minidebuginfo | ||
504 | # because they are always present in the binary. | ||
505 | dynsyms = set() | ||
506 | for line in subprocess.check_output([nm, '-D', file, '--format=posix', '--defined-only'], universal_newlines=True).splitlines(): | ||
507 | dynsyms.add(line.split()[0]) | ||
508 | |||
509 | # Find all function symbols from debuginfo which aren't in the dynamic symbols table. | ||
510 | # These are the ones we want to keep in minidebuginfo. | ||
511 | keep_symbols_file = minidebugfile + '.symlist' | ||
512 | found_any_symbols = False | ||
513 | with open(keep_symbols_file, 'w') as f: | ||
514 | for line in subprocess.check_output([nm, debugfile, '--format=sysv', '--defined-only'], universal_newlines=True).splitlines(): | ||
515 | fields = line.split('|') | ||
516 | if len(fields) < 7: | ||
517 | continue | ||
518 | name = fields[0].strip() | ||
519 | type = fields[3].strip() | ||
520 | if type == 'FUNC' and name not in dynsyms: | ||
521 | f.write('{}\n'.format(name)) | ||
522 | found_any_symbols = True | ||
523 | |||
524 | if not found_any_symbols: | ||
525 | bb.debug(1, 'ELF file {} contains no symbols, skipping minidebuginfo injection'.format(file)) | ||
526 | return | ||
527 | |||
528 | bb.utils.remove(minidebugfile) | ||
529 | bb.utils.remove(minidebugfile + '.xz') | ||
530 | |||
531 | subprocess.check_call([objcopy, '-S'] + | ||
532 | ['--remove-section={}'.format(s) for s in remove_section_names] + | ||
533 | ['--keep-symbols={}'.format(keep_symbols_file), debugfile, minidebugfile]) | ||
534 | |||
535 | subprocess.check_call(['xz', '--keep', minidebugfile]) | ||
536 | |||
537 | subprocess.check_call([objcopy, '--add-section', '.gnu_debugdata={}.xz'.format(minidebugfile), file]) | ||
538 | |||
539 | def copydebugsources(debugsrcdir, sources, d): | ||
540 | # The debug src information written out to sourcefile is further processed | ||
541 | # and copied to the destination here. | ||
542 | |||
543 | import stat | ||
544 | import subprocess | ||
545 | |||
546 | if debugsrcdir and sources: | ||
547 | sourcefile = d.expand("${WORKDIR}/debugsources.list") | ||
548 | bb.utils.remove(sourcefile) | ||
549 | |||
550 | # filenames are null-separated - this is an artefact of the previous use | ||
551 | # of rpm's debugedit, which was writing them out that way, and the code elsewhere | ||
552 | # is still assuming that. | ||
553 | debuglistoutput = '\0'.join(sources) + '\0' | ||
554 | with open(sourcefile, 'a') as sf: | ||
555 | sf.write(debuglistoutput) | ||
556 | |||
557 | dvar = d.getVar('PKGD') | ||
558 | strip = d.getVar("STRIP") | ||
559 | objcopy = d.getVar("OBJCOPY") | ||
560 | workdir = d.getVar("WORKDIR") | ||
561 | workparentdir = os.path.dirname(os.path.dirname(workdir)) | ||
562 | workbasedir = os.path.basename(os.path.dirname(workdir)) + "/" + os.path.basename(workdir) | ||
563 | |||
564 | # If the build path exists in sourcefile, it means the toolchain did not use | ||
565 | # -fdebug-prefix-map to compile | ||
566 | if checkbuildpath(sourcefile, d): | ||
567 | localsrc_prefix = workparentdir + "/" | ||
568 | else: | ||
569 | localsrc_prefix = "/usr/src/debug/" | ||
570 | |||
571 | nosuchdir = [] | ||
572 | basepath = dvar | ||
573 | for p in debugsrcdir.split("/"): | ||
574 | basepath = basepath + "/" + p | ||
575 | if not cpath.exists(basepath): | ||
576 | nosuchdir.append(basepath) | ||
577 | bb.utils.mkdirhier(basepath) | ||
578 | cpath.updatecache(basepath) | ||
579 | |||
580 | # Ignore files from the recipe sysroots (target and native) | ||
581 | processdebugsrc = "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '((<internal>|<built-in>)$|/.*recipe-sysroot.*/)' | " | ||
582 | # We need to ignore files that are not actually ours | ||
583 | # we do this by only paying attention to items from this package | ||
584 | processdebugsrc += "fgrep -zw '%s' | " | ||
585 | # Remove prefix in the source paths | ||
586 | processdebugsrc += "sed 's#%s##g' | " | ||
587 | processdebugsrc += "(cd '%s' ; cpio -pd0mlL --no-preserve-owner '%s%s' 2>/dev/null)" | ||
588 | |||
589 | cmd = processdebugsrc % (sourcefile, workbasedir, localsrc_prefix, workparentdir, dvar, debugsrcdir) | ||
590 | try: | ||
591 | subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT) | ||
592 | except subprocess.CalledProcessError: | ||
593 | # Can "fail" if internal headers/transient sources are attempted | ||
594 | pass | ||
595 | |||
596 | # cpio seems to have a bug when -l and -L are used together: symbolic links are just copied, not dereferenced. | ||
597 | # Work around this by manually finding and copying any symbolic links that made it through. | ||
598 | cmd = "find %s%s -type l -print0 -delete | sed s#%s%s/##g | (cd '%s' ; cpio -pd0mL --no-preserve-owner '%s%s')" % \ | ||
599 | (dvar, debugsrcdir, dvar, debugsrcdir, workparentdir, dvar, debugsrcdir) | ||
600 | subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT) | ||
601 | |||
602 | # The copy by cpio may have resulted in some empty directories! Remove these | ||
603 | cmd = "find %s%s -empty -type d -delete" % (dvar, debugsrcdir) | ||
604 | subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT) | ||
605 | |||
606 | # Also remove debugsrcdir if it is empty | ||
607 | for p in nosuchdir[::-1]: | ||
608 | if os.path.exists(p) and not os.listdir(p): | ||
609 | os.rmdir(p) | ||
610 | |||
611 | # | ||
612 | # Package data handling routines | ||
613 | # | ||
614 | |||
615 | def get_package_mapping (pkg, basepkg, d, depversions=None): | ||
616 | import oe.packagedata | ||
617 | |||
618 | data = oe.packagedata.read_subpkgdata(pkg, d) | ||
619 | key = "PKG:%s" % pkg | ||
620 | |||
621 | if key in data: | ||
622 | if bb.data.inherits_class('allarch', d) and bb.data.inherits_class('packagegroup', d) and pkg != data[key]: | ||
623 | bb.error("An allarch packagegroup shouldn't depend on packages which are dynamically renamed (%s to %s)" % (pkg, data[key])) | ||
624 | # Have to avoid undoing the write_extra_pkgs(global_variants...) | ||
625 | if bb.data.inherits_class('allarch', d) and not d.getVar('MULTILIB_VARIANTS') \ | ||
626 | and data[key] == basepkg: | ||
627 | return pkg | ||
628 | if depversions == []: | ||
629 | # Avoid returning a mapping if the renamed package rprovides its original name | ||
630 | rprovkey = "RPROVIDES:%s" % pkg | ||
631 | if rprovkey in data: | ||
632 | if pkg in bb.utils.explode_dep_versions2(data[rprovkey]): | ||
633 | bb.note("%s rprovides %s, not replacing the latter" % (data[key], pkg)) | ||
634 | return pkg | ||
635 | # Do map to rewritten package name | ||
636 | return data[key] | ||
637 | |||
638 | return pkg | ||
639 | |||
640 | def get_package_additional_metadata (pkg_type, d): | ||
641 | base_key = "PACKAGE_ADD_METADATA" | ||
642 | for key in ("%s_%s" % (base_key, pkg_type.upper()), base_key): | ||
643 | if d.getVar(key, False) is None: | ||
644 | continue | ||
645 | d.setVarFlag(key, "type", "list") | ||
646 | if d.getVarFlag(key, "separator") is None: | ||
647 | d.setVarFlag(key, "separator", "\\n") | ||
648 | metadata_fields = [field.strip() for field in oe.data.typed_value(key, d)] | ||
649 | return "\n".join(metadata_fields).strip() | ||
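Based on the lookup above, a recipe can inject extra fields into the generated package metadata via PACKAGE_ADD_METADATA, or a backend-specific variant such as PACKAGE_ADD_METADATA_IPK; multiple fields are separated by a literal \n. The field names below are illustrative and depend on the packaging backend:

    PACKAGE_ADD_METADATA = "Vendor: Example Corp"
    PACKAGE_ADD_METADATA_IPK = "Maintainer: maintainer@example.com"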
650 | |||
651 | def runtime_mapping_rename (varname, pkg, d): | ||
652 | #bb.note("%s before: %s" % (varname, d.getVar(varname))) | ||
653 | |||
654 | new_depends = {} | ||
655 | deps = bb.utils.explode_dep_versions2(d.getVar(varname) or "") | ||
656 | for depend, depversions in deps.items(): | ||
657 | new_depend = get_package_mapping(depend, pkg, d, depversions) | ||
658 | if depend != new_depend: | ||
659 | bb.note("package name mapping done: %s -> %s" % (depend, new_depend)) | ||
660 | new_depends[new_depend] = deps[depend] | ||
661 | |||
662 | d.setVar(varname, bb.utils.join_deps(new_depends, commasep=False)) | ||
663 | |||
664 | #bb.note("%s after: %s" % (varname, d.getVar(varname))) | ||
665 | |||
666 | # | ||
667 | # Used by do_packagedata (and possibly other routines post do_package) | ||
668 | # | ||
669 | |||
670 | package_get_auto_pr[vardepsexclude] = "BB_TASKDEPDATA" | ||
671 | python package_get_auto_pr() { | ||
672 | import oe.prservice | ||
673 | |||
674 | def get_do_package_hash(pn): | ||
675 | if d.getVar("BB_RUNTASK") != "do_package": | ||
676 | taskdepdata = d.getVar("BB_TASKDEPDATA", False) | ||
677 | for dep in taskdepdata: | ||
678 | if taskdepdata[dep][1] == "do_package" and taskdepdata[dep][0] == pn: | ||
679 | return taskdepdata[dep][6] | ||
680 | return None | ||
681 | |||
682 | # Support per recipe PRSERV_HOST | ||
683 | pn = d.getVar('PN') | ||
684 | host = d.getVar("PRSERV_HOST_" + pn) | ||
685 | if not (host is None): | ||
686 | d.setVar("PRSERV_HOST", host) | ||
687 | |||
688 | pkgv = d.getVar("PKGV") | ||
689 | |||
690 | # PR Server not active, handle AUTOINC | ||
691 | if not d.getVar('PRSERV_HOST'): | ||
692 | d.setVar("PRSERV_PV_AUTOINC", "0") | ||
693 | return | ||
694 | |||
695 | auto_pr = None | ||
696 | pv = d.getVar("PV") | ||
697 | version = d.getVar("PRAUTOINX") | ||
698 | pkgarch = d.getVar("PACKAGE_ARCH") | ||
699 | checksum = get_do_package_hash(pn) | ||
700 | |||
701 | # If do_package isn't in the dependencies, we can't get the checksum... | ||
702 | if not checksum: | ||
703 | bb.warn('Task %s requested do_package unihash, but it was not available.' % d.getVar('BB_RUNTASK')) | ||
704 | #taskdepdata = d.getVar("BB_TASKDEPDATA", False) | ||
705 | #for dep in taskdepdata: | ||
706 | # bb.warn('%s:%s = %s' % (taskdepdata[dep][0], taskdepdata[dep][1], taskdepdata[dep][6])) | ||
707 | return | ||
708 | |||
709 | if d.getVar('PRSERV_LOCKDOWN'): | ||
710 | auto_pr = d.getVar('PRAUTO_' + version + '_' + pkgarch) or d.getVar('PRAUTO_' + version) or None | ||
711 | if auto_pr is None: | ||
712 | bb.fatal("Can NOT get PRAUTO from lockdown exported file") | ||
713 | d.setVar('PRAUTO',str(auto_pr)) | ||
714 | return | ||
715 | |||
716 | try: | ||
717 | conn = oe.prservice.prserv_make_conn(d) | ||
718 | if conn is not None: | ||
719 | if "AUTOINC" in pkgv: | ||
720 | srcpv = bb.fetch2.get_srcrev(d) | ||
721 | base_ver = "AUTOINC-%s" % version[:version.find(srcpv)] | ||
722 | value = conn.getPR(base_ver, pkgarch, srcpv) | ||
723 | d.setVar("PRSERV_PV_AUTOINC", str(value)) | ||
724 | |||
725 | auto_pr = conn.getPR(version, pkgarch, checksum) | ||
726 | conn.close() | ||
727 | except Exception as e: | ||
728 | bb.fatal("Can NOT get PRAUTO, exception %s" % str(e)) | ||
729 | if auto_pr is None: | ||
730 | bb.fatal("Can NOT get PRAUTO from remote PR service") | ||
731 | d.setVar('PRAUTO',str(auto_pr)) | ||
732 | } | ||
733 | |||
734 | # | ||
735 | # Package functions suitable for inclusion in PACKAGEFUNCS | ||
736 | # | ||
737 | |||
738 | python package_convert_pr_autoinc() { | ||
739 | pkgv = d.getVar("PKGV") | ||
740 | |||
741 | # Adjust pkgv as necessary... | ||
742 | if 'AUTOINC' in pkgv: | ||
743 | d.setVar("PKGV", pkgv.replace("AUTOINC", "${PRSERV_PV_AUTOINC}")) | ||
744 | |||
745 | # Change PRSERV_PV_AUTOINC and EXTENDPRAUTO usage to special values | ||
746 | d.setVar('PRSERV_PV_AUTOINC', '@PRSERV_PV_AUTOINC@') | ||
747 | d.setVar('EXTENDPRAUTO', '@EXTENDPRAUTO@') | ||
748 | } | ||
749 | |||
750 | LOCALEBASEPN ??= "${PN}" | ||
751 | |||
752 | python package_do_split_locales() { | ||
753 | if (d.getVar('PACKAGE_NO_LOCALE') == '1'): | ||
754 | bb.debug(1, "package requested not splitting locales") | ||
755 | return | ||
756 | |||
757 | packages = (d.getVar('PACKAGES') or "").split() | ||
758 | |||
759 | datadir = d.getVar('datadir') | ||
760 | if not datadir: | ||
761 | bb.note("datadir not defined") | ||
762 | return | ||
763 | |||
764 | dvar = d.getVar('PKGD') | ||
765 | pn = d.getVar('LOCALEBASEPN') | ||
766 | |||
767 | if pn + '-locale' in packages: | ||
768 | packages.remove(pn + '-locale') | ||
769 | |||
770 | localedir = os.path.join(dvar + datadir, 'locale') | ||
771 | |||
772 | if not cpath.isdir(localedir): | ||
773 | bb.debug(1, "No locale files in this package") | ||
774 | return | ||
775 | |||
776 | locales = os.listdir(localedir) | ||
777 | |||
778 | summary = d.getVar('SUMMARY') or pn | ||
779 | description = d.getVar('DESCRIPTION') or "" | ||
780 | locale_section = d.getVar('LOCALE_SECTION') | ||
781 | mlprefix = d.getVar('MLPREFIX') or "" | ||
782 | for l in sorted(locales): | ||
783 | ln = legitimize_package_name(l) | ||
784 | pkg = pn + '-locale-' + ln | ||
785 | packages.append(pkg) | ||
786 | d.setVar('FILES:' + pkg, os.path.join(datadir, 'locale', l)) | ||
787 | d.setVar('RRECOMMENDS:' + pkg, '%svirtual-locale-%s' % (mlprefix, ln)) | ||
788 | d.setVar('RPROVIDES:' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln)) | ||
789 | d.setVar('SUMMARY:' + pkg, '%s - %s translations' % (summary, l)) | ||
790 | d.setVar('DESCRIPTION:' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l)) | ||
791 | if locale_section: | ||
792 | d.setVar('SECTION:' + pkg, locale_section) | ||
793 | |||
794 | d.setVar('PACKAGES', ' '.join(packages)) | ||
795 | |||
796 | # Disabled by RP 18/06/07 | ||
797 | # Wildcards aren't supported in debian | ||
798 | # They break with ipkg since glibc-locale* will mean that | ||
799 | # glibc-localedata-translit* won't install as a dependency | ||
800 | # for some other package which breaks meta-toolchain | ||
801 | # Probably breaks since virtual-locale- isn't provided anywhere | ||
802 | #rdep = (d.getVar('RDEPENDS:%s' % pn) or "").split() | ||
803 | #rdep.append('%s-locale*' % pn) | ||
804 | #d.setVar('RDEPENDS:%s' % pn, ' '.join(rdep)) | ||
805 | } | ||
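Two knobs used above can be set from a recipe: PACKAGE_NO_LOCALE skips the locale split entirely, and LOCALE_SECTION sets SECTION for the generated -locale-* packages. Example values (illustrative):

    PACKAGE_NO_LOCALE = "1"
    LOCALE_SECTION = "locale"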
806 | |||
807 | python perform_packagecopy () { | ||
808 | import subprocess | ||
809 | import shutil | ||
810 | |||
811 | dest = d.getVar('D') | ||
812 | dvar = d.getVar('PKGD') | ||
813 | |||
814 | # Remove ${D}/sysroot-only if present | ||
815 | sysroot_only = os.path.join(dest, 'sysroot-only') | ||
816 | if cpath.exists(sysroot_only) and cpath.isdir(sysroot_only): | ||
817 | shutil.rmtree(sysroot_only) | ||
818 | |||
819 | # Start package population by taking a copy of the installed | ||
820 | # files to operate on | ||
821 | # Preserve sparse files and hard links | ||
822 | cmd = 'tar -cf - -C %s -p -S . | tar -xf - -C %s' % (dest, dvar) | ||
823 | subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT) | ||
824 | |||
825 | # replace RPATHs for the nativesdk binaries, to make them relocatable | ||
826 | if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d): | ||
827 | rpath_replace (dvar, d) | ||
828 | } | ||
829 | perform_packagecopy[cleandirs] = "${PKGD}" | ||
830 | perform_packagecopy[dirs] = "${PKGD}" | ||
831 | |||
832 | # We generate a master list of directories to process. We start by | ||
833 | # seeding this list with reasonable defaults, then load from | ||
834 | # the fs-perms.txt files | ||
835 | python fixup_perms () { | ||
836 | import pwd, grp | ||
837 | |||
838 | # init using a string with the same format as a line in | ||
839 | # the fs-perms.txt file, as documented below: | ||
840 | # <path> <mode> <uid> <gid> <walk> <fmode> <fuid> <fgid> | ||
841 | # <path> link <link target> | ||
842 | # | ||
843 | # __str__ can be used to print out an entry in the input format | ||
844 | # | ||
845 | # if fs_perms_entry.path is None: | ||
846 | # an error occurred | ||
847 | # if fs_perms_entry.link, you can retrieve: | ||
848 | # fs_perms_entry.path = path | ||
849 | # fs_perms_entry.link = target of link | ||
850 | # if not fs_perms_entry.link, you can retrieve: | ||
851 | # fs_perms_entry.path = path | ||
852 | # fs_perms_entry.mode = expected dir mode or None | ||
853 | # fs_perms_entry.uid = expected uid or -1 | ||
854 | # fs_perms_entry.gid = expected gid or -1 | ||
855 | # fs_perms_entry.walk = 'true' or something else | ||
856 | # fs_perms_entry.fmode = expected file mode or None | ||
857 | # fs_perms_entry.fuid = expected file uid or -1 | ||
858 | # fs_perms_entry.fgid = expected file gid or -1 | ||
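    # Illustrative entries in the two formats documented above (example
    # values only, not the shipped fs-perms.txt defaults):
    #   /usr/share/man  0755 root root true 0644 root root
    #   /var/log        link volatile/log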
859 | class fs_perms_entry(): | ||
860 | def __init__(self, line): | ||
861 | lsplit = line.split() | ||
862 | if len(lsplit) == 3 and lsplit[1].lower() == "link": | ||
863 | self._setlink(lsplit[0], lsplit[2]) | ||
864 | elif len(lsplit) == 8: | ||
865 | self._setdir(lsplit[0], lsplit[1], lsplit[2], lsplit[3], lsplit[4], lsplit[5], lsplit[6], lsplit[7]) | ||
866 | else: | ||
867 | msg = "Fixup Perms: invalid config line %s" % line | ||
868 | oe.qa.handle_error("perm-config", msg, d) | ||
869 | self.path = None | ||
870 | self.link = None | ||
871 | |||
872 | def _setdir(self, path, mode, uid, gid, walk, fmode, fuid, fgid): | ||
873 | self.path = os.path.normpath(path) | ||
874 | self.link = None | ||
875 | self.mode = self._procmode(mode) | ||
876 | self.uid = self._procuid(uid) | ||
877 | self.gid = self._procgid(gid) | ||
878 | self.walk = walk.lower() | ||
879 | self.fmode = self._procmode(fmode) | ||
880 | self.fuid = self._procuid(fuid) | ||
881 | self.fgid = self._procgid(fgid) | ||
882 | |||
883 | def _setlink(self, path, link): | ||
884 | self.path = os.path.normpath(path) | ||
885 | self.link = link | ||
886 | |||
887 | def _procmode(self, mode): | ||
888 | if not mode or (mode and mode == "-"): | ||
889 | return None | ||
890 | else: | ||
891 | return int(mode,8) | ||
892 | |||
893 | # Note uid/gid -1 has special significance in os.lchown | ||
894 | def _procuid(self, uid): | ||
895 | if uid is None or uid == "-": | ||
896 | return -1 | ||
897 | elif uid.isdigit(): | ||
898 | return int(uid) | ||
899 | else: | ||
900 | return pwd.getpwnam(uid).pw_uid | ||
901 | |||
902 | def _procgid(self, gid): | ||
903 | if gid is None or gid == "-": | ||
904 | return -1 | ||
905 | elif gid.isdigit(): | ||
906 | return int(gid) | ||
907 | else: | ||
908 | return grp.getgrnam(gid).gr_gid | ||
909 | |||
910 | # Use for debugging the entries | ||
911 | def __str__(self): | ||
912 | if self.link: | ||
913 | return "%s link %s" % (self.path, self.link) | ||
914 | else: | ||
915 | mode = "-" | ||
916 | if self.mode: | ||
917 | mode = "0%o" % self.mode | ||
918 | fmode = "-" | ||
919 | if self.fmode: | ||
920 | fmode = "0%o" % self.fmode | ||
921 | uid = self._mapugid(self.uid) | ||
922 | gid = self._mapugid(self.gid) | ||
923 | fuid = self._mapugid(self.fuid) | ||
924 | fgid = self._mapugid(self.fgid) | ||
925 | return "%s %s %s %s %s %s %s %s" % (self.path, mode, uid, gid, self.walk, fmode, fuid, fgid) | ||
926 | |||
927 | def _mapugid(self, id): | ||
928 | if id is None or id == -1: | ||
929 | return "-" | ||
930 | else: | ||
931 | return "%d" % id | ||
932 | |||
933 | # Fix the permission, owner and group of path | ||
934 | def fix_perms(path, mode, uid, gid, dir): | ||
935 | if mode and not os.path.islink(path): | ||
936 | #bb.note("Fixup Perms: chmod 0%o %s" % (mode, dir)) | ||
937 | os.chmod(path, mode) | ||
938 | # -1 is a special value that means don't change the uid/gid | ||
939 | # if they are BOTH -1, don't bother to lchown | ||
940 | if not (uid == -1 and gid == -1): | ||
941 | #bb.note("Fixup Perms: lchown %d:%d %s" % (uid, gid, dir)) | ||
942 | os.lchown(path, uid, gid) | ||
943 | |||
944 | # Return a list of configuration files based on either the default | ||
945 | # files/fs-perms.txt or the contents of FILESYSTEM_PERMS_TABLES | ||
946 | # paths are resolved via BBPATH | ||
947 | def get_fs_perms_list(d): | ||
948 | str = "" | ||
949 | bbpath = d.getVar('BBPATH') | ||
950 | fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES') or "" | ||
951 | for conf_file in fs_perms_tables.split(): | ||
952 | confpath = bb.utils.which(bbpath, conf_file) | ||
953 | if confpath: | ||
954 | str += " %s" % bb.utils.which(bbpath, conf_file) | ||
955 | else: | ||
956 | bb.warn("cannot find %s specified in FILESYSTEM_PERMS_TABLES" % conf_file) | ||
957 | return str | ||
958 | |||
959 | |||
960 | |||
961 | dvar = d.getVar('PKGD') | ||
962 | |||
963 | fs_perms_table = {} | ||
964 | fs_link_table = {} | ||
965 | |||
966 | # By default all of the standard directories specified in | ||
967 | # bitbake.conf will get 0755 root:root. | ||
968 | target_path_vars = [ 'base_prefix', | ||
969 | 'prefix', | ||
970 | 'exec_prefix', | ||
971 | 'base_bindir', | ||
972 | 'base_sbindir', | ||
973 | 'base_libdir', | ||
974 | 'datadir', | ||
975 | 'sysconfdir', | ||
976 | 'servicedir', | ||
977 | 'sharedstatedir', | ||
978 | 'localstatedir', | ||
979 | 'infodir', | ||
980 | 'mandir', | ||
981 | 'docdir', | ||
982 | 'bindir', | ||
983 | 'sbindir', | ||
984 | 'libexecdir', | ||
985 | 'libdir', | ||
986 | 'includedir', | ||
987 | 'oldincludedir' ] | ||
988 | |||
989 | for path in target_path_vars: | ||
990 | dir = d.getVar(path) or "" | ||
991 | if dir == "": | ||
992 | continue | ||
993 | fs_perms_table[dir] = fs_perms_entry(d.expand("%s 0755 root root false - - -" % (dir))) | ||
994 | |||
995 | # Now we actually load from the configuration files | ||
996 | for conf in get_fs_perms_list(d).split(): | ||
997 | if not os.path.exists(conf): | ||
998 | continue | ||
999 | with open(conf) as f: | ||
1000 | for line in f: | ||
1001 | if line.startswith('#'): | ||
1002 | continue | ||
1003 | lsplit = line.split() | ||
1004 | if len(lsplit) == 0: | ||
1005 | continue | ||
1006 | if len(lsplit) != 8 and not (len(lsplit) == 3 and lsplit[1].lower() == "link"): | ||
1007 | msg = "Fixup perms: %s invalid line: %s" % (conf, line) | ||
1008 | oe.qa.handle_error("perm-line", msg, d) | ||
1009 | continue | ||
1010 | entry = fs_perms_entry(d.expand(line)) | ||
1011 | if entry and entry.path: | ||
1012 | if entry.link: | ||
1013 | fs_link_table[entry.path] = entry | ||
1014 | if entry.path in fs_perms_table: | ||
1015 | fs_perms_table.pop(entry.path) | ||
1016 | else: | ||
1017 | fs_perms_table[entry.path] = entry | ||
1018 | if entry.path in fs_link_table: | ||
1019 | fs_link_table.pop(entry.path) | ||
1020 | |||
1021 | # Debug -- list out in-memory table | ||
1022 | #for dir in fs_perms_table: | ||
1023 | # bb.note("Fixup Perms: %s: %s" % (dir, str(fs_perms_table[dir]))) | ||
1024 | #for link in fs_link_table: | ||
1025 | # bb.note("Fixup Perms: %s: %s" % (link, str(fs_link_table[link]))) | ||
1026 | |||
1027 | # We process links first, so we can go back and fixup directory ownership | ||
1028 | # for any newly created directories | ||
1029 | # Process in sorted order so /run gets created before /run/lock, etc. | ||
1030 | for entry in sorted(fs_link_table.values(), key=lambda x: x.link): | ||
1031 | link = entry.link | ||
1032 | dir = entry.path | ||
1033 | origin = dvar + dir | ||
1034 | if not (cpath.exists(origin) and cpath.isdir(origin) and not cpath.islink(origin)): | ||
1035 | continue | ||
1036 | |||
1037 | if link[0] == "/": | ||
1038 | target = dvar + link | ||
1039 | ptarget = link | ||
1040 | else: | ||
1041 | target = os.path.join(os.path.dirname(origin), link) | ||
1042 | ptarget = os.path.join(os.path.dirname(dir), link) | ||
1043 | if os.path.exists(target): | ||
1044 | msg = "Fixup Perms: Unable to correct directory link, target already exists: %s -> %s" % (dir, ptarget) | ||
1045 | oe.qa.handle_error("perm-link", msg, d) | ||
1046 | continue | ||
1047 | |||
1048 | # Create path to move directory to, move it, and then setup the symlink | ||
1049 | bb.utils.mkdirhier(os.path.dirname(target)) | ||
1050 | #bb.note("Fixup Perms: Rename %s -> %s" % (dir, ptarget)) | ||
1051 | bb.utils.rename(origin, target) | ||
1052 | #bb.note("Fixup Perms: Link %s -> %s" % (dir, link)) | ||
1053 | os.symlink(link, origin) | ||
1054 | |||
1055 | for dir in fs_perms_table: | ||
1056 | origin = dvar + dir | ||
1057 | if not (cpath.exists(origin) and cpath.isdir(origin)): | ||
1058 | continue | ||
1059 | |||
1060 | fix_perms(origin, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir) | ||
1061 | |||
1062 | if fs_perms_table[dir].walk == 'true': | ||
1063 | for root, dirs, files in os.walk(origin): | ||
1064 | for dr in dirs: | ||
1065 | each_dir = os.path.join(root, dr) | ||
1066 | fix_perms(each_dir, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir) | ||
1067 | for f in files: | ||
1068 | each_file = os.path.join(root, f) | ||
1069 | fix_perms(each_file, fs_perms_table[dir].fmode, fs_perms_table[dir].fuid, fs_perms_table[dir].fgid, dir) | ||
1070 | } | ||
1071 | |||
1072 | python split_and_strip_files () { | ||
1073 | import stat, errno | ||
1074 | import subprocess | ||
1075 | |||
1076 | dvar = d.getVar('PKGD') | ||
1077 | pn = d.getVar('PN') | ||
1078 | hostos = d.getVar('HOST_OS') | ||
1079 | |||
1080 | oldcwd = os.getcwd() | ||
1081 | os.chdir(dvar) | ||
1082 | |||
1083 | # We default to '.debug' style | ||
1084 | if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-file-directory': | ||
1085 | # Single debug-file-directory style debug info | ||
1086 | debugappend = ".debug" | ||
1087 | debugstaticappend = "" | ||
1088 | debugdir = "" | ||
1089 | debugstaticdir = "" | ||
1090 | debuglibdir = "/usr/lib/debug" | ||
1091 | debugstaticlibdir = "/usr/lib/debug-static" | ||
1092 | debugsrcdir = "/usr/src/debug" | ||
1093 | elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-without-src': | ||
1094 | # Original OE-core, a.k.a. ".debug", style debug info, but without sources in /usr/src/debug | ||
1095 | debugappend = "" | ||
1096 | debugstaticappend = "" | ||
1097 | debugdir = "/.debug" | ||
1098 | debugstaticdir = "/.debug-static" | ||
1099 | debuglibdir = "" | ||
1100 | debugstaticlibdir = "" | ||
1101 | debugsrcdir = "" | ||
1102 | elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg': | ||
1103 | debugappend = "" | ||
1104 | debugstaticappend = "" | ||
1105 | debugdir = "/.debug" | ||
1106 | debugstaticdir = "/.debug-static" | ||
1107 | debuglibdir = "" | ||
1108 | debugstaticlibdir = "" | ||
1109 | debugsrcdir = "/usr/src/debug" | ||
1110 | else: | ||
1111 | # Original OE-core, a.k.a. ".debug", style debug info | ||
1112 | debugappend = "" | ||
1113 | debugstaticappend = "" | ||
1114 | debugdir = "/.debug" | ||
1115 | debugstaticdir = "/.debug-static" | ||
1116 | debuglibdir = "" | ||
1117 | debugstaticlibdir = "" | ||
1118 | debugsrcdir = "/usr/src/debug" | ||
1119 | |||
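    # For reference, the style is selected by setting PACKAGE_DEBUG_SPLIT_STYLE in a
    # recipe or in local.conf to one of the values tested above, e.g. (illustrative):
    #   PACKAGE_DEBUG_SPLIT_STYLE = "debug-file-directory"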
1120 | # | ||
1121 | # First let's figure out all of the files we may have to process ... do this only once! | ||
1122 | # | ||
1123 | elffiles = {} | ||
1124 | symlinks = {} | ||
1125 | kernmods = [] | ||
1126 | staticlibs = [] | ||
1127 | inodes = {} | ||
1128 | libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir")) | ||
1129 | baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir")) | ||
1130 | skipfiles = (d.getVar("INHIBIT_PACKAGE_STRIP_FILES") or "").split() | ||
1131 | if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1' or \ | ||
1132 | d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'): | ||
1133 | checkelf = {} | ||
1134 | checkelflinks = {} | ||
1135 | for root, dirs, files in cpath.walk(dvar): | ||
1136 | for f in files: | ||
1137 | file = os.path.join(root, f) | ||
1138 | |||
1139 | # Skip debug files | ||
1140 | if debugappend and file.endswith(debugappend): | ||
1141 | continue | ||
1142 | if debugdir and debugdir in os.path.dirname(file[len(dvar):]): | ||
1143 | continue | ||
1144 | |||
1145 | if file in skipfiles: | ||
1146 | continue | ||
1147 | |||
1148 | if file.endswith(".ko") and file.find("/lib/modules/") != -1: | ||
1149 | kernmods.append(file) | ||
1150 | continue | ||
1151 | if oe.package.is_static_lib(file): | ||
1152 | staticlibs.append(file) | ||
1153 | continue | ||
1154 | |||
1155 | try: | ||
1156 | ltarget = cpath.realpath(file, dvar, False) | ||
1157 | s = cpath.lstat(ltarget) | ||
1158 | except OSError as e: | ||
1159 | (err, strerror) = e.args | ||
1160 | if err != errno.ENOENT: | ||
1161 | raise | ||
1162 | # Skip broken symlinks | ||
1163 | continue | ||
1164 | if not s: | ||
1165 | continue | ||
1166 | # Check it is an executable | ||
1167 | if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) or (s[stat.ST_MODE] & stat.S_IXOTH) \ | ||
1168 | or ((file.startswith(libdir) or file.startswith(baselibdir)) and (".so" in f or ".node" in f)): | ||
1169 | |||
1170 | if cpath.islink(file): | ||
1171 | checkelflinks[file] = ltarget | ||
1172 | continue | ||
1173 | # Use a reference of device ID and inode number to identify files | ||
1174 | file_reference = "%d_%d" % (s.st_dev, s.st_ino) | ||
1175 | checkelf[file] = (file, file_reference) | ||
1176 | |||
1177 | results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelflinks.values(), d) | ||
1178 | results_map = {} | ||
1179 | for (ltarget, elf_file) in results: | ||
1180 | results_map[ltarget] = elf_file | ||
1181 | for file in checkelflinks: | ||
1182 | ltarget = checkelflinks[file] | ||
1183 | # If it's a symlink, and points to an ELF file, we capture the readlink target | ||
1184 | if results_map[ltarget]: | ||
1185 | target = os.readlink(file) | ||
1186 | #bb.note("Sym: %s (%d)" % (ltarget, results_map[ltarget])) | ||
1187 | symlinks[file] = target | ||
1188 | |||
1189 | results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelf.keys(), d) | ||
1190 | |||
1191 | # Sort results by file path. This ensures that the files are always | ||
1192 | # processed in the same order, which is important to make sure builds | ||
1193 | # are reproducible when dealing with hardlinks | ||
1194 | results.sort(key=lambda x: x[0]) | ||
1195 | |||
1196 | for (file, elf_file) in results: | ||
1197 | # It's a file (or hardlink), not a link | ||
1198 | # ...but is it ELF, and is it already stripped? | ||
1199 | if elf_file & 1: | ||
1200 | if elf_file & 2: | ||
1201 | if 'already-stripped' in (d.getVar('INSANE_SKIP:' + pn) or "").split(): | ||
1202 | bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn)) | ||
1203 | else: | ||
1204 | msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn) | ||
1205 | oe.qa.handle_error("already-stripped", msg, d) | ||
1206 | continue | ||
1207 | |||
1208 | # At this point we have an unstripped elf file. We need to: | ||
1209 | # a) Make sure any file we strip is not hardlinked to anything else outside this tree | ||
1210 | # b) Only strip any hardlinked file once (no races) | ||
1211 | # c) Track any hardlinks between files so that we can reconstruct matching debug file hardlinks | ||
1212 | |||
1213 | # Use a reference of device ID and inode number to identify files | ||
1214 | file_reference = checkelf[file][1] | ||
1215 | if file_reference in inodes: | ||
1216 | os.unlink(file) | ||
1217 | os.link(inodes[file_reference][0], file) | ||
1218 | inodes[file_reference].append(file) | ||
1219 | else: | ||
1220 | inodes[file_reference] = [file] | ||
1221 | # break hardlink | ||
1222 | bb.utils.break_hardlinks(file) | ||
1223 | elffiles[file] = elf_file | ||
1224 | # Modified the file so clear the cache | ||
1225 | cpath.updatecache(file) | ||
1226 | |||
1227 | def strip_pkgd_prefix(f): | ||
1228 | nonlocal dvar | ||
1229 | |||
1230 | if f.startswith(dvar): | ||
1231 | return f[len(dvar):] | ||
1232 | |||
1233 | return f | ||
1234 | |||
1235 | # | ||
1236 | # First let's process debug splitting | ||
1237 | # | ||
1238 | if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'): | ||
1239 | results = oe.utils.multiprocess_launch(splitdebuginfo, list(elffiles), d, extraargs=(dvar, debugdir, debuglibdir, debugappend, debugsrcdir, d)) | ||
1240 | |||
1241 | if debugsrcdir and not hostos.startswith("mingw"): | ||
1242 | if (d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'): | ||
1243 | results = oe.utils.multiprocess_launch(splitstaticdebuginfo, staticlibs, d, extraargs=(dvar, debugstaticdir, debugstaticlibdir, debugstaticappend, debugsrcdir, d)) | ||
1244 | else: | ||
1245 | for file in staticlibs: | ||
1246 | results.append( (file,source_info(file, d)) ) | ||
1247 | |||
1248 | d.setVar("PKGDEBUGSOURCES", {strip_pkgd_prefix(f): sorted(s) for f, s in results}) | ||
1249 | |||
1250 | sources = set() | ||
1251 | for r in results: | ||
1252 | sources.update(r[1]) | ||
1253 | |||
1254 | # Hardlink our debug symbols to the other hardlink copies | ||
1255 | for ref in inodes: | ||
1256 | if len(inodes[ref]) == 1: | ||
1257 | continue | ||
1258 | |||
1259 | target = inodes[ref][0][len(dvar):] | ||
1260 | for file in inodes[ref][1:]: | ||
1261 | src = file[len(dvar):] | ||
1262 | dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(target) + debugappend | ||
1263 | fpath = dvar + dest | ||
1264 | ftarget = dvar + debuglibdir + os.path.dirname(target) + debugdir + "/" + os.path.basename(target) + debugappend | ||
1265 | bb.utils.mkdirhier(os.path.dirname(fpath)) | ||
1266 | # Only one hardlink of separated debug info file in each directory | ||
1267 | if not os.access(fpath, os.R_OK): | ||
1268 | #bb.note("Link %s -> %s" % (fpath, ftarget)) | ||
1269 | os.link(ftarget, fpath) | ||
1270 | |||
1271 | # Create symlinks for all cases we were able to split symbols | ||
1272 | for file in symlinks: | ||
1273 | src = file[len(dvar):] | ||
1274 | dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend | ||
1275 | fpath = dvar + dest | ||
1276 | # Skip it if the target doesn't exist | ||
1277 | try: | ||
1278 | s = os.stat(fpath) | ||
1279 | except OSError as e: | ||
1280 | (err, strerror) = e.args | ||
1281 | if err != errno.ENOENT: | ||
1282 | raise | ||
1283 | continue | ||
1284 | |||
1285 | ltarget = symlinks[file] | ||
1286 | lpath = os.path.dirname(ltarget) | ||
1287 | lbase = os.path.basename(ltarget) | ||
1288 | ftarget = "" | ||
1289 | if lpath and lpath != ".": | ||
1290 | ftarget += lpath + debugdir + "/" | ||
1291 | ftarget += lbase + debugappend | ||
1292 | if lpath.startswith(".."): | ||
1293 | ftarget = os.path.join("..", ftarget) | ||
1294 | bb.utils.mkdirhier(os.path.dirname(fpath)) | ||
1295 | #bb.note("Symlink %s -> %s" % (fpath, ftarget)) | ||
1296 | os.symlink(ftarget, fpath) | ||
1297 | |||
1298 | # Process the debugsrcdir if requested... | ||
1299 | # This copies and places the referenced sources for later debugging... | ||
1300 | copydebugsources(debugsrcdir, sources, d) | ||
1301 | # | ||
1302 | # End of debug splitting | ||
1303 | # | ||
1304 | |||
1305 | # | ||
1306 | # Now let's go back over things and strip them | ||
1307 | # | ||
1308 | if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1'): | ||
1309 | strip = d.getVar("STRIP") | ||
1310 | sfiles = [] | ||
1311 | for file in elffiles: | ||
1312 | elf_file = int(elffiles[file]) | ||
1313 | #bb.note("Strip %s" % file) | ||
1314 | sfiles.append((file, elf_file, strip)) | ||
1315 | for f in kernmods: | ||
1316 | sfiles.append((f, 16, strip)) | ||
1317 | if (d.getVar('PACKAGE_STRIP_STATIC') == '1' or d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'): | ||
1318 | for f in staticlibs: | ||
1319 | sfiles.append((f, 16, strip)) | ||
1320 | |||
1321 | oe.utils.multiprocess_launch(oe.package.runstrip, sfiles, d) | ||
1322 | |||
1323 | # Build "minidebuginfo" and reinject it back into the stripped binaries | ||
1324 | if d.getVar('PACKAGE_MINIDEBUGINFO') == '1': | ||
1325 | oe.utils.multiprocess_launch(inject_minidebuginfo, list(elffiles), d, | ||
1326 | extraargs=(dvar, debugdir, debuglibdir, debugappend, debugsrcdir, d)) | ||
1327 | |||
1328 | # | ||
1329 | # End of strip | ||
1330 | # | ||
1331 | os.chdir(oldcwd) | ||
1332 | } | ||
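# Illustration (not part of the original class; paths are hypothetical): with
# the default PACKAGE_DEBUG_SPLIT_STYLE, the splitting above leaves the stripped
# file in place and stores its debug data in a ".debug" subdirectory of PKGD,
# e.g.
#   usr/bin/foo            - stripped executable
#   usr/bin/.debug/foo     - separated debug information
#   usr/src/debug/...      - referenced sources copied by copydebugsources()
# populate_packages() below then routes the ".debug" paths into the -dbg
# package and, with debug-with-srcpkg, /usr/src/debug into the -src package.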
1333 | |||
1334 | python populate_packages () { | ||
1335 | import glob, re | ||
1336 | |||
1337 | workdir = d.getVar('WORKDIR') | ||
1338 | outdir = d.getVar('DEPLOY_DIR') | ||
1339 | dvar = d.getVar('PKGD') | ||
1340 | packages = d.getVar('PACKAGES').split() | ||
1341 | pn = d.getVar('PN') | ||
1342 | |||
1343 | bb.utils.mkdirhier(outdir) | ||
1344 | os.chdir(dvar) | ||
1345 | |||
1346 | autodebug = not (d.getVar("NOAUTOPACKAGEDEBUG") or False) | ||
1347 | |||
1348 | split_source_package = (d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg') | ||
1349 | |||
1350 | # If debug-with-srcpkg mode is enabled then add the source package if it | ||
1351 | # doesn't exist and add the source file contents to the source package. | ||
1352 | if split_source_package: | ||
1353 | src_package_name = ('%s-src' % d.getVar('PN')) | ||
1354 | if not src_package_name in packages: | ||
1355 | packages.append(src_package_name) | ||
1356 | d.setVar('FILES:%s' % src_package_name, '/usr/src/debug') | ||
1357 | |||
1358 | # Sanity check PACKAGES for duplicates | ||
1359 | # This check should be moved to sanity.bbclass once we have the infrastructure | ||
1360 | package_dict = {} | ||
1361 | |||
1362 | for i, pkg in enumerate(packages): | ||
1363 | if pkg in package_dict: | ||
1364 | msg = "%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg | ||
1365 | oe.qa.handle_error("packages-list", msg, d) | ||
1366 | # Ensure the source package gets the chance to pick up the source files | ||
1367 | # before the debug package by ordering it first in PACKAGES. Whether it | ||
1368 | # actually picks up any source files is controlled by | ||
1369 | # PACKAGE_DEBUG_SPLIT_STYLE. | ||
1370 | elif pkg.endswith("-src"): | ||
1371 | package_dict[pkg] = (10, i) | ||
1372 | elif autodebug and pkg.endswith("-dbg"): | ||
1373 | package_dict[pkg] = (30, i) | ||
1374 | else: | ||
1375 | package_dict[pkg] = (50, i) | ||
1376 | packages = sorted(package_dict.keys(), key=package_dict.get) | ||
1377 | d.setVar('PACKAGES', ' '.join(packages)) | ||
1378 | pkgdest = d.getVar('PKGDEST') | ||
1379 | |||
1380 | seen = [] | ||
1381 | |||
1382 | # os.mkdir masks the permissions with umask so we have to unset it first | ||
1383 | oldumask = os.umask(0) | ||
1384 | |||
1385 | debug = [] | ||
1386 | for root, dirs, files in cpath.walk(dvar): | ||
1387 | dir = root[len(dvar):] | ||
1388 | if not dir: | ||
1389 | dir = os.sep | ||
1390 | for f in (files + dirs): | ||
1391 | path = "." + os.path.join(dir, f) | ||
1392 | if "/.debug/" in path or "/.debug-static/" in path or path.endswith("/.debug"): | ||
1393 | debug.append(path) | ||
1394 | |||
1395 | for pkg in packages: | ||
1396 | root = os.path.join(pkgdest, pkg) | ||
1397 | bb.utils.mkdirhier(root) | ||
1398 | |||
1399 | filesvar = d.getVar('FILES:%s' % pkg) or "" | ||
1400 | if "//" in filesvar: | ||
1401 | msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg | ||
1402 | oe.qa.handle_error("files-invalid", msg, d) | ||
1403 | filesvar = filesvar.replace("//", "/") | ||
1404 | |||
1405 | origfiles = filesvar.split() | ||
1406 | files, symlink_paths = files_from_filevars(origfiles) | ||
1407 | |||
1408 | if autodebug and pkg.endswith("-dbg"): | ||
1409 | files.extend(debug) | ||
1410 | |||
1411 | for file in files: | ||
1412 | if (not cpath.islink(file)) and (not cpath.exists(file)): | ||
1413 | continue | ||
1414 | if file in seen: | ||
1415 | continue | ||
1416 | seen.append(file) | ||
1417 | |||
1418 | def mkdir(src, dest, p): | ||
1419 | src = os.path.join(src, p) | ||
1420 | dest = os.path.join(dest, p) | ||
1421 | fstat = cpath.stat(src) | ||
1422 | os.mkdir(dest) | ||
1423 | os.chmod(dest, fstat.st_mode) | ||
1424 | os.chown(dest, fstat.st_uid, fstat.st_gid) | ||
1425 | if p not in seen: | ||
1426 | seen.append(p) | ||
1427 | cpath.updatecache(dest) | ||
1428 | |||
1429 | def mkdir_recurse(src, dest, paths): | ||
1430 | if cpath.exists(dest + '/' + paths): | ||
1431 | return | ||
1432 | while paths.startswith("./"): | ||
1433 | paths = paths[2:] | ||
1434 | p = "." | ||
1435 | for c in paths.split("/"): | ||
1436 | p = os.path.join(p, c) | ||
1437 | if not cpath.exists(os.path.join(dest, p)): | ||
1438 | mkdir(src, dest, p) | ||
1439 | |||
1440 | if cpath.isdir(file) and not cpath.islink(file): | ||
1441 | mkdir_recurse(dvar, root, file) | ||
1442 | continue | ||
1443 | |||
1444 | mkdir_recurse(dvar, root, os.path.dirname(file)) | ||
1445 | fpath = os.path.join(root,file) | ||
1446 | if not cpath.islink(file): | ||
1447 | os.link(file, fpath) | ||
1448 | continue | ||
1449 | ret = bb.utils.copyfile(file, fpath) | ||
1450 | if ret is False or ret == 0: | ||
1451 | bb.fatal("File population failed") | ||
1452 | |||
1453 | # Check if symlink paths exist | ||
1454 | for file in symlink_paths: | ||
1455 | if not os.path.exists(os.path.join(root,file)): | ||
1456 | bb.fatal("File '%s' cannot be packaged into '%s' because its " | ||
1457 | "parent directory structure does not exist. One of " | ||
1458 | "its parent directories is a symlink whose target " | ||
1459 | "directory is not included in the package." % | ||
1460 | (file, pkg)) | ||
1461 | |||
1462 | os.umask(oldumask) | ||
1463 | os.chdir(workdir) | ||
1464 | |||
1465 | # Handle LICENSE_EXCLUSION | ||
1466 | package_list = [] | ||
1467 | for pkg in packages: | ||
1468 | licenses = d.getVar('LICENSE_EXCLUSION-' + pkg) | ||
1469 | if licenses: | ||
1470 | msg = "Excluding %s from packaging as it has incompatible license(s): %s" % (pkg, licenses) | ||
1471 | oe.qa.handle_error("incompatible-license", msg, d) | ||
1472 | else: | ||
1473 | package_list.append(pkg) | ||
1474 | d.setVar('PACKAGES', ' '.join(package_list)) | ||
1475 | |||
1476 | unshipped = [] | ||
1477 | for root, dirs, files in cpath.walk(dvar): | ||
1478 | dir = root[len(dvar):] | ||
1479 | if not dir: | ||
1480 | dir = os.sep | ||
1481 | for f in (files + dirs): | ||
1482 | path = os.path.join(dir, f) | ||
1483 | if ('.' + path) not in seen: | ||
1484 | unshipped.append(path) | ||
1485 | |||
1486 | if unshipped != []: | ||
1487 | msg = pn + ": Files/directories were installed but not shipped in any package:" | ||
1488 | if "installed-vs-shipped" in (d.getVar('INSANE_SKIP:' + pn) or "").split(): | ||
1489 | bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn) | ||
1490 | else: | ||
1491 | for f in unshipped: | ||
1492 | msg = msg + "\n " + f | ||
1493 | msg = msg + "\nPlease set FILES such that these items are packaged. Alternatively if they are unneeded, avoid installing them or delete them within do_install.\n" | ||
1494 | msg = msg + "%s: %d installed and not shipped files." % (pn, len(unshipped)) | ||
1495 | oe.qa.handle_error("installed-vs-shipped", msg, d) | ||
1496 | } | ||
1497 | populate_packages[dirs] = "${D}" | ||
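# Illustration (not part of the original class; variable values are
# hypothetical): populate_packages() above assigns files on a first-match
# basis following the PACKAGES ordering established earlier, e.g. with
#   PACKAGES = "foo-src foo-dbg foo-dev foo"
#   FILES:foo-dev = "${libdir}/lib*.so"
#   FILES:foo = "${libdir}/lib*.so.*"
# a path already claimed by an earlier package is recorded in "seen" and is
# skipped when later packages are populated.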
1498 | |||
1499 | python package_fixsymlinks () { | ||
1500 | import errno | ||
1501 | pkgdest = d.getVar('PKGDEST') | ||
1502 | packages = d.getVar("PACKAGES", False).split() | ||
1503 | |||
1504 | dangling_links = {} | ||
1505 | pkg_files = {} | ||
1506 | for pkg in packages: | ||
1507 | dangling_links[pkg] = [] | ||
1508 | pkg_files[pkg] = [] | ||
1509 | inst_root = os.path.join(pkgdest, pkg) | ||
1510 | for path in pkgfiles[pkg]: | ||
1511 | rpath = path[len(inst_root):] | ||
1512 | pkg_files[pkg].append(rpath) | ||
1513 | rtarget = cpath.realpath(path, inst_root, True, assume_dir = True) | ||
1514 | if not cpath.lexists(rtarget): | ||
1515 | dangling_links[pkg].append(os.path.normpath(rtarget[len(inst_root):])) | ||
1516 | |||
1517 | newrdepends = {} | ||
1518 | for pkg in dangling_links: | ||
1519 | for l in dangling_links[pkg]: | ||
1520 | found = False | ||
1521 | bb.debug(1, "%s contains dangling link %s" % (pkg, l)) | ||
1522 | for p in packages: | ||
1523 | if l in pkg_files[p]: | ||
1524 | found = True | ||
1525 | bb.debug(1, "target found in %s" % p) | ||
1526 | if p == pkg: | ||
1527 | break | ||
1528 | if pkg not in newrdepends: | ||
1529 | newrdepends[pkg] = [] | ||
1530 | newrdepends[pkg].append(p) | ||
1531 | break | ||
1532 | if found == False: | ||
1533 | bb.note("%s contains dangling symlink to %s" % (pkg, l)) | ||
1534 | |||
1535 | for pkg in newrdepends: | ||
1536 | rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS:' + pkg) or "") | ||
1537 | for p in newrdepends[pkg]: | ||
1538 | if p not in rdepends: | ||
1539 | rdepends[p] = [] | ||
1540 | d.setVar('RDEPENDS:' + pkg, bb.utils.join_deps(rdepends, commasep=False)) | ||
1541 | } | ||
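# Illustration (not part of the original class; names are hypothetical): if
# package "foo" ships the symlink /usr/bin/tool -> /usr/lib/foo/helper while
# /usr/lib/foo/helper itself is packaged in "foo-utils", the loop above sees a
# dangling target in "foo" and adds "foo-utils" to RDEPENDS:foo.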
1542 | |||
1543 | |||
1544 | python package_package_name_hook() { | ||
1545 | """ | ||
1546 | A package_name_hook function can be used to rewrite the package names by | ||
1547 | changing PKG. For an example, see debian.bbclass. | ||
1548 | """ | ||
1549 | pass | ||
1550 | } | ||
1551 | |||
1552 | EXPORT_FUNCTIONS package_name_hook | ||
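# Illustrative sketch only (not part of the original class; the renaming rule
# is made up): a recipe or bbclass can override package_name_hook to rename
# output packages by setting PKG:<pkg>, along these lines:
#
#   python package_name_hook () {
#       for pkg in (d.getVar("PACKAGES") or "").split():
#           if not pkg.endswith("-dbg") and not pkg.endswith("-dev"):
#               d.setVar("PKG:" + pkg, "acme-" + pkg)
#   }
#
# debian.bbclass uses the same PKG:<pkg> mechanism for its library renaming.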
1553 | |||
1554 | |||
1555 | PKGDESTWORK = "${WORKDIR}/pkgdata" | ||
1556 | |||
1557 | PKGDATA_VARS = "PN PE PV PR PKGE PKGV PKGR LICENSE DESCRIPTION SUMMARY RDEPENDS RPROVIDES RRECOMMENDS RSUGGESTS RREPLACES RCONFLICTS SECTION PKG ALLOW_EMPTY FILES CONFFILES FILES_INFO PACKAGE_ADD_METADATA pkg_postinst pkg_postrm pkg_preinst pkg_prerm" | ||
1558 | |||
1559 | python emit_pkgdata() { | ||
1560 | from glob import glob | ||
1561 | import json | ||
1562 | import bb.compress.zstd | ||
1563 | |||
1564 | def process_postinst_on_target(pkg, mlprefix): | ||
1565 | pkgval = d.getVar('PKG:%s' % pkg) | ||
1566 | if pkgval is None: | ||
1567 | pkgval = pkg | ||
1568 | |||
1569 | defer_fragment = """ | ||
1570 | if [ -n "$D" ]; then | ||
1571 | $INTERCEPT_DIR/postinst_intercept delay_to_first_boot %s mlprefix=%s | ||
1572 | exit 0 | ||
1573 | fi | ||
1574 | """ % (pkgval, mlprefix) | ||
1575 | |||
1576 | postinst = d.getVar('pkg_postinst:%s' % pkg) | ||
1577 | postinst_ontarget = d.getVar('pkg_postinst_ontarget:%s' % pkg) | ||
1578 | |||
1579 | if postinst_ontarget: | ||
1580 | bb.debug(1, 'adding deferred pkg_postinst_ontarget() to pkg_postinst() for %s' % pkg) | ||
1581 | if not postinst: | ||
1582 | postinst = '#!/bin/sh\n' | ||
1583 | postinst += defer_fragment | ||
1584 | postinst += postinst_ontarget | ||
1585 | d.setVar('pkg_postinst:%s' % pkg, postinst) | ||
1586 | |||
1587 | def add_set_e_to_scriptlets(pkg): | ||
1588 | for scriptlet_name in ('pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm'): | ||
1589 | scriptlet = d.getVar('%s:%s' % (scriptlet_name, pkg)) | ||
1590 | if scriptlet: | ||
1591 | scriptlet_split = scriptlet.split('\n') | ||
1592 | if scriptlet_split[0].startswith("#!"): | ||
1593 | scriptlet = scriptlet_split[0] + "\nset -e\n" + "\n".join(scriptlet_split[1:]) | ||
1594 | else: | ||
1595 | scriptlet = "set -e\n" + "\n".join(scriptlet_split[0:]) | ||
1596 | d.setVar('%s:%s' % (scriptlet_name, pkg), scriptlet) | ||
1597 | |||
1598 | def write_if_exists(f, pkg, var): | ||
1599 | def encode(str): | ||
1600 | import codecs | ||
1601 | c = codecs.getencoder("unicode_escape") | ||
1602 | return c(str)[0].decode("latin1") | ||
1603 | |||
1604 | val = d.getVar('%s:%s' % (var, pkg)) | ||
1605 | if val: | ||
1606 | f.write('%s:%s: %s\n' % (var, pkg, encode(val))) | ||
1607 | return val | ||
1608 | val = d.getVar('%s' % (var)) | ||
1609 | if val: | ||
1610 | f.write('%s: %s\n' % (var, encode(val))) | ||
1611 | return val | ||
1612 | |||
1613 | def write_extra_pkgs(variants, pn, packages, pkgdatadir): | ||
1614 | for variant in variants: | ||
1615 | with open("%s/%s-%s" % (pkgdatadir, variant, pn), 'w') as fd: | ||
1616 | fd.write("PACKAGES: %s\n" % ' '.join( | ||
1617 | map(lambda pkg: '%s-%s' % (variant, pkg), packages.split()))) | ||
1618 | |||
1619 | def write_extra_runtime_pkgs(variants, packages, pkgdatadir): | ||
1620 | for variant in variants: | ||
1621 | for pkg in packages.split(): | ||
1622 | ml_pkg = "%s-%s" % (variant, pkg) | ||
1623 | subdata_file = "%s/runtime/%s" % (pkgdatadir, ml_pkg) | ||
1624 | with open(subdata_file, 'w') as fd: | ||
1625 | fd.write("PKG:%s: %s" % (ml_pkg, pkg)) | ||
1626 | |||
1627 | packages = d.getVar('PACKAGES') | ||
1628 | pkgdest = d.getVar('PKGDEST') | ||
1629 | pkgdatadir = d.getVar('PKGDESTWORK') | ||
1630 | |||
1631 | data_file = pkgdatadir + d.expand("/${PN}") | ||
1632 | with open(data_file, 'w') as fd: | ||
1633 | fd.write("PACKAGES: %s\n" % packages) | ||
1634 | |||
1635 | pkgdebugsource = d.getVar("PKGDEBUGSOURCES") or [] | ||
1636 | |||
1637 | pn = d.getVar('PN') | ||
1638 | global_variants = (d.getVar('MULTILIB_GLOBAL_VARIANTS') or "").split() | ||
1639 | variants = (d.getVar('MULTILIB_VARIANTS') or "").split() | ||
1640 | |||
1641 | if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d): | ||
1642 | write_extra_pkgs(variants, pn, packages, pkgdatadir) | ||
1643 | |||
1644 | if bb.data.inherits_class('allarch', d) and not variants \ | ||
1645 | and not bb.data.inherits_class('packagegroup', d): | ||
1646 | write_extra_pkgs(global_variants, pn, packages, pkgdatadir) | ||
1647 | |||
1648 | workdir = d.getVar('WORKDIR') | ||
1649 | |||
1650 | for pkg in packages.split(): | ||
1651 | pkgval = d.getVar('PKG:%s' % pkg) | ||
1652 | if pkgval is None: | ||
1653 | pkgval = pkg | ||
1654 | d.setVar('PKG:%s' % pkg, pkg) | ||
1655 | |||
1656 | extended_data = { | ||
1657 | "files_info": {} | ||
1658 | } | ||
1659 | |||
1660 | pkgdestpkg = os.path.join(pkgdest, pkg) | ||
1661 | files = {} | ||
1662 | files_extra = {} | ||
1663 | total_size = 0 | ||
1664 | seen = set() | ||
1665 | for f in pkgfiles[pkg]: | ||
1666 | fpath = os.sep + os.path.relpath(f, pkgdestpkg) | ||
1667 | |||
1668 | fstat = os.lstat(f) | ||
1669 | files[fpath] = fstat.st_size | ||
1670 | |||
1671 | extended_data["files_info"].setdefault(fpath, {}) | ||
1672 | extended_data["files_info"][fpath]['size'] = fstat.st_size | ||
1673 | |||
1674 | if fstat.st_ino not in seen: | ||
1675 | seen.add(fstat.st_ino) | ||
1676 | total_size += fstat.st_size | ||
1677 | |||
1678 | if fpath in pkgdebugsource: | ||
1679 | extended_data["files_info"][fpath]['debugsrc'] = pkgdebugsource[fpath] | ||
1680 | del pkgdebugsource[fpath] | ||
1681 | |||
1682 | d.setVar('FILES_INFO:' + pkg , json.dumps(files, sort_keys=True)) | ||
1683 | |||
1684 | process_postinst_on_target(pkg, d.getVar("MLPREFIX")) | ||
1685 | add_set_e_to_scriptlets(pkg) | ||
1686 | |||
1687 | subdata_file = pkgdatadir + "/runtime/%s" % pkg | ||
1688 | with open(subdata_file, 'w') as sf: | ||
1689 | for var in (d.getVar('PKGDATA_VARS') or "").split(): | ||
1690 | val = write_if_exists(sf, pkg, var) | ||
1691 | |||
1692 | write_if_exists(sf, pkg, 'FILERPROVIDESFLIST') | ||
1693 | for dfile in sorted((d.getVar('FILERPROVIDESFLIST:' + pkg) or "").split()): | ||
1694 | write_if_exists(sf, pkg, 'FILERPROVIDES:' + dfile) | ||
1695 | |||
1696 | write_if_exists(sf, pkg, 'FILERDEPENDSFLIST') | ||
1697 | for dfile in sorted((d.getVar('FILERDEPENDSFLIST:' + pkg) or "").split()): | ||
1698 | write_if_exists(sf, pkg, 'FILERDEPENDS:' + dfile) | ||
1699 | |||
1700 | sf.write('%s:%s: %d\n' % ('PKGSIZE', pkg, total_size)) | ||
1701 | |||
1702 | subdata_extended_file = pkgdatadir + "/extended/%s.json.zstd" % pkg | ||
1703 | num_threads = int(d.getVar("BB_NUMBER_THREADS")) | ||
1704 | with bb.compress.zstd.open(subdata_extended_file, "wt", encoding="utf-8", num_threads=num_threads) as f: | ||
1705 | json.dump(extended_data, f, sort_keys=True, separators=(",", ":")) | ||
1706 | |||
1707 | # Symlinks needed for rprovides lookup | ||
1708 | rprov = d.getVar('RPROVIDES:%s' % pkg) or d.getVar('RPROVIDES') | ||
1709 | if rprov: | ||
1710 | for p in bb.utils.explode_deps(rprov): | ||
1711 | subdata_sym = pkgdatadir + "/runtime-rprovides/%s/%s" % (p, pkg) | ||
1712 | bb.utils.mkdirhier(os.path.dirname(subdata_sym)) | ||
1713 | oe.path.symlink("../../runtime/%s" % pkg, subdata_sym, True) | ||
1714 | |||
1715 | allow_empty = d.getVar('ALLOW_EMPTY:%s' % pkg) | ||
1716 | if not allow_empty: | ||
1717 | allow_empty = d.getVar('ALLOW_EMPTY') | ||
1718 | root = "%s/%s" % (pkgdest, pkg) | ||
1719 | os.chdir(root) | ||
1720 | g = glob('*') | ||
1721 | if g or allow_empty == "1": | ||
1722 | # Symlinks needed for reverse lookups (from the final package name) | ||
1723 | subdata_sym = pkgdatadir + "/runtime-reverse/%s" % pkgval | ||
1724 | oe.path.symlink("../runtime/%s" % pkg, subdata_sym, True) | ||
1725 | |||
1726 | packagedfile = pkgdatadir + '/runtime/%s.packaged' % pkg | ||
1727 | open(packagedfile, 'w').close() | ||
1728 | |||
1729 | if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d): | ||
1730 | write_extra_runtime_pkgs(variants, packages, pkgdatadir) | ||
1731 | |||
1732 | if bb.data.inherits_class('allarch', d) and not variants \ | ||
1733 | and not bb.data.inherits_class('packagegroup', d): | ||
1734 | write_extra_runtime_pkgs(global_variants, packages, pkgdatadir) | ||
1735 | |||
1736 | } | ||
1737 | emit_pkgdata[dirs] = "${PKGDESTWORK}/runtime ${PKGDESTWORK}/runtime-reverse ${PKGDESTWORK}/runtime-rprovides ${PKGDESTWORK}/extended" | ||
1738 | emit_pkgdata[vardepsexclude] = "BB_NUMBER_THREADS" | ||
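# Illustration (values are hypothetical): a runtime pkgdata file written above,
# e.g. ${PKGDESTWORK}/runtime/foo, contains one "VAR:pkg: value" line per
# populated PKGDATA_VARS entry plus the computed package size, roughly:
#   PKG:foo: foo
#   PKGV:foo: 1.0
#   RDEPENDS:foo: libc6 (>= 2.35)
#   FILES_INFO:foo: {"/usr/bin/foo": 13480}
#   PKGSIZE:foo: 13480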
1739 | |||
1740 | ldconfig_postinst_fragment() { | ||
1741 | if [ x"$D" = "x" ]; then | ||
1742 | if [ -x /sbin/ldconfig ]; then /sbin/ldconfig ; fi | ||
1743 | fi | ||
1744 | } | ||
1745 | |||
1746 | RPMDEPS = "${STAGING_LIBDIR_NATIVE}/rpm/rpmdeps --alldeps --define '__font_provides %{nil}'" | ||
1747 | |||
1748 | # Collect perfile run-time dependency metadata | ||
1749 | # Output: | ||
1750 | # FILERPROVIDESFLIST:pkg - list of all files w/ deps | ||
1751 | # FILERPROVIDES:filepath:pkg - per file dep | ||
1752 | # | ||
1753 | # FILERDEPENDSFLIST:pkg - list of all files w/ deps | ||
1754 | # FILERDEPENDS:filepath:pkg - per file dep | ||
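# Illustration (values are hypothetical): for a package "libfoo" shipping
# /usr/lib/libfoo.so.1 linked against the C library, these might look like:
#   FILERPROVIDESFLIST:libfoo = "/usr/lib/libfoo.so.1"
#   FILERPROVIDES:/usr/lib/libfoo.so.1:libfoo = "libfoo.so.1()(64bit)"
#   FILERDEPENDSFLIST:libfoo = "/usr/lib/libfoo.so.1"
#   FILERDEPENDS:/usr/lib/libfoo.so.1:libfoo = "libc.so.6()(64bit)"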
1755 | |||
1756 | python package_do_filedeps() { | ||
1757 | if d.getVar('SKIP_FILEDEPS') == '1': | ||
1758 | return | ||
1759 | |||
1760 | pkgdest = d.getVar('PKGDEST') | ||
1761 | packages = d.getVar('PACKAGES') | ||
1762 | rpmdeps = d.getVar('RPMDEPS') | ||
1763 | |||
1764 | def chunks(files, n): | ||
1765 | return [files[i:i+n] for i in range(0, len(files), n)] | ||
1766 | |||
1767 | pkglist = [] | ||
1768 | for pkg in packages.split(): | ||
1769 | if d.getVar('SKIP_FILEDEPS:' + pkg) == '1': | ||
1770 | continue | ||
1771 | if pkg.endswith('-dbg') or pkg.endswith('-doc') or pkg.find('-locale-') != -1 or pkg.find('-localedata-') != -1 or pkg.find('-gconv-') != -1 or pkg.find('-charmap-') != -1 or pkg.startswith('kernel-module-') or pkg.endswith('-src'): | ||
1772 | continue | ||
1773 | for files in chunks(pkgfiles[pkg], 100): | ||
1774 | pkglist.append((pkg, files, rpmdeps, pkgdest)) | ||
1775 | |||
1776 | processed = oe.utils.multiprocess_launch(oe.package.filedeprunner, pkglist, d) | ||
1777 | |||
1778 | provides_files = {} | ||
1779 | requires_files = {} | ||
1780 | |||
1781 | for result in processed: | ||
1782 | (pkg, provides, requires) = result | ||
1783 | |||
1784 | if pkg not in provides_files: | ||
1785 | provides_files[pkg] = [] | ||
1786 | if pkg not in requires_files: | ||
1787 | requires_files[pkg] = [] | ||
1788 | |||
1789 | for file in sorted(provides): | ||
1790 | provides_files[pkg].append(file) | ||
1791 | key = "FILERPROVIDES:" + file + ":" + pkg | ||
1792 | d.appendVar(key, " " + " ".join(provides[file])) | ||
1793 | |||
1794 | for file in sorted(requires): | ||
1795 | requires_files[pkg].append(file) | ||
1796 | key = "FILERDEPENDS:" + file + ":" + pkg | ||
1797 | d.appendVar(key, " " + " ".join(requires[file])) | ||
1798 | |||
1799 | for pkg in requires_files: | ||
1800 | d.setVar("FILERDEPENDSFLIST:" + pkg, " ".join(sorted(requires_files[pkg]))) | ||
1801 | for pkg in provides_files: | ||
1802 | d.setVar("FILERPROVIDESFLIST:" + pkg, " ".join(sorted(provides_files[pkg]))) | ||
1803 | } | ||
1804 | |||
1805 | SHLIBSDIRS = "${WORKDIR_PKGDATA}/${MLPREFIX}shlibs2" | ||
1806 | SHLIBSWORKDIR = "${PKGDESTWORK}/${MLPREFIX}shlibs2" | ||
1807 | |||
1808 | python package_do_shlibs() { | ||
1809 | import itertools | ||
1810 | import re, pipes | ||
1811 | import subprocess | ||
1812 | |||
1813 | exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', False) | ||
1814 | if exclude_shlibs: | ||
1815 | bb.note("not generating shlibs") | ||
1816 | return | ||
1817 | |||
1818 | lib_re = re.compile(r"^.*\.so") | ||
1819 | libdir_re = re.compile(r".*/%s$" % d.getVar('baselib')) | ||
1820 | |||
1821 | packages = d.getVar('PACKAGES') | ||
1822 | |||
1823 | shlib_pkgs = [] | ||
1824 | exclusion_list = d.getVar("EXCLUDE_PACKAGES_FROM_SHLIBS") | ||
1825 | if exclusion_list: | ||
1826 | for pkg in packages.split(): | ||
1827 | if pkg not in exclusion_list.split(): | ||
1828 | shlib_pkgs.append(pkg) | ||
1829 | else: | ||
1830 | bb.note("not generating shlibs for %s" % pkg) | ||
1831 | else: | ||
1832 | shlib_pkgs = packages.split() | ||
1833 | |||
1834 | hostos = d.getVar('HOST_OS') | ||
1835 | |||
1836 | workdir = d.getVar('WORKDIR') | ||
1837 | |||
1838 | ver = d.getVar('PKGV') | ||
1839 | if not ver: | ||
1840 | msg = "PKGV not defined" | ||
1841 | oe.qa.handle_error("pkgv-undefined", msg, d) | ||
1842 | return | ||
1843 | |||
1844 | pkgdest = d.getVar('PKGDEST') | ||
1845 | |||
1846 | shlibswork_dir = d.getVar('SHLIBSWORKDIR') | ||
1847 | |||
1848 | def linux_so(file, pkg, pkgver, d): | ||
1849 | needs_ldconfig = False | ||
1850 | needed = set() | ||
1851 | sonames = set() | ||
1852 | renames = [] | ||
1853 | ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '') | ||
1854 | cmd = d.getVar('OBJDUMP') + " -p " + pipes.quote(file) + " 2>/dev/null" | ||
1855 | fd = os.popen(cmd) | ||
1856 | lines = fd.readlines() | ||
1857 | fd.close() | ||
1858 | rpath = tuple() | ||
1859 | for l in lines: | ||
1860 | m = re.match(r"\s+RPATH\s+([^\s]*)", l) | ||
1861 | if m: | ||
1862 | rpaths = m.group(1).replace("$ORIGIN", ldir).split(":") | ||
1863 | rpath = tuple(map(os.path.normpath, rpaths)) | ||
1864 | for l in lines: | ||
1865 | m = re.match(r"\s+NEEDED\s+([^\s]*)", l) | ||
1866 | if m: | ||
1867 | dep = m.group(1) | ||
1868 | if dep not in needed: | ||
1869 | needed.add((dep, file, rpath)) | ||
1870 | m = re.match(r"\s+SONAME\s+([^\s]*)", l) | ||
1871 | if m: | ||
1872 | this_soname = m.group(1) | ||
1873 | prov = (this_soname, ldir, pkgver) | ||
1874 | if not prov in sonames: | ||
1875 | # if the library is private (only used by this package) then do not generate shlib data for it | ||
1876 | import fnmatch | ||
1877 | if not private_libs or len([i for i in private_libs if fnmatch.fnmatch(this_soname, i)]) == 0: | ||
1878 | sonames.add(prov) | ||
1879 | if libdir_re.match(os.path.dirname(file)): | ||
1880 | needs_ldconfig = True | ||
1881 | if snap_symlinks and (os.path.basename(file) != this_soname): | ||
1882 | renames.append((file, os.path.join(os.path.dirname(file), this_soname))) | ||
1883 | return (needs_ldconfig, needed, sonames, renames) | ||
1884 | |||
1885 | def darwin_so(file, needed, sonames, renames, pkgver): | ||
1886 | if not os.path.exists(file): | ||
1887 | return | ||
1888 | ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '') | ||
1889 | |||
1890 | def get_combinations(base): | ||
1891 | # | ||
1892 | # Given a base library name, find all combinations of this split by "." and "-" | ||
1893 | # | ||
1894 | combos = [] | ||
1895 | options = base.split(".") | ||
1896 | for i in range(1, len(options) + 1): | ||
1897 | combos.append(".".join(options[0:i])) | ||
1898 | options = base.split("-") | ||
1899 | for i in range(1, len(options) + 1): | ||
1900 | combos.append("-".join(options[0:i])) | ||
1901 | return combos | ||
1902 | |||
1903 | if (file.endswith('.dylib') or file.endswith('.so')) and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.endswith('-src'): | ||
1904 | # Drop suffix | ||
1905 | name = os.path.basename(file).rsplit(".",1)[0] | ||
1906 | # Find all combinations | ||
1907 | combos = get_combinations(name) | ||
1908 | for combo in combos: | ||
1909 | if not combo in sonames: | ||
1910 | prov = (combo, ldir, pkgver) | ||
1911 | sonames.add(prov) | ||
1912 | if file.endswith('.dylib') or file.endswith('.so'): | ||
1913 | rpath = [] | ||
1914 | p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-l', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE) | ||
1915 | out, err = p.communicate() | ||
1916 | # If returned successfully, process stdout for results | ||
1917 | if p.returncode == 0: | ||
1918 | for l in out.decode().split("\n"): | ||
1919 | l = l.strip() | ||
1920 | if l.startswith('path '): | ||
1921 | rpath.append(l.split()[1]) | ||
1922 | |||
1923 | p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-L', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE) | ||
1924 | out, err = p.communicate() | ||
1925 | # If returned successfully, process stdout for results | ||
1926 | if p.returncode == 0: | ||
1927 | for l in out.decode().split("\n"): | ||
1928 | l = l.strip() | ||
1929 | if not l or l.endswith(":"): | ||
1930 | continue | ||
1931 | if "is not an object file" in l: | ||
1932 | continue | ||
1933 | name = os.path.basename(l.split()[0]).rsplit(".", 1)[0] | ||
1934 | if name and name not in needed[pkg]: | ||
1935 | needed[pkg].add((name, file, tuple())) | ||
1936 | |||
1937 | def mingw_dll(file, needed, sonames, renames, pkgver): | ||
1938 | if not os.path.exists(file): | ||
1939 | return | ||
1940 | |||
1941 | if file.endswith(".dll"): | ||
1942 | # assume all dlls are shared objects provided by the package | ||
1943 | sonames.add((os.path.basename(file), os.path.dirname(file).replace(pkgdest + "/" + pkg, ''), pkgver)) | ||
1944 | |||
1945 | if (file.endswith(".dll") or file.endswith(".exe")): | ||
1946 | # use objdump to search for "DLL Name: .*\.dll" | ||
1947 | p = subprocess.Popen([d.expand("${HOST_PREFIX}objdump"), "-p", file], stdout=subprocess.PIPE, stderr=subprocess.PIPE) | ||
1948 | out, err = p.communicate() | ||
1949 | # process the output, grabbing all .dll names | ||
1950 | if p.returncode == 0: | ||
1951 | for m in re.finditer(r"DLL Name: (.*?\.dll)$", out.decode(), re.MULTILINE | re.IGNORECASE): | ||
1952 | dllname = m.group(1) | ||
1953 | if dllname: | ||
1954 | needed[pkg].add((dllname, file, tuple())) | ||
1955 | |||
1956 | if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS') == "1": | ||
1957 | snap_symlinks = True | ||
1958 | else: | ||
1959 | snap_symlinks = False | ||
1960 | |||
1961 | needed = {} | ||
1962 | |||
1963 | shlib_provider = oe.package.read_shlib_providers(d) | ||
1964 | |||
1965 | for pkg in shlib_pkgs: | ||
1966 | private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or "" | ||
1967 | private_libs = private_libs.split() | ||
1968 | needs_ldconfig = False | ||
1969 | bb.debug(2, "calculating shlib provides for %s" % pkg) | ||
1970 | |||
1971 | pkgver = d.getVar('PKGV:' + pkg) | ||
1972 | if not pkgver: | ||
1973 | pkgver = d.getVar('PV_' + pkg) | ||
1974 | if not pkgver: | ||
1975 | pkgver = ver | ||
1976 | |||
1977 | needed[pkg] = set() | ||
1978 | sonames = set() | ||
1979 | renames = [] | ||
1980 | linuxlist = [] | ||
1981 | for file in pkgfiles[pkg]: | ||
1982 | soname = None | ||
1983 | if cpath.islink(file): | ||
1984 | continue | ||
1985 | if hostos == "darwin" or hostos == "darwin8": | ||
1986 | darwin_so(file, needed, sonames, renames, pkgver) | ||
1987 | elif hostos.startswith("mingw"): | ||
1988 | mingw_dll(file, needed, sonames, renames, pkgver) | ||
1989 | elif os.access(file, os.X_OK) or lib_re.match(file): | ||
1990 | linuxlist.append(file) | ||
1991 | |||
1992 | if linuxlist: | ||
1993 | results = oe.utils.multiprocess_launch(linux_so, linuxlist, d, extraargs=(pkg, pkgver, d)) | ||
1994 | for r in results: | ||
1995 | ldconfig = r[0] | ||
1996 | needed[pkg] |= r[1] | ||
1997 | sonames |= r[2] | ||
1998 | renames.extend(r[3]) | ||
1999 | needs_ldconfig = needs_ldconfig or ldconfig | ||
2000 | |||
2001 | for (old, new) in renames: | ||
2002 | bb.note("Renaming %s to %s" % (old, new)) | ||
2003 | bb.utils.rename(old, new) | ||
2004 | pkgfiles[pkg].remove(old) | ||
2005 | |||
2006 | shlibs_file = os.path.join(shlibswork_dir, pkg + ".list") | ||
2007 | if len(sonames): | ||
2008 | with open(shlibs_file, 'w') as fd: | ||
2009 | for s in sorted(sonames): | ||
2010 | if s[0] in shlib_provider and s[1] in shlib_provider[s[0]]: | ||
2011 | (old_pkg, old_pkgver) = shlib_provider[s[0]][s[1]] | ||
2012 | if old_pkg != pkg: | ||
2013 | bb.warn('%s-%s was registered as shlib provider for %s, changing it to %s-%s because it was built later' % (old_pkg, old_pkgver, s[0], pkg, pkgver)) | ||
2014 | bb.debug(1, 'registering %s-%s as shlib provider for %s' % (pkg, pkgver, s[0])) | ||
2015 | fd.write(s[0] + ':' + s[1] + ':' + s[2] + '\n') | ||
2016 | if s[0] not in shlib_provider: | ||
2017 | shlib_provider[s[0]] = {} | ||
2018 | shlib_provider[s[0]][s[1]] = (pkg, pkgver) | ||
2019 | if needs_ldconfig: | ||
2020 | bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg) | ||
2021 | postinst = d.getVar('pkg_postinst:%s' % pkg) | ||
2022 | if not postinst: | ||
2023 | postinst = '#!/bin/sh\n' | ||
2024 | postinst += d.getVar('ldconfig_postinst_fragment') | ||
2025 | d.setVar('pkg_postinst:%s' % pkg, postinst) | ||
2026 | bb.debug(1, 'LIBNAMES: pkg %s sonames %s' % (pkg, sonames)) | ||
2027 | |||
2028 | assumed_libs = d.getVar('ASSUME_SHLIBS') | ||
2029 | if assumed_libs: | ||
2030 | libdir = d.getVar("libdir") | ||
2031 | for e in assumed_libs.split(): | ||
2032 | l, dep_pkg = e.split(":") | ||
2033 | lib_ver = None | ||
2034 | dep_pkg = dep_pkg.rsplit("_", 1) | ||
2035 | if len(dep_pkg) == 2: | ||
2036 | lib_ver = dep_pkg[1] | ||
2037 | dep_pkg = dep_pkg[0] | ||
2038 | if l not in shlib_provider: | ||
2039 | shlib_provider[l] = {} | ||
2040 | shlib_provider[l][libdir] = (dep_pkg, lib_ver) | ||
2041 | |||
2042 | libsearchpath = [d.getVar('libdir'), d.getVar('base_libdir')] | ||
2043 | |||
2044 | for pkg in shlib_pkgs: | ||
2045 | bb.debug(2, "calculating shlib requirements for %s" % pkg) | ||
2046 | |||
2047 | private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or "" | ||
2048 | private_libs = private_libs.split() | ||
2049 | |||
2050 | deps = list() | ||
2051 | for n in needed[pkg]: | ||
2052 | # If n is in the private libraries, don't try to search for a provider for it; | ||
2053 | # this could cause problems if some abc.bb provides a private | ||
2054 | # /opt/abc/lib/libfoo.so.1 and contains /usr/bin/abc depending on the system library libfoo.so.1, | ||
2055 | # but skipping it is still a better alternative than providing our own | ||
2056 | # version and then adding a runtime dependency on the same system library | ||
2057 | import fnmatch | ||
2058 | if private_libs and len([i for i in private_libs if fnmatch.fnmatch(n[0], i)]) > 0: | ||
2059 | bb.debug(2, '%s: Dependency %s covered by PRIVATE_LIBS' % (pkg, n[0])) | ||
2060 | continue | ||
2061 | if n[0] in shlib_provider.keys(): | ||
2062 | shlib_provider_map = shlib_provider[n[0]] | ||
2063 | matches = set() | ||
2064 | for p in itertools.chain(list(n[2]), sorted(shlib_provider_map.keys()), libsearchpath): | ||
2065 | if p in shlib_provider_map: | ||
2066 | matches.add(p) | ||
2067 | if len(matches) > 1: | ||
2068 | matchpkgs = ', '.join([shlib_provider_map[match][0] for match in matches]) | ||
2069 | bb.error("%s: Multiple shlib providers for %s: %s (used by files: %s)" % (pkg, n[0], matchpkgs, n[1])) | ||
2070 | elif len(matches) == 1: | ||
2071 | (dep_pkg, ver_needed) = shlib_provider_map[matches.pop()] | ||
2072 | |||
2073 | bb.debug(2, '%s: Dependency %s requires package %s (used by files: %s)' % (pkg, n[0], dep_pkg, n[1])) | ||
2074 | |||
2075 | if dep_pkg == pkg: | ||
2076 | continue | ||
2077 | |||
2078 | if ver_needed: | ||
2079 | dep = "%s (>= %s)" % (dep_pkg, ver_needed) | ||
2080 | else: | ||
2081 | dep = dep_pkg | ||
2082 | if not dep in deps: | ||
2083 | deps.append(dep) | ||
2084 | continue | ||
2085 | bb.note("Couldn't find shared library provider for %s, used by files: %s" % (n[0], n[1])) | ||
2086 | |||
2087 | deps_file = os.path.join(pkgdest, pkg + ".shlibdeps") | ||
2088 | if os.path.exists(deps_file): | ||
2089 | os.remove(deps_file) | ||
2090 | if deps: | ||
2091 | with open(deps_file, 'w') as fd: | ||
2092 | for dep in sorted(deps): | ||
2093 | fd.write(dep + '\n') | ||
2094 | } | ||
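# Illustration (values are hypothetical): each <pkg>.list written to
# SHLIBSWORKDIR above holds one "soname:dirname:version" line per provided
# library, e.g.
#   libfoo.so.1:/usr/lib:1.2.3
# and <pkg>.shlibdeps in PKGDEST lists the resolved run-time dependencies,
# one per line, e.g.
#   libbar (>= 2.0)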
2095 | |||
2096 | python package_do_pkgconfig () { | ||
2097 | import re | ||
2098 | |||
2099 | packages = d.getVar('PACKAGES') | ||
2100 | workdir = d.getVar('WORKDIR') | ||
2101 | pkgdest = d.getVar('PKGDEST') | ||
2102 | |||
2103 | shlibs_dirs = d.getVar('SHLIBSDIRS').split() | ||
2104 | shlibswork_dir = d.getVar('SHLIBSWORKDIR') | ||
2105 | |||
2106 | pc_re = re.compile(r'(.*)\.pc$') | ||
2107 | var_re = re.compile(r'(.*)=(.*)') | ||
2108 | field_re = re.compile(r'(.*): (.*)') | ||
2109 | |||
2110 | pkgconfig_provided = {} | ||
2111 | pkgconfig_needed = {} | ||
2112 | for pkg in packages.split(): | ||
2113 | pkgconfig_provided[pkg] = [] | ||
2114 | pkgconfig_needed[pkg] = [] | ||
2115 | for file in sorted(pkgfiles[pkg]): | ||
2116 | m = pc_re.match(file) | ||
2117 | if m: | ||
2118 | pd = bb.data.init() | ||
2119 | name = m.group(1) | ||
2120 | pkgconfig_provided[pkg].append(os.path.basename(name)) | ||
2121 | if not os.access(file, os.R_OK): | ||
2122 | continue | ||
2123 | with open(file, 'r') as f: | ||
2124 | lines = f.readlines() | ||
2125 | for l in lines: | ||
2126 | m = var_re.match(l) | ||
2127 | if m: | ||
2128 | name = m.group(1) | ||
2129 | val = m.group(2) | ||
2130 | pd.setVar(name, pd.expand(val)) | ||
2131 | continue | ||
2132 | m = field_re.match(l) | ||
2133 | if m: | ||
2134 | hdr = m.group(1) | ||
2135 | exp = pd.expand(m.group(2)) | ||
2136 | if hdr == 'Requires': | ||
2137 | pkgconfig_needed[pkg] += exp.replace(',', ' ').split() | ||
2138 | |||
2139 | for pkg in packages.split(): | ||
2140 | pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist") | ||
2141 | if pkgconfig_provided[pkg] != []: | ||
2142 | with open(pkgs_file, 'w') as f: | ||
2143 | for p in sorted(pkgconfig_provided[pkg]): | ||
2144 | f.write('%s\n' % p) | ||
2145 | |||
2146 | # Go from least to most specific since the last one found wins | ||
2147 | for dir in reversed(shlibs_dirs): | ||
2148 | if not os.path.exists(dir): | ||
2149 | continue | ||
2150 | for file in sorted(os.listdir(dir)): | ||
2151 | m = re.match(r'^(.*)\.pclist$', file) | ||
2152 | if m: | ||
2153 | pkg = m.group(1) | ||
2154 | with open(os.path.join(dir, file)) as fd: | ||
2155 | lines = fd.readlines() | ||
2156 | pkgconfig_provided[pkg] = [] | ||
2157 | for l in lines: | ||
2158 | pkgconfig_provided[pkg].append(l.rstrip()) | ||
2159 | |||
2160 | for pkg in packages.split(): | ||
2161 | deps = [] | ||
2162 | for n in pkgconfig_needed[pkg]: | ||
2163 | found = False | ||
2164 | for k in pkgconfig_provided.keys(): | ||
2165 | if n in pkgconfig_provided[k]: | ||
2166 | if k != pkg and not (k in deps): | ||
2167 | deps.append(k) | ||
2168 | found = True | ||
2169 | if found == False: | ||
2170 | bb.note("couldn't find pkgconfig module '%s' in any package" % n) | ||
2171 | deps_file = os.path.join(pkgdest, pkg + ".pcdeps") | ||
2172 | if len(deps): | ||
2173 | with open(deps_file, 'w') as fd: | ||
2174 | for dep in deps: | ||
2175 | fd.write(dep + '\n') | ||
2176 | } | ||
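# Illustration (names are hypothetical): <pkg>.pclist in SHLIBSWORKDIR lists
# the pkg-config modules the package provides, one basename per line (e.g.
# "foo"), while <pkg>.pcdeps in PKGDEST lists the packages whose modules
# satisfy this package's "Requires:" entries, one package name per line.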
2177 | |||
2178 | def read_libdep_files(d): | ||
2179 | pkglibdeps = {} | ||
2180 | packages = d.getVar('PACKAGES').split() | ||
2181 | for pkg in packages: | ||
2182 | pkglibdeps[pkg] = {} | ||
2183 | for extension in ".shlibdeps", ".pcdeps", ".clilibdeps": | ||
2184 | depsfile = d.expand("${PKGDEST}/" + pkg + extension) | ||
2185 | if os.access(depsfile, os.R_OK): | ||
2186 | with open(depsfile) as fd: | ||
2187 | lines = fd.readlines() | ||
2188 | for l in lines: | ||
2189 | l = l.rstrip() | ||
2190 | deps = bb.utils.explode_dep_versions2(l) | ||
2191 | for dep in deps: | ||
2192 | if not dep in pkglibdeps[pkg]: | ||
2193 | pkglibdeps[pkg][dep] = deps[dep] | ||
2194 | return pkglibdeps | ||
2195 | |||
2196 | python read_shlibdeps () { | ||
2197 | pkglibdeps = read_libdep_files(d) | ||
2198 | |||
2199 | packages = d.getVar('PACKAGES').split() | ||
2200 | for pkg in packages: | ||
2201 | rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS:' + pkg) or "") | ||
2202 | for dep in sorted(pkglibdeps[pkg]): | ||
2203 | # Add the dep if it's not already there, or if no comparison is set | ||
2204 | if dep not in rdepends: | ||
2205 | rdepends[dep] = [] | ||
2206 | for v in pkglibdeps[pkg][dep]: | ||
2207 | if v not in rdepends[dep]: | ||
2208 | rdepends[dep].append(v) | ||
2209 | d.setVar('RDEPENDS:' + pkg, bb.utils.join_deps(rdepends, commasep=False)) | ||
2210 | } | ||
2211 | |||
2212 | python package_depchains() { | ||
2213 | """ | ||
2214 | For a given set of prefix and postfix modifiers, make those packages | ||
2215 | RRECOMMENDS on the corresponding packages for its RDEPENDS. | ||
2216 | |||
2217 | Example: If package A depends upon package B, and A's .bb emits an | ||
2218 | A-dev package, this would make A-dev Recommends: B-dev. | ||
2219 | |||
2220 | If only one of a given suffix is specified, it will take the RRECOMMENDS | ||
2221 | based on the RDEPENDS of *all* other packages. If more than one of a given | ||
2222 | suffix is specified, it will only use the RDEPENDS of the single parent | ||
2223 | package. | ||
2224 | """ | ||
2225 | |||
2226 | packages = d.getVar('PACKAGES') | ||
2227 | postfixes = (d.getVar('DEPCHAIN_POST') or '').split() | ||
2228 | prefixes = (d.getVar('DEPCHAIN_PRE') or '').split() | ||
2229 | |||
2230 | def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d): | ||
2231 | |||
2232 | #bb.note('depends for %s is %s' % (base, depends)) | ||
2233 | rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "") | ||
2234 | |||
2235 | for depend in sorted(depends): | ||
2236 | if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'): | ||
2237 | #bb.note("Skipping %s" % depend) | ||
2238 | continue | ||
2239 | if depend.endswith('-dev'): | ||
2240 | depend = depend[:-4] | ||
2241 | if depend.endswith('-dbg'): | ||
2242 | depend = depend[:-4] | ||
2243 | pkgname = getname(depend, suffix) | ||
2244 | #bb.note("Adding %s for %s" % (pkgname, depend)) | ||
2245 | if pkgname not in rreclist and pkgname != pkg: | ||
2246 | rreclist[pkgname] = [] | ||
2247 | |||
2248 | #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist))) | ||
2249 | d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False)) | ||
2250 | |||
2251 | def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d): | ||
2252 | |||
2253 | #bb.note('rdepends for %s is %s' % (base, rdepends)) | ||
2254 | rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "") | ||
2255 | |||
2256 | for depend in sorted(rdepends): | ||
2257 | if depend.find('virtual-locale-') != -1: | ||
2258 | #bb.note("Skipping %s" % depend) | ||
2259 | continue | ||
2260 | if depend.endswith('-dev'): | ||
2261 | depend = depend[:-4] | ||
2262 | if depend.endswith('-dbg'): | ||
2263 | depend = depend[:-4] | ||
2264 | pkgname = getname(depend, suffix) | ||
2265 | #bb.note("Adding %s for %s" % (pkgname, depend)) | ||
2266 | if pkgname not in rreclist and pkgname != pkg: | ||
2267 | rreclist[pkgname] = [] | ||
2268 | |||
2269 | #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist))) | ||
2270 | d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False)) | ||
2271 | |||
2272 | def add_dep(list, dep): | ||
2273 | if dep not in list: | ||
2274 | list.append(dep) | ||
2275 | |||
2276 | depends = [] | ||
2277 | for dep in bb.utils.explode_deps(d.getVar('DEPENDS') or ""): | ||
2278 | add_dep(depends, dep) | ||
2279 | |||
2280 | rdepends = [] | ||
2281 | for pkg in packages.split(): | ||
2282 | for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + pkg) or ""): | ||
2283 | add_dep(rdepends, dep) | ||
2284 | |||
2285 | #bb.note('rdepends is %s' % rdepends) | ||
2286 | |||
2287 | def post_getname(name, suffix): | ||
2288 | return '%s%s' % (name, suffix) | ||
2289 | def pre_getname(name, suffix): | ||
2290 | return '%s%s' % (suffix, name) | ||
2291 | |||
2292 | pkgs = {} | ||
2293 | for pkg in packages.split(): | ||
2294 | for postfix in postfixes: | ||
2295 | if pkg.endswith(postfix): | ||
2296 | if not postfix in pkgs: | ||
2297 | pkgs[postfix] = {} | ||
2298 | pkgs[postfix][pkg] = (pkg[:-len(postfix)], post_getname) | ||
2299 | |||
2300 | for prefix in prefixes: | ||
2301 | if pkg.startswith(prefix): | ||
2302 | if not prefix in pkgs: | ||
2303 | pkgs[prefix] = {} | ||
2304 | pkgs[prefix][pkg] = (pkg[:-len(prefix)], pre_getname) | ||
2305 | |||
2306 | if "-dbg" in pkgs: | ||
2307 | pkglibdeps = read_libdep_files(d) | ||
2308 | pkglibdeplist = [] | ||
2309 | for pkg in pkglibdeps: | ||
2310 | for k in pkglibdeps[pkg]: | ||
2311 | add_dep(pkglibdeplist, k) | ||
2312 | dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS') == '1') or (bb.data.inherits_class('packagegroup', d))) | ||
2313 | |||
2314 | for suffix in pkgs: | ||
2315 | for pkg in pkgs[suffix]: | ||
2316 | if d.getVarFlag('RRECOMMENDS:' + pkg, 'nodeprrecs'): | ||
2317 | continue | ||
2318 | (base, func) = pkgs[suffix][pkg] | ||
2319 | if suffix == "-dev": | ||
2320 | pkg_adddeprrecs(pkg, base, suffix, func, depends, d) | ||
2321 | elif suffix == "-dbg": | ||
2322 | if not dbgdefaultdeps: | ||
2323 | pkg_addrrecs(pkg, base, suffix, func, pkglibdeplist, d) | ||
2324 | continue | ||
2325 | if len(pkgs[suffix]) == 1: | ||
2326 | pkg_addrrecs(pkg, base, suffix, func, rdepends, d) | ||
2327 | else: | ||
2328 | rdeps = [] | ||
2329 | for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + base) or ""): | ||
2330 | add_dep(rdeps, dep) | ||
2331 | pkg_addrrecs(pkg, base, suffix, func, rdeps, d) | ||
2332 | } | ||
2333 | |||
2334 | # Since bitbake can't determine which variables are accessed during package | ||
2335 | # iteration, we need to list them here: | ||
2336 | PACKAGEVARS = "FILES RDEPENDS RRECOMMENDS SUMMARY DESCRIPTION RSUGGESTS RPROVIDES RCONFLICTS PKG ALLOW_EMPTY pkg_postinst pkg_postrm pkg_postinst_ontarget INITSCRIPT_NAME INITSCRIPT_PARAMS DEBIAN_NOAUTONAME ALTERNATIVE PKGE PKGV PKGR USERADD_PARAM GROUPADD_PARAM CONFFILES SYSTEMD_SERVICE LICENSE SECTION pkg_preinst pkg_prerm RREPLACES GROUPMEMS_PARAM SYSTEMD_AUTO_ENABLE SKIP_FILEDEPS PRIVATE_LIBS PACKAGE_ADD_METADATA" | ||
2337 | |||
2338 | def gen_packagevar(d, pkgvars="PACKAGEVARS"): | ||
2339 | ret = [] | ||
2340 | pkgs = (d.getVar("PACKAGES") or "").split() | ||
2341 | vars = (d.getVar(pkgvars) or "").split() | ||
2342 | for v in vars: | ||
2343 | ret.append(v) | ||
2344 | for p in pkgs: | ||
2345 | for v in vars: | ||
2346 | ret.append(v + ":" + p) | ||
2347 | |||
2348 | # Ensure that changes to INCOMPATIBLE_LICENSE cause do_package to re-run for | ||
2349 | # affected recipes. | ||
2350 | ret.append('LICENSE_EXCLUSION-%s' % p) | ||
2351 | return " ".join(ret) | ||
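# Illustration (hypothetical recipe): with PACKAGES = "foo foo-dev", the
# expression ${@gen_packagevar(d)} used below expands to roughly
#   "FILES RDEPENDS ... FILES:foo RDEPENDS:foo ... LICENSE_EXCLUSION-foo
#    FILES:foo-dev RDEPENDS:foo-dev ... LICENSE_EXCLUSION-foo-dev"
# so every per-package variable becomes part of the do_package signature.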
2352 | |||
2353 | PACKAGE_PREPROCESS_FUNCS ?= "" | ||
2354 | # Functions for setting up PKGD | ||
2355 | PACKAGEBUILDPKGD ?= " \ | ||
2356 | package_prepare_pkgdata \ | ||
2357 | perform_packagecopy \ | ||
2358 | ${PACKAGE_PREPROCESS_FUNCS} \ | ||
2359 | split_and_strip_files \ | ||
2360 | fixup_perms \ | ||
2361 | " | ||
2362 | # Functions which split PKGD up into separate packages | ||
2363 | PACKAGESPLITFUNCS ?= " \ | ||
2364 | package_do_split_locales \ | ||
2365 | populate_packages" | ||
2366 | # Functions which process metadata based on split packages | ||
2367 | PACKAGEFUNCS += " \ | ||
2368 | package_fixsymlinks \ | ||
2369 | package_name_hook \ | ||
2370 | package_do_filedeps \ | ||
2371 | package_do_shlibs \ | ||
2372 | package_do_pkgconfig \ | ||
2373 | read_shlibdeps \ | ||
2374 | package_depchains \ | ||
2375 | emit_pkgdata" | ||
2376 | |||
2377 | python do_package () { | ||
2378 | # Change the following version to cause sstate to invalidate the package | ||
2379 | # cache. This is useful if an item this class depends on changes in a | ||
2380 | # way that the output of this class changes. rpmdeps is a good example | ||
2381 | # as any change to rpmdeps requires this to be rerun. | ||
2382 | # PACKAGE_BBCLASS_VERSION = "4" | ||
2383 | |||
2384 | # Init cachedpath | ||
2385 | global cpath | ||
2386 | cpath = oe.cachedpath.CachedPath() | ||
2387 | |||
2388 | ########################################################################### | ||
2389 | # Sanity test the setup | ||
2390 | ########################################################################### | ||
2391 | |||
2392 | packages = (d.getVar('PACKAGES') or "").split() | ||
2393 | if len(packages) < 1: | ||
2394 | bb.debug(1, "No packages to build, skipping do_package") | ||
2395 | return | ||
2396 | |||
2397 | workdir = d.getVar('WORKDIR') | ||
2398 | outdir = d.getVar('DEPLOY_DIR') | ||
2399 | dest = d.getVar('D') | ||
2400 | dvar = d.getVar('PKGD') | ||
2401 | pn = d.getVar('PN') | ||
2402 | |||
2403 | if not workdir or not outdir or not dest or not dvar or not pn: | ||
2404 | msg = "WORKDIR, DEPLOY_DIR, D, PN and PKGD all must be defined, unable to package" | ||
2405 | oe.qa.handle_error("var-undefined", msg, d) | ||
2406 | return | ||
2407 | |||
2408 | bb.build.exec_func("package_convert_pr_autoinc", d) | ||
2409 | |||
2410 | ########################################################################### | ||
2411 | # Optimisations | ||
2412 | ########################################################################### | ||
2413 | |||
2414 | # Continually expanding complex expressions is inefficient, particularly | ||
2415 | # when we write to the datastore and invalidate the expansion cache. This | ||
2416 | # code pre-expands some frequently used variables | ||
2417 | |||
2418 | def expandVar(x, d): | ||
2419 | d.setVar(x, d.getVar(x)) | ||
2420 | |||
2421 | for x in 'PN', 'PV', 'BPN', 'TARGET_SYS', 'EXTENDPRAUTO': | ||
2422 | expandVar(x, d) | ||
2423 | |||
2424 | ########################################################################### | ||
2425 | # Setup PKGD (from D) | ||
2426 | ########################################################################### | ||
2427 | |||
2428 | for f in (d.getVar('PACKAGEBUILDPKGD') or '').split(): | ||
2429 | bb.build.exec_func(f, d) | ||
2430 | |||
2431 | ########################################################################### | ||
2432 | # Split up PKGD into PKGDEST | ||
2433 | ########################################################################### | ||
2434 | |||
2435 | cpath = oe.cachedpath.CachedPath() | ||
2436 | |||
2437 | for f in (d.getVar('PACKAGESPLITFUNCS') or '').split(): | ||
2438 | bb.build.exec_func(f, d) | ||
2439 | |||
2440 | ########################################################################### | ||
2441 | # Process PKGDEST | ||
2442 | ########################################################################### | ||
2443 | |||
2444 | # Build global list of files in each split package | ||
2445 | global pkgfiles | ||
2446 | pkgfiles = {} | ||
2447 | packages = d.getVar('PACKAGES').split() | ||
2448 | pkgdest = d.getVar('PKGDEST') | ||
2449 | for pkg in packages: | ||
2450 | pkgfiles[pkg] = [] | ||
2451 | for walkroot, dirs, files in cpath.walk(pkgdest + "/" + pkg): | ||
2452 | for file in files: | ||
2453 | pkgfiles[pkg].append(walkroot + os.sep + file) | ||
2454 | |||
2455 | for f in (d.getVar('PACKAGEFUNCS') or '').split(): | ||
2456 | bb.build.exec_func(f, d) | ||
2457 | |||
2458 | oe.qa.exit_if_errors(d) | ||
2459 | } | ||
2460 | |||
2461 | do_package[dirs] = "${SHLIBSWORKDIR} ${PKGDESTWORK} ${D}" | ||
2462 | do_package[vardeps] += "${PACKAGEBUILDPKGD} ${PACKAGESPLITFUNCS} ${PACKAGEFUNCS} ${@gen_packagevar(d)}" | ||
2463 | addtask package after do_install | ||
2464 | |||
2465 | SSTATETASKS += "do_package" | ||
2466 | do_package[cleandirs] = "${PKGDEST} ${PKGDESTWORK}" | ||
2467 | do_package[sstate-plaindirs] = "${PKGD} ${PKGDEST} ${PKGDESTWORK}" | ||
2468 | do_package_setscene[dirs] = "${STAGING_DIR}" | ||
2469 | |||
2470 | python do_package_setscene () { | ||
2471 | sstate_setscene(d) | ||
2472 | } | ||
2473 | addtask do_package_setscene | ||
2474 | |||
2475 | # Copy from PKGDESTWORK to a temporary directory, as the directory can be cleaned at both | ||
2476 | # do_package_setscene and do_packagedata_setscene, leading to races | ||
2477 | python do_packagedata () { | ||
2478 | bb.build.exec_func("package_get_auto_pr", d) | ||
2479 | |||
2480 | src = d.expand("${PKGDESTWORK}") | ||
2481 | dest = d.expand("${WORKDIR}/pkgdata-pdata-input") | ||
2482 | oe.path.copyhardlinktree(src, dest) | ||
2483 | |||
2484 | bb.build.exec_func("packagedata_translate_pr_autoinc", d) | ||
2485 | } | ||
2486 | do_packagedata[cleandirs] += "${WORKDIR}/pkgdata-pdata-input" | ||
2487 | |||
2488 | # Translate the EXTENDPRAUTO and AUTOINC to the final values | ||
2489 | packagedata_translate_pr_autoinc() { | ||
2490 | find ${WORKDIR}/pkgdata-pdata-input -type f | xargs --no-run-if-empty \ | ||
2491 | sed -e 's,@PRSERV_PV_AUTOINC@,${PRSERV_PV_AUTOINC},g' \ | ||
2492 | -e 's,@EXTENDPRAUTO@,${EXTENDPRAUTO},g' -i | ||
2493 | } | ||
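# Illustration (values are hypothetical): with PRSERV_PV_AUTOINC = "5", a
# pkgdata line such as
#   PKGV:foo: 1.0+git@PRSERV_PV_AUTOINC@
# becomes
#   PKGV:foo: 1.0+git5
# after the sed above; @EXTENDPRAUTO@ is substituted the same way.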
2494 | |||
2495 | addtask packagedata before do_build after do_package | ||
2496 | |||
2497 | SSTATETASKS += "do_packagedata" | ||
2498 | do_packagedata[sstate-inputdirs] = "${WORKDIR}/pkgdata-pdata-input" | ||
2499 | do_packagedata[sstate-outputdirs] = "${PKGDATA_DIR}" | ||
2500 | do_packagedata[stamp-extra-info] = "${MACHINE_ARCH}" | ||
2501 | |||
2502 | python do_packagedata_setscene () { | ||
2503 | sstate_setscene(d) | ||
2504 | } | ||
2505 | addtask do_packagedata_setscene | ||
2506 | |||
2507 | # | ||
2508 | # Helper functions for the package writing classes | ||
2509 | # | ||
2510 | |||
2511 | def mapping_rename_hook(d): | ||
2512 | """ | ||
2513 | Rewrite variables to account for package renaming in things | ||
2514 | like debian.bbclass or manual PKG variable name changes | ||
2515 | """ | ||
2516 | pkg = d.getVar("PKG") | ||
2517 | runtime_mapping_rename("RDEPENDS", pkg, d) | ||
2518 | runtime_mapping_rename("RRECOMMENDS", pkg, d) | ||
2519 | runtime_mapping_rename("RSUGGESTS", pkg, d) | ||