path: root/meta/classes/package.bbclass
author     Richard Purdie <richard.purdie@linuxfoundation.org>  2012-07-11 17:33:43 +0000
committer  Richard Purdie <richard.purdie@linuxfoundation.org>  2012-07-19 10:24:50 +0100
commit     bfd279de3275abbfaf3e630383ec244131e0375f (patch)
tree       0d1c90461a890d21444f5d2afb13c52b302427f1 /meta/classes/package.bbclass
parent     99203edda6f0b09d817454d656c100b7a8806b18 (diff)
download   poky-bfd279de3275abbfaf3e630383ec244131e0375f.tar.gz
Convert tab indentation in python functions into four-space
(From OE-Core rev: 604d46c686d06d62d5a07b9c7f4fa170f99307d8)

Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
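For illustration only (not part of the patch): a minimal sketch of the mechanical rewrite this commit applies to the python functions in the class, assuming a hypothetical helper named retab() that turns one leading tab per indent level into four spaces.

    def retab(source, spaces_per_tab=4):
        """Replace each leading tab with four spaces; the rest of the line is untouched."""
        fixed = []
        for line in source.splitlines(True):
            stripped = line.lstrip('\t')       # drop leading tabs only
            tabs = len(line) - len(stripped)   # number of indent levels
            fixed.append(' ' * (spaces_per_tab * tabs) + stripped)
        return ''.join(fixed)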
Diffstat (limited to 'meta/classes/package.bbclass')
-rw-r--r--  meta/classes/package.bbclass  2930
1 files changed, 1465 insertions, 1465 deletions
diff --git a/meta/classes/package.bbclass b/meta/classes/package.bbclass
index dfd42117c5..a51e955325 100644
--- a/meta/classes/package.bbclass
+++ b/meta/classes/package.bbclass
@@ -25,8 +25,8 @@
# The data is stores in FILER{PROVIDES,DEPENDS}_file_pkg variables with
# a list of affected files in FILER{PROVIDES,DEPENDS}FLIST_pkg
#
# h) package_do_shlibs - Look at the shared libraries generated and autotmatically add any
#    depenedencies found. Also stores the package name so anyone else using this library
#    knows which package to depend on.
#
# i) package_do_pkgconfig - Keep track of which packages need and provide which .pc files
@@ -35,7 +35,7 @@
#
# k) package_depchains - Adds automatic dependencies to -dbg and -dev packages
#
# l) emit_pkgdata - saves the packaging data into PKGDATA_DIR for use in later
#    packaging steps

inherit packagedata
@@ -52,112 +52,112 @@ ALL_MULTILIB_PACKAGE_ARCHS = "${@all_multilib_tune_values(d, 'PACKAGE_ARCHS')}"
PACKAGE_DEPENDS += "rpm-native"

def legitimize_package_name(s):
    """
    Make sure package names are legitimate strings
    """
    import re

    def fixutf(m):
        cp = m.group(1)
        if cp:
            return ('\u%s' % cp).decode('unicode_escape').encode('utf-8')

    # Handle unicode codepoints encoded as <U0123>, as in glibc locale files.
    s = re.sub('<U([0-9A-Fa-f]{1,4})>', fixutf, s)

    # Remaining package name validity fixes
    return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-')

def do_split_packages(d, root, file_regex, output_pattern, description, postinst=None, recursive=False, hook=None, extra_depends=None, aux_files_pattern=None, postrm=None, allow_dirs=False, prepend=False, match_path=False, aux_files_pattern_verbatim=None, allow_links=False):
    """
    Used in .bb files to split up dynamically generated subpackages of a
    given package, usually plugins or modules.
    """

    ml = d.getVar("MLPREFIX", True)
    if ml:
        if not output_pattern.startswith(ml):
            output_pattern = ml + output_pattern

        newdeps = []
        for dep in (extra_depends or "").split():
            if dep.startswith(ml):
                newdeps.append(dep)
            else:
                newdeps.append(ml + dep)
        if newdeps:
            extra_depends = " ".join(newdeps)

    dvar = d.getVar('PKGD', True)

    packages = d.getVar('PACKAGES', True).split()

    if postinst:
        postinst = '#!/bin/sh\n' + postinst + '\n'
    if postrm:
        postrm = '#!/bin/sh\n' + postrm + '\n'
    if not recursive:
        objs = os.listdir(dvar + root)
    else:
        objs = []
        for walkroot, dirs, files in os.walk(dvar + root):
            for file in files:
                relpath = os.path.join(walkroot, file).replace(dvar + root + '/', '', 1)
                if relpath:
                    objs.append(relpath)

    if extra_depends == None:
        extra_depends = d.getVar("PN", True)

    for o in sorted(objs):
        import re, stat
        if match_path:
            m = re.match(file_regex, o)
        else:
            m = re.match(file_regex, os.path.basename(o))

        if not m:
            continue
        f = os.path.join(dvar + root, o)
        mode = os.lstat(f).st_mode
        if not (stat.S_ISREG(mode) or (allow_links and stat.S_ISLNK(mode)) or (allow_dirs and stat.S_ISDIR(mode))):
            continue
        on = legitimize_package_name(m.group(1))
        pkg = output_pattern % on
        if not pkg in packages:
            if prepend:
                packages = [pkg] + packages
            else:
                packages.append(pkg)
        oldfiles = d.getVar('FILES_' + pkg, True)
        if not oldfiles:
            the_files = [os.path.join(root, o)]
            if aux_files_pattern:
                if type(aux_files_pattern) is list:
                    for fp in aux_files_pattern:
                        the_files.append(fp % on)
                else:
                    the_files.append(aux_files_pattern % on)
            if aux_files_pattern_verbatim:
                if type(aux_files_pattern_verbatim) is list:
                    for fp in aux_files_pattern_verbatim:
                        the_files.append(fp % m.group(1))
                else:
                    the_files.append(aux_files_pattern_verbatim % m.group(1))
            d.setVar('FILES_' + pkg, " ".join(the_files))
            if extra_depends != '':
                d.appendVar('RDEPENDS_' + pkg, ' ' + extra_depends)
            d.setVar('DESCRIPTION_' + pkg, description % on)
            if postinst:
                d.setVar('pkg_postinst_' + pkg, postinst)
            if postrm:
                d.setVar('pkg_postrm_' + pkg, postrm)
        else:
            d.setVar('FILES_' + pkg, oldfiles + " " + os.path.join(root, o))
        if callable(hook):
            hook(f, pkg, file_regex, output_pattern, m.group(1))

    d.setVar('PACKAGES', ' '.join(packages))

PACKAGE_DEPENDS += "file-native"

@@ -195,7 +195,7 @@ def splitfile(file, debugfile, debugsrcdir, d):

    # We ignore kernel modules, we don't generate debug info files.
    if file.find("/lib/modules/") != -1 and file.endswith(".ko"):
        return 1

    newmode = None
    if not os.access(file, os.W_OK) or os.access(file, os.R_OK):
@@ -205,7 +205,7 @@ def splitfile(file, debugfile, debugsrcdir, d):

    # We need to extract the debug src information here...
    if debugsrcdir:
        subprocess.call("%s'%s' -b '%s' -d '%s' -i -l '%s' '%s'" % (pathprefix, debugedit, workparentdir, debugsrcdir, sourcefile, file), shell=True)

    bb.mkdirhier(os.path.dirname(debugfile))

@@ -316,826 +316,826 @@ def runstrip(file, elftype, d):
#

def get_package_mapping (pkg, d):
    import oe.packagedata

    data = oe.packagedata.read_subpkgdata(pkg, d)
    key = "PKG_%s" % pkg

    if key in data:
        return data[key]

    return pkg

def runtime_mapping_rename (varname, d):
    #bb.note("%s before: %s" % (varname, d.getVar(varname, True)))

    new_depends = []
    deps = bb.utils.explode_dep_versions(d.getVar(varname, True) or "")
    for depend in deps:
        # Have to be careful with any version component of the depend
        new_depend = get_package_mapping(depend, d)
        if deps[depend]:
            new_depends.append("%s (%s)" % (new_depend, deps[depend]))
        else:
            new_depends.append(new_depend)

    d.setVar(varname, " ".join(new_depends) or None)

    #bb.note("%s after: %s" % (varname, d.getVar(varname, True)))

#
# Package functions suitable for inclusion in PACKAGEFUNCS
#

python package_get_auto_pr() {
    # per recipe PRSERV_HOST PRSERV_PORT
    pn = d.getVar('PN', True)
    host = d.getVar("PRSERV_HOST_" + pn, True)
    port = d.getVar("PRSERV_PORT_" + pn, True)
    if not (host is None):
        d.setVar("PRSERV_HOST", host)
    if not (port is None):
        d.setVar("PRSERV_PORT", port)
    if d.getVar('USE_PR_SERV', True) != "0":
        try:
            auto_pr=prserv_get_pr_auto(d)
        except Exception as e:
            bb.fatal("Can NOT get PRAUTO, exception %s" % str(e))
            return
        if auto_pr is None:
            if d.getVar('PRSERV_LOCKDOWN', True):
                bb.fatal("Can NOT get PRAUTO from lockdown exported file")
            else:
                bb.fatal("Can NOT get PRAUTO from remote PR service")
            return
        d.setVar('PRAUTO',str(auto_pr))
}

python package_do_split_locales() {
    if (d.getVar('PACKAGE_NO_LOCALE', True) == '1'):
        bb.debug(1, "package requested not splitting locales")
        return

    packages = (d.getVar('PACKAGES', True) or "").split()

    datadir = d.getVar('datadir', True)
    if not datadir:
        bb.note("datadir not defined")
        return

    dvar = d.getVar('PKGD', True)
    pn = d.getVar('PN', True)

    if pn + '-locale' in packages:
        packages.remove(pn + '-locale')

    localedir = os.path.join(dvar + datadir, 'locale')

    if not os.path.isdir(localedir):
        bb.debug(1, "No locale files in this package")
        return

    locales = os.listdir(localedir)

    summary = d.getVar('SUMMARY', True) or pn
    description = d.getVar('DESCRIPTION', True) or ""
    locale_section = d.getVar('LOCALE_SECTION', True)
    mlprefix = d.getVar('MLPREFIX', True) or ""
    for l in sorted(locales):
        ln = legitimize_package_name(l)
        pkg = pn + '-locale-' + ln
        packages.append(pkg)
        d.setVar('FILES_' + pkg, os.path.join(datadir, 'locale', l))
        d.setVar('RDEPENDS_' + pkg, '%s %svirtual-locale-%s' % (pn, mlprefix, ln))
        d.setVar('RPROVIDES_' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln))
        d.setVar('SUMMARY_' + pkg, '%s - %s translations' % (summary, l))
        d.setVar('DESCRIPTION_' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l))
        if locale_section:
            d.setVar('SECTION_' + pkg, locale_section)

    d.setVar('PACKAGES', ' '.join(packages))

    # Disabled by RP 18/06/07
    # Wildcards aren't supported in debian
    # They break with ipkg since glibc-locale* will mean that
    # glibc-localedata-translit* won't install as a dependency
    # for some other package which breaks meta-toolchain
    # Probably breaks since virtual-locale- isn't provided anywhere
    #rdep = (d.getVar('RDEPENDS_%s' % pn, True) or d.getVar('RDEPENDS', True) or "").split()
    #rdep.append('%s-locale*' % pn)
    #d.setVar('RDEPENDS_%s' % pn, ' '.join(rdep))
}

python perform_packagecopy () {
    import subprocess
    dest = d.getVar('D', True)
    dvar = d.getVar('PKGD', True)

    bb.mkdirhier(dvar)

    # Start by package population by taking a copy of the installed
    # files to operate on
    subprocess.call('rm -rf %s/*' % (dvar), shell=True)
    # Preserve sparse files and hard links
    subprocess.call('tar -cf - -C %s -ps . | tar -xf - -C %s' % (dest, dvar), shell=True)
}

# We generate a master list of directories to process, we start by
# seeding this list with reasonable defaults, then load from
# the fs-perms.txt files
python fixup_perms () {
    import os, pwd, grp

    # init using a string with the same format as a line as documented in
    # the fs-perms.txt file
    # <path> <mode> <uid> <gid> <walk> <fmode> <fuid> <fgid>
    # <path> link <link target>
    #
    # __str__ can be used to print out an entry in the input format
    #
    # if fs_perms_entry.path is None:
    #    an error occured
    # if fs_perms_entry.link, you can retrieve:
    #    fs_perms_entry.path = path
    #    fs_perms_entry.link = target of link
    # if not fs_perms_entry.link, you can retrieve:
    #    fs_perms_entry.path = path
    #    fs_perms_entry.mode = expected dir mode or None
    #    fs_perms_entry.uid = expected uid or -1
    #    fs_perms_entry.gid = expected gid or -1
    #    fs_perms_entry.walk = 'true' or something else
    #    fs_perms_entry.fmode = expected file mode or None
    #    fs_perms_entry.fuid = expected file uid or -1
    #    fs_perms_entry_fgid = expected file gid or -1
    class fs_perms_entry():
        def __init__(self, line):
            lsplit = line.split()
            if len(lsplit) == 3 and lsplit[1].lower() == "link":
                self._setlink(lsplit[0], lsplit[2])
            elif len(lsplit) == 8:
                self._setdir(lsplit[0], lsplit[1], lsplit[2], lsplit[3], lsplit[4], lsplit[5], lsplit[6], lsplit[7])
            else:
                bb.error("Fixup Perms: invalid config line %s" % line)
                self.path = None
                self.link = None

        def _setdir(self, path, mode, uid, gid, walk, fmode, fuid, fgid):
            self.path = os.path.normpath(path)
            self.link = None
            self.mode = self._procmode(mode)
            self.uid = self._procuid(uid)
            self.gid = self._procgid(gid)
            self.walk = walk.lower()
            self.fmode = self._procmode(fmode)
            self.fuid = self._procuid(fuid)
            self.fgid = self._procgid(fgid)

        def _setlink(self, path, link):
            self.path = os.path.normpath(path)
            self.link = link

        def _procmode(self, mode):
            if not mode or (mode and mode == "-"):
                return None
            else:
                return int(mode,8)

        # Note uid/gid -1 has special significance in os.lchown
        def _procuid(self, uid):
            if uid is None or uid == "-":
                return -1
            elif uid.isdigit():
                return int(uid)
            else:
                return pwd.getpwnam(uid).pw_uid

        def _procgid(self, gid):
            if gid is None or gid == "-":
                return -1
            elif gid.isdigit():
                return int(gid)
            else:
                return grp.getgrnam(gid).gr_gid

        # Use for debugging the entries
        def __str__(self):
            if self.link:
                return "%s link %s" % (self.path, self.link)
            else:
                mode = "-"
                if self.mode:
                    mode = "0%o" % self.mode
                fmode = "-"
                if self.fmode:
                    fmode = "0%o" % self.fmode
                uid = self._mapugid(self.uid)
                gid = self._mapugid(self.gid)
                fuid = self._mapugid(self.fuid)
                fgid = self._mapugid(self.fgid)
                return "%s %s %s %s %s %s %s %s" % (self.path, mode, uid, gid, self.walk, fmode, fuid, fgid)

        def _mapugid(self, id):
            if id is None or id == -1:
                return "-"
            else:
                return "%d" % id

    # Fix the permission, owner and group of path
    def fix_perms(path, mode, uid, gid, dir):
        if mode and not os.path.islink(path):
            #bb.note("Fixup Perms: chmod 0%o %s" % (mode, dir))
            os.chmod(path, mode)
        # -1 is a special value that means don't change the uid/gid
        # if they are BOTH -1, don't bother to lchown
        if not (uid == -1 and gid == -1):
            #bb.note("Fixup Perms: lchown %d:%d %s" % (uid, gid, dir))
            os.lchown(path, uid, gid)

    # Return a list of configuration files based on either the default
    # files/fs-perms.txt or the contents of FILESYSTEM_PERMS_TABLES
    # paths are resolved via BBPATH
    def get_fs_perms_list(d):
        str = ""
        fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES', True)
        if not fs_perms_tables:
            fs_perms_tables = 'files/fs-perms.txt'
        for conf_file in fs_perms_tables.split():
            str += " %s" % bb.which(d.getVar('BBPATH', True), conf_file)
        return str



    dvar = d.getVar('PKGD', True)

    fs_perms_table = {}

    # By default all of the standard directories specified in
    # bitbake.conf will get 0755 root:root.
    target_path_vars = [ 'base_prefix',
                'prefix',
                'exec_prefix',
                'base_bindir',
                'base_sbindir',
                'base_libdir',
                'datadir',
                'sysconfdir',
                'servicedir',
                'sharedstatedir',
                'localstatedir',
                'infodir',
                'mandir',
                'docdir',
                'bindir',
                'sbindir',
                'libexecdir',
                'libdir',
                'includedir',
                'oldincludedir' ]

    for path in target_path_vars:
        dir = d.getVar(path, True) or ""
        if dir == "":
            continue
        fs_perms_table[dir] = fs_perms_entry(bb.data.expand("%s 0755 root root false - - -" % (dir), d))

    # Now we actually load from the configuration files
    for conf in get_fs_perms_list(d).split():
        if os.path.exists(conf):
            f = open(conf)
            for line in f:
                if line.startswith('#'):
                    continue
                lsplit = line.split()
                if len(lsplit) == 0:
                    continue
                if len(lsplit) != 8 and not (len(lsplit) == 3 and lsplit[1].lower() == "link"):
                    bb.error("Fixup perms: %s invalid line: %s" % (conf, line))
                    continue
                entry = fs_perms_entry(d.expand(line))
                if entry and entry.path:
                    fs_perms_table[entry.path] = entry
            f.close()

    # Debug -- list out in-memory table
    #for dir in fs_perms_table:
    #    bb.note("Fixup Perms: %s: %s" % (dir, str(fs_perms_table[dir])))

    # We process links first, so we can go back and fixup directory ownership
    # for any newly created directories
    for dir in fs_perms_table:
        if not fs_perms_table[dir].link:
            continue

        origin = dvar + dir
        if not (os.path.exists(origin) and os.path.isdir(origin) and not os.path.islink(origin)):
            continue

        link = fs_perms_table[dir].link
        if link[0] == "/":
            target = dvar + link
            ptarget = link
        else:
            target = os.path.join(os.path.dirname(origin), link)
            ptarget = os.path.join(os.path.dirname(dir), link)
        if os.path.exists(target):
            bb.error("Fixup Perms: Unable to correct directory link, target already exists: %s -> %s" % (dir, ptarget))
            continue

        # Create path to move directory to, move it, and then setup the symlink
        bb.mkdirhier(os.path.dirname(target))
        #bb.note("Fixup Perms: Rename %s -> %s" % (dir, ptarget))
        os.rename(origin, target)
        #bb.note("Fixup Perms: Link %s -> %s" % (dir, link))
        os.symlink(link, origin)

    for dir in fs_perms_table:
        if fs_perms_table[dir].link:
            continue

        origin = dvar + dir
        if not (os.path.exists(origin) and os.path.isdir(origin)):
            continue

        fix_perms(origin, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)

        if fs_perms_table[dir].walk == 'true':
            for root, dirs, files in os.walk(origin):
                for dr in dirs:
                    each_dir = os.path.join(root, dr)
                    fix_perms(each_dir, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)
                for f in files:
                    each_file = os.path.join(root, f)
                    fix_perms(each_file, fs_perms_table[dir].fmode, fs_perms_table[dir].fuid, fs_perms_table[dir].fgid, dir)
}

python split_and_strip_files () {
    import commands, stat, errno, subprocess

    dvar = d.getVar('PKGD', True)
    pn = d.getVar('PN', True)

    # We default to '.debug' style
    if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', True) == 'debug-file-directory':
        # Single debug-file-directory style debug info
        debugappend = ".debug"
        debugdir = ""
        debuglibdir = "/usr/lib/debug"
        debugsrcdir = "/usr/src/debug"
    else:
        # Original OE-core, a.k.a. ".debug", style debug info
        debugappend = ""
        debugdir = "/.debug"
        debuglibdir = ""
        debugsrcdir = "/usr/src/debug"

    os.chdir(dvar)

    # Return type (bits):
    #   0 - not elf
    #   1 - ELF
    #   2 - stripped
    #   4 - executable
    #   8 - shared library
    def isELF(path):
        type = 0
        pathprefix = "export PATH=%s; " % d.getVar('PATH', True)
        ret, result = commands.getstatusoutput("%sfile '%s'" % (pathprefix, path))

        if ret:
            bb.error("split_and_strip_files: 'file %s' failed" % path)
            return type

        # Not stripped
        if "ELF" in result:
            type |= 1
            if "not stripped" not in result:
                type |= 2
            if "executable" in result:
                type |= 4
            if "shared" in result:
                type |= 8
        return type


    #
    # First lets figure out all of the files we may have to process ... do this only once!
    #
    file_list = {}
    file_links = {}
    if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', True) != '1') and \
            (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1'):
        for root, dirs, files in os.walk(dvar):
            for f in files:
                file = os.path.join(root, f)
                # Only process files (and symlinks)... Skip files that are obviously debug files
                if not (debugappend != "" and file.endswith(debugappend)) and \
                        not (debugdir != "" and debugdir in os.path.dirname(file[len(dvar):])) and \
                        os.path.isfile(file):
                    try:
                        s = os.stat(file)
                    except OSError, (err, strerror):
                        if err != errno.ENOENT:
                            raise
                        # Skip broken symlinks
                        continue
                    # Is the item excutable? Then we need to process it.
                    if (s[stat.ST_MODE] & stat.S_IXUSR) or \
                            (s[stat.ST_MODE] & stat.S_IXGRP) or \
                            (s[stat.ST_MODE] & stat.S_IXOTH):
                        # If it's a symlink, and points to an ELF file, we capture the readlink target
                        if os.path.islink(file):
                            target = os.readlink(file)
                            if not os.path.isabs(target):
                                ltarget = os.path.join(os.path.dirname(file), target)
                            else:
                                ltarget = target

                            if isELF(ltarget):
                                #bb.note("Sym: %s (%d)" % (ltarget, isELF(ltarget)))
                                file_list[file] = "sym: " + target
                            continue
                        # It's a file (or hardlink), not a link
                        # ...but is it ELF, and is it already stripped?
                        elf_file = isELF(file)
                        if elf_file & 1:
                            # Check if it's a hard link to something else
                            if s.st_nlink > 1:
                                file_reference = "%d_%d" % (s.st_dev, s.st_ino)
                                # Hard link to something else
                                file_list[file] = "hard: " + file_reference
                                continue

                            file_list[file] = "ELF: %d" % elf_file


    #
    # First lets process debug splitting
    #
    if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', True) != '1'):
        for file in file_list:
            src = file[len(dvar):]
            dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
            fpath = dvar + dest
            # Preserve symlinks in debug area...
            if file_list[file].startswith("sym: "):
                ltarget = file_list[file][5:]
                lpath = os.path.dirname(ltarget)
                lbase = os.path.basename(ltarget)
                ftarget = ""
                if lpath and lpath != ".":
                    ftarget += lpath + debugdir + "/"
                ftarget += lbase + debugappend
                if lpath.startswith(".."):
                    ftarget = os.path.join("..", ftarget)
                bb.mkdirhier(os.path.dirname(fpath))
                #bb.note("Symlink %s -> %s" % (fpath, ftarget))
                os.symlink(ftarget, fpath)
                continue

            # Preserve hard links in debug area...
            file_reference = ""
            if file_list[file].startswith("hard: "):
                file_reference = file_list[file][6:]
                if file_reference not in file_links:
                    # If this is a new file, add it as a reference, and
                    # update it's type, so we can fall through and split
                    file_list[file] = "ELF: %d" % (isELF(file))
                else:
                    target = file_links[file_reference][len(dvar):]
                    ftarget = dvar + debuglibdir + os.path.dirname(target) + debugdir + "/" + os.path.basename(target) + debugappend
                    bb.mkdirhier(os.path.dirname(fpath))
                    #bb.note("Link %s -> %s" % (fpath, ftarget))
                    os.link(ftarget, fpath)
                    continue

            # It's ELF...
            if file_list[file].startswith("ELF: "):
                elf_file = int(file_list[file][5:])
                if elf_file & 2:
                    bb.warn("File '%s' from %s was already stripped, this will prevent future debugging!" % (src, pn))
                    continue

                # Split the file...
                bb.mkdirhier(os.path.dirname(fpath))
                #bb.note("Split %s -> %s" % (file, fpath))
                # Only store off the hard link reference if we successfully split!
                if splitfile(file, fpath, debugsrcdir, d) == 0 and file_reference != "":
                    file_links[file_reference] = file

        # The above may have generated dangling symlinks, remove them!
        # Dangling symlinks are a result of something NOT being split, such as a stripped binary.
        # This should be a rare occurance, but we want to clean up anyway.
        for file in file_list:
            if file_list[file].startswith("sym: "):
                src = file[len(dvar):]
                dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
                fpath = dvar + dest
                try:
                    s = os.stat(fpath)
                except OSError, (err, strerror):
                    if err != errno.ENOENT:
                        raise
                    #bb.note("Remove dangling link %s -> %s" % (fpath, os.readlink(fpath)))
                    os.unlink(fpath)
                    # This could leave an empty debug directory laying around
                    # take care of the obvious case...
                    subprocess.call("rmdir %s 2>/dev/null" % os.path.dirname(fpath), shell=True)

        # Process the debugsrcdir if requested...
        # This copies and places the referenced sources for later debugging...
        splitfile2(debugsrcdir, d)
    #
    # End of debug splitting
    #

    #
    # Now lets go back over things and strip them
    #
    if (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1'):
        for file in file_list:
            if file_list[file].startswith("ELF: "):
                elf_file = int(file_list[file][5:])
                #bb.note("Strip %s" % file)
                runstrip(file, elf_file, d)


    if (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1'):
        for root, dirs, files in os.walk(dvar):
            for f in files:
                if not f.endswith(".ko"):
                    continue
                runstrip(os.path.join(root, f), None, d)
    #
    # End of strip
    #
}

873python populate_packages () { 873python populate_packages () {
874 import glob, stat, errno, re, subprocess 874 import glob, stat, errno, re, subprocess
875 875
876 workdir = d.getVar('WORKDIR', True) 876 workdir = d.getVar('WORKDIR', True)
877 outdir = d.getVar('DEPLOY_DIR', True) 877 outdir = d.getVar('DEPLOY_DIR', True)
878 dvar = d.getVar('PKGD', True) 878 dvar = d.getVar('PKGD', True)
879 packages = d.getVar('PACKAGES', True) 879 packages = d.getVar('PACKAGES', True)
880 pn = d.getVar('PN', True) 880 pn = d.getVar('PN', True)
881 881
882 bb.mkdirhier(outdir) 882 bb.mkdirhier(outdir)
883 os.chdir(dvar) 883 os.chdir(dvar)
884 884
885 # Sanity check PACKAGES for duplicates and for LICENSE_EXCLUSION 885 # Sanity check PACKAGES for duplicates and for LICENSE_EXCLUSION
886 # Sanity should be moved to sanity.bbclass once we have the infrastucture 886 # Sanity should be moved to sanity.bbclass once we have the infrastucture
887 package_list = [] 887 package_list = []
888 888
889 for pkg in packages.split(): 889 for pkg in packages.split():
890 if d.getVar('LICENSE_EXCLUSION-' + pkg, True): 890 if d.getVar('LICENSE_EXCLUSION-' + pkg, True):
891 bb.warn("%s has an incompatible license. Excluding from packaging." % pkg) 891 bb.warn("%s has an incompatible license. Excluding from packaging." % pkg)
892 packages.remove(pkg) 892 packages.remove(pkg)
893 else: 893 else:
894 if pkg in package_list: 894 if pkg in package_list:
895 bb.error("%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg) 895 bb.error("%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg)
896 else: 896 else:
897 package_list.append(pkg) 897 package_list.append(pkg)
898 d.setVar('PACKAGES', ' '.join(package_list)) 898 d.setVar('PACKAGES', ' '.join(package_list))
899 pkgdest = d.getVar('PKGDEST', True) 899 pkgdest = d.getVar('PKGDEST', True)
900 subprocess.call('rm -rf %s' % pkgdest, shell=True) 900 subprocess.call('rm -rf %s' % pkgdest, shell=True)
901 901
902 seen = [] 902 seen = []
903 903
904 for pkg in package_list: 904 for pkg in package_list:
905 localdata = bb.data.createCopy(d) 905 localdata = bb.data.createCopy(d)
906 root = os.path.join(pkgdest, pkg) 906 root = os.path.join(pkgdest, pkg)
907 bb.mkdirhier(root) 907 bb.mkdirhier(root)
908 908
909 localdata.setVar('PKG', pkg) 909 localdata.setVar('PKG', pkg)
910 overrides = localdata.getVar('OVERRIDES', True) 910 overrides = localdata.getVar('OVERRIDES', True)
911 if not overrides: 911 if not overrides:
912 raise bb.build.FuncFailed('OVERRIDES not defined') 912 raise bb.build.FuncFailed('OVERRIDES not defined')
913 localdata.setVar('OVERRIDES', overrides + ':' + pkg) 913 localdata.setVar('OVERRIDES', overrides + ':' + pkg)
914 bb.data.update_data(localdata) 914 bb.data.update_data(localdata)
915 915
916 filesvar = localdata.getVar('FILES', True) or "" 916 filesvar = localdata.getVar('FILES', True) or ""
917 files = filesvar.split() 917 files = filesvar.split()
918 file_links = {} 918 file_links = {}
919 for file in files: 919 for file in files:
920 if file.find("//") != -1: 920 if file.find("//") != -1:
921 bb.warn("FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg) 921 bb.warn("FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg)
922 file.replace("//", "/") 922 file.replace("//", "/")
923 if os.path.isabs(file): 923 if os.path.isabs(file):
924 file = '.' + file 924 file = '.' + file
925 if not os.path.islink(file): 925 if not os.path.islink(file):
926 if os.path.isdir(file): 926 if os.path.isdir(file):
927 newfiles = [ os.path.join(file,x) for x in os.listdir(file) ] 927 newfiles = [ os.path.join(file,x) for x in os.listdir(file) ]
928 if newfiles: 928 if newfiles:
929 files += newfiles 929 files += newfiles
930 continue 930 continue
931 globbed = glob.glob(file) 931 globbed = glob.glob(file)
932 if globbed: 932 if globbed:
933 if [ file ] != globbed: 933 if [ file ] != globbed:
934 files += globbed 934 files += globbed
935 continue 935 continue
936 if (not os.path.islink(file)) and (not os.path.exists(file)): 936 if (not os.path.islink(file)) and (not os.path.exists(file)):
937 continue 937 continue
938 if file in seen: 938 if file in seen:
939 continue 939 continue
940 seen.append(file) 940 seen.append(file)
941 941
942 def mkdir(src, dest, p): 942 def mkdir(src, dest, p):
943 src = os.path.join(src, p) 943 src = os.path.join(src, p)
944 dest = os.path.join(dest, p) 944 dest = os.path.join(dest, p)
945 bb.mkdirhier(dest) 945 bb.mkdirhier(dest)
946 fstat = os.stat(src) 946 fstat = os.stat(src)
947 os.chmod(dest, fstat.st_mode) 947 os.chmod(dest, fstat.st_mode)
948 os.chown(dest, fstat.st_uid, fstat.st_gid) 948 os.chown(dest, fstat.st_uid, fstat.st_gid)
949 if p not in seen: 949 if p not in seen:
950 seen.append(p) 950 seen.append(p)
951 951
952 def mkdir_recurse(src, dest, paths): 952 def mkdir_recurse(src, dest, paths):
953 while paths.startswith("./"): 953 while paths.startswith("./"):
954 paths = paths[2:] 954 paths = paths[2:]
955 p = "." 955 p = "."
956 for c in paths.split("/"): 956 for c in paths.split("/"):
957 p = os.path.join(p, c) 957 p = os.path.join(p, c)
958 if not os.path.exists(os.path.join(dest, p)): 958 if not os.path.exists(os.path.join(dest, p)):
959 mkdir(src, dest, p) 959 mkdir(src, dest, p)
960 960
961 if os.path.isdir(file) and not os.path.islink(file): 961 if os.path.isdir(file) and not os.path.islink(file):
962 mkdir_recurse(dvar, root, file) 962 mkdir_recurse(dvar, root, file)
963 continue 963 continue
964 964
965 mkdir_recurse(dvar, root, os.path.dirname(file)) 965 mkdir_recurse(dvar, root, os.path.dirname(file))
966 fpath = os.path.join(root,file) 966 fpath = os.path.join(root,file)
967 if not os.path.islink(file): 967 if not os.path.islink(file):
968 os.link(file, fpath) 968 os.link(file, fpath)
969 fstat = os.stat(file) 969 fstat = os.stat(file)
970 os.chmod(fpath, fstat.st_mode) 970 os.chmod(fpath, fstat.st_mode)
971 os.chown(fpath, fstat.st_uid, fstat.st_gid) 971 os.chown(fpath, fstat.st_uid, fstat.st_gid)
972 continue 972 continue
973 ret = bb.copyfile(file, fpath) 973 ret = bb.copyfile(file, fpath)
974 if ret is False or ret == 0: 974 if ret is False or ret == 0:
975 raise bb.build.FuncFailed("File population failed") 975 raise bb.build.FuncFailed("File population failed")
976 976
977 del localdata 977 del localdata
978 os.chdir(workdir) 978 os.chdir(workdir)
979 979
980 unshipped = [] 980 unshipped = []
981 for root, dirs, files in os.walk(dvar): 981 for root, dirs, files in os.walk(dvar):
982 dir = root[len(dvar):] 982 dir = root[len(dvar):]
983 if not dir: 983 if not dir:
984 dir = os.sep 984 dir = os.sep
985 for f in (files + dirs): 985 for f in (files + dirs):
986 path = os.path.join(dir, f) 986 path = os.path.join(dir, f)
987 if ('.' + path) not in seen: 987 if ('.' + path) not in seen:
988 unshipped.append(path) 988 unshipped.append(path)
989 989
990 if unshipped != []: 990 if unshipped != []:
991 msg = pn + ": Files/directories were installed but not shipped" 991 msg = pn + ": Files/directories were installed but not shipped"
992 if "installed_vs_shipped" in (d.getVar('INSANE_SKIP_' + pn, True) or "").split(): 992 if "installed_vs_shipped" in (d.getVar('INSANE_SKIP_' + pn, True) or "").split():
993 bb.note("Package %s skipping QA tests: installed_vs_shipped" % pn) 993 bb.note("Package %s skipping QA tests: installed_vs_shipped" % pn)
994 else: 994 else:
995 for f in unshipped: 995 for f in unshipped:
996 msg = msg + "\n " + f 996 msg = msg + "\n " + f
997 package_qa_handle_error("installed_vs_shipped", msg, d) 997 package_qa_handle_error("installed_vs_shipped", msg, d)
998 998
999 bb.build.exec_func("package_name_hook", d) 999 bb.build.exec_func("package_name_hook", d)
1000 1000
1001 for pkg in package_list: 1001 for pkg in package_list:
1002 pkgname = d.getVar('PKG_%s' % pkg, True) 1002 pkgname = d.getVar('PKG_%s' % pkg, True)
1003 if pkgname is None: 1003 if pkgname is None:
1004 d.setVar('PKG_%s' % pkg, pkg) 1004 d.setVar('PKG_%s' % pkg, pkg)
1005 1005
1006 dangling_links = {} 1006 dangling_links = {}
1007 pkg_files = {} 1007 pkg_files = {}
1008 for pkg in package_list: 1008 for pkg in package_list:
1009 dangling_links[pkg] = [] 1009 dangling_links[pkg] = []
1010 pkg_files[pkg] = [] 1010 pkg_files[pkg] = []
1011 inst_root = os.path.join(pkgdest, pkg) 1011 inst_root = os.path.join(pkgdest, pkg)
1012 for root, dirs, files in os.walk(inst_root): 1012 for root, dirs, files in os.walk(inst_root):
1013 for f in files: 1013 for f in files:
1014 path = os.path.join(root, f) 1014 path = os.path.join(root, f)
1015 rpath = path[len(inst_root):] 1015 rpath = path[len(inst_root):]
1016 pkg_files[pkg].append(rpath) 1016 pkg_files[pkg].append(rpath)
1017 try: 1017 try:
1018 s = os.stat(path) 1018 s = os.stat(path)
1019 except OSError, (err, strerror): 1019 except OSError, (err, strerror):
1020 if err != errno.ENOENT: 1020 if err != errno.ENOENT:
1021 raise 1021 raise
1022 target = os.readlink(path) 1022 target = os.readlink(path)
1023 if target[0] != '/': 1023 if target[0] != '/':
1024 target = os.path.join(root[len(inst_root):], target) 1024 target = os.path.join(root[len(inst_root):], target)
1025 dangling_links[pkg].append(os.path.normpath(target)) 1025 dangling_links[pkg].append(os.path.normpath(target))
1026 1026
1027 for pkg in package_list: 1027 for pkg in package_list:
1028 rdepends = bb.utils.explode_dep_versions(d.getVar('RDEPENDS_' + pkg, True) or d.getVar('RDEPENDS', True) or "") 1028 rdepends = bb.utils.explode_dep_versions(d.getVar('RDEPENDS_' + pkg, True) or d.getVar('RDEPENDS', True) or "")
1029 1029
1030 for l in dangling_links[pkg]: 1030 for l in dangling_links[pkg]:
1031 found = False 1031 found = False
1032 bb.debug(1, "%s contains dangling link %s" % (pkg, l)) 1032 bb.debug(1, "%s contains dangling link %s" % (pkg, l))
1033 for p in package_list: 1033 for p in package_list:
1034 for f in pkg_files[p]: 1034 for f in pkg_files[p]:
1035 if f == l: 1035 if f == l:
1036 found = True 1036 found = True
1037 bb.debug(1, "target found in %s" % p) 1037 bb.debug(1, "target found in %s" % p)
1038 if p == pkg: 1038 if p == pkg:
1039 break 1039 break
1040 if p not in rdepends: 1040 if p not in rdepends:
1041 rdepends[p] = "" 1041 rdepends[p] = ""
1042 break 1042 break
1043 if found == False: 1043 if found == False:
1044 bb.note("%s contains dangling symlink to %s" % (pkg, l)) 1044 bb.note("%s contains dangling symlink to %s" % (pkg, l))
1045 d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False)) 1045 d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False))
1046} 1046}
1047populate_packages[dirs] = "${D}" 1047populate_packages[dirs] = "${D}"
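
For install-tree entries whose os.stat() fails with ENOENT, the pass above treats the entry as a dangling symlink and resolves its target relative to the package's install root before checking whether another package ships that target. A minimal sketch of just that resolution step, outside the class and with hypothetical paths:

    import os

    def resolve_link_target(inst_root, link_path):
        # Mirrors the logic above: absolute targets are kept as-is, relative
        # targets are joined to the link's directory expressed relative to
        # the install root.
        target = os.readlink(link_path)
        if not target.startswith('/'):
            target = os.path.join(os.path.dirname(link_path)[len(inst_root):], target)
        return os.path.normpath(target)

    # e.g. a link <inst_root>/usr/lib/libfoo.so -> libfoo.so.1 resolves to
    # "/usr/lib/libfoo.so.1", which is then looked up in the other packages'
    # pkg_files lists and, if found, turned into an RDEPENDS entry.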
1048 1048
1049PKGDESTWORK = "${WORKDIR}/pkgdata" 1049PKGDESTWORK = "${WORKDIR}/pkgdata"
1050 1050
1051python emit_pkgdata() { 1051python emit_pkgdata() {
1052 from glob import glob 1052 from glob import glob
1053 1053
1054 def write_if_exists(f, pkg, var): 1054 def write_if_exists(f, pkg, var):
1055 def encode(str): 1055 def encode(str):
1056 import codecs 1056 import codecs
1057 c = codecs.getencoder("string_escape") 1057 c = codecs.getencoder("string_escape")
1058 return c(str)[0] 1058 return c(str)[0]
1059 1059
1060 val = d.getVar('%s_%s' % (var, pkg), True) 1060 val = d.getVar('%s_%s' % (var, pkg), True)
1061 if val: 1061 if val:
1062 f.write('%s_%s: %s\n' % (var, pkg, encode(val))) 1062 f.write('%s_%s: %s\n' % (var, pkg, encode(val)))
1063 return 1063 return
1064 val = d.getVar('%s' % (var), True) 1064 val = d.getVar('%s' % (var), True)
1065 if val: 1065 if val:
1066 f.write('%s: %s\n' % (var, encode(val))) 1066 f.write('%s: %s\n' % (var, encode(val)))
1067 return 1067 return
1068 1068
1069 def get_directory_size(dir): 1069 def get_directory_size(dir):
1070 if os.listdir(dir): 1070 if os.listdir(dir):
1071 size = int(os.popen('du -sk %s' % dir).readlines()[0].split('\t')[0]) 1071 size = int(os.popen('du -sk %s' % dir).readlines()[0].split('\t')[0])
1072 else: 1072 else:
1073 size = 0 1073 size = 0
1074 return size 1074 return size
1075 1075
1076 packages = d.getVar('PACKAGES', True) 1076 packages = d.getVar('PACKAGES', True)
1077 pkgdest = d.getVar('PKGDEST', True) 1077 pkgdest = d.getVar('PKGDEST', True)
1078 pkgdatadir = d.getVar('PKGDESTWORK', True) 1078 pkgdatadir = d.getVar('PKGDESTWORK', True)
1079 1079
1080 # Take shared lock since we're only reading, not writing 1080 # Take shared lock since we're only reading, not writing
1081 lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"), True) 1081 lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"), True)
1082 1082
1083 data_file = pkgdatadir + d.expand("/${PN}" ) 1083 data_file = pkgdatadir + d.expand("/${PN}" )
1084 f = open(data_file, 'w') 1084 f = open(data_file, 'w')
1085 f.write("PACKAGES: %s\n" % packages) 1085 f.write("PACKAGES: %s\n" % packages)
1086 f.close() 1086 f.close()
1087 1087
1088 workdir = d.getVar('WORKDIR', True) 1088 workdir = d.getVar('WORKDIR', True)
1089 1089
1090 for pkg in packages.split(): 1090 for pkg in packages.split():
1091 subdata_file = pkgdatadir + "/runtime/%s" % pkg 1091 subdata_file = pkgdatadir + "/runtime/%s" % pkg
1092 1092
1093 sf = open(subdata_file, 'w') 1093 sf = open(subdata_file, 'w')
1094 write_if_exists(sf, pkg, 'PN') 1094 write_if_exists(sf, pkg, 'PN')
1095 write_if_exists(sf, pkg, 'PV') 1095 write_if_exists(sf, pkg, 'PV')
1096 write_if_exists(sf, pkg, 'PR') 1096 write_if_exists(sf, pkg, 'PR')
1097 write_if_exists(sf, pkg, 'PKGV') 1097 write_if_exists(sf, pkg, 'PKGV')
1098 write_if_exists(sf, pkg, 'PKGR') 1098 write_if_exists(sf, pkg, 'PKGR')
1099 write_if_exists(sf, pkg, 'LICENSE') 1099 write_if_exists(sf, pkg, 'LICENSE')
1100 write_if_exists(sf, pkg, 'DESCRIPTION') 1100 write_if_exists(sf, pkg, 'DESCRIPTION')
1101 write_if_exists(sf, pkg, 'SUMMARY') 1101 write_if_exists(sf, pkg, 'SUMMARY')
1102 write_if_exists(sf, pkg, 'RDEPENDS') 1102 write_if_exists(sf, pkg, 'RDEPENDS')
1103 write_if_exists(sf, pkg, 'RPROVIDES') 1103 write_if_exists(sf, pkg, 'RPROVIDES')
1104 write_if_exists(sf, pkg, 'RRECOMMENDS') 1104 write_if_exists(sf, pkg, 'RRECOMMENDS')
1105 write_if_exists(sf, pkg, 'RSUGGESTS') 1105 write_if_exists(sf, pkg, 'RSUGGESTS')
1106 write_if_exists(sf, pkg, 'RREPLACES') 1106 write_if_exists(sf, pkg, 'RREPLACES')
1107 write_if_exists(sf, pkg, 'RCONFLICTS') 1107 write_if_exists(sf, pkg, 'RCONFLICTS')
1108 write_if_exists(sf, pkg, 'SECTION') 1108 write_if_exists(sf, pkg, 'SECTION')
1109 write_if_exists(sf, pkg, 'PKG') 1109 write_if_exists(sf, pkg, 'PKG')
1110 write_if_exists(sf, pkg, 'ALLOW_EMPTY') 1110 write_if_exists(sf, pkg, 'ALLOW_EMPTY')
1111 write_if_exists(sf, pkg, 'FILES') 1111 write_if_exists(sf, pkg, 'FILES')
1112 write_if_exists(sf, pkg, 'pkg_postinst') 1112 write_if_exists(sf, pkg, 'pkg_postinst')
1113 write_if_exists(sf, pkg, 'pkg_postrm') 1113 write_if_exists(sf, pkg, 'pkg_postrm')
1114 write_if_exists(sf, pkg, 'pkg_preinst') 1114 write_if_exists(sf, pkg, 'pkg_preinst')
1115 write_if_exists(sf, pkg, 'pkg_prerm') 1115 write_if_exists(sf, pkg, 'pkg_prerm')
1116 write_if_exists(sf, pkg, 'FILERPROVIDESFLIST') 1116 write_if_exists(sf, pkg, 'FILERPROVIDESFLIST')
1117 for dfile in (d.getVar('FILERPROVIDESFLIST_' + pkg, True) or "").split(): 1117 for dfile in (d.getVar('FILERPROVIDESFLIST_' + pkg, True) or "").split():
1118 write_if_exists(sf, pkg, 'FILERPROVIDES_' + dfile) 1118 write_if_exists(sf, pkg, 'FILERPROVIDES_' + dfile)
1119 1119
1120 write_if_exists(sf, pkg, 'FILERDEPENDSFLIST') 1120 write_if_exists(sf, pkg, 'FILERDEPENDSFLIST')
1121 for dfile in (d.getVar('FILERDEPENDSFLIST_' + pkg, True) or "").split(): 1121 for dfile in (d.getVar('FILERDEPENDSFLIST_' + pkg, True) or "").split():
1122 write_if_exists(sf, pkg, 'FILERDEPENDS_' + dfile) 1122 write_if_exists(sf, pkg, 'FILERDEPENDS_' + dfile)
1123 1123
1124 sf.write('%s_%s: %s\n' % ('PKGSIZE', pkg, get_directory_size(pkgdest + "/%s" % pkg))) 1124 sf.write('%s_%s: %s\n' % ('PKGSIZE', pkg, get_directory_size(pkgdest + "/%s" % pkg)))
1125 sf.close() 1125 sf.close()
1126 1126
1127 1127
1128 allow_empty = d.getVar('ALLOW_EMPTY_%s' % pkg, True) 1128 allow_empty = d.getVar('ALLOW_EMPTY_%s' % pkg, True)
1129 if not allow_empty: 1129 if not allow_empty:
1130 allow_empty = d.getVar('ALLOW_EMPTY', True) 1130 allow_empty = d.getVar('ALLOW_EMPTY', True)
1131 root = "%s/%s" % (pkgdest, pkg) 1131 root = "%s/%s" % (pkgdest, pkg)
1132 os.chdir(root) 1132 os.chdir(root)
1133 g = glob('*') 1133 g = glob('*')
1134 if g or allow_empty == "1": 1134 if g or allow_empty == "1":
1135 packagedfile = pkgdatadir + '/runtime/%s.packaged' % pkg 1135 packagedfile = pkgdatadir + '/runtime/%s.packaged' % pkg
1136 file(packagedfile, 'w').close() 1136 file(packagedfile, 'w').close()
1137 1137
1138 bb.utils.unlockfile(lf) 1138 bb.utils.unlockfile(lf)
1139} 1139}
1140emit_pkgdata[dirs] = "${PKGDESTWORK}/runtime" 1140emit_pkgdata[dirs] = "${PKGDESTWORK}/runtime"
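
emit_pkgdata above writes one plain-text file per package under ${PKGDESTWORK}/runtime, one "VAR_pkg: value" line per variable (values run through the string_escape codec), plus a top-level ${PN} file listing PACKAGES and an empty ".packaged" marker for packages that ship files or set ALLOW_EMPTY. A minimal read-back sketch, not part of the class; the package name in the usage comment is only an example:

    def read_pkgdata_file(path):
        # Each line has the form "VAR_pkg: value" (or "VAR: value" for
        # unsuffixed variables); multi-line values such as pkg_postinst come
        # back as a single string_escape-encoded line.
        values = {}
        f = open(path)
        for line in f.readlines():
            if ':' in line:
                key, val = line.split(':', 1)
                values[key.strip()] = val.strip()
        f.close()
        return values

    # e.g. read_pkgdata_file(pkgdatadir + "/runtime/example-pkg").get("PKGSIZE_example-pkg")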
1141 1141
@@ -1156,557 +1156,557 @@ RPMDEPS = "${STAGING_LIBDIR_NATIVE}/rpm/bin/rpmdeps-oecore --macros ${STAGING_LI
1156# FILERDEPENDS_filepath_pkg - per file dep 1156# FILERDEPENDS_filepath_pkg - per file dep
1157 1157
1158python package_do_filedeps() { 1158python package_do_filedeps() {
1159 import re 1159 import re
1160 1160
1161 if d.getVar('SKIP_FILEDEPS', True) == '1': 1161 if d.getVar('SKIP_FILEDEPS', True) == '1':
1162 return 1162 return
1163 1163
1164 pkgdest = d.getVar('PKGDEST', True) 1164 pkgdest = d.getVar('PKGDEST', True)
1165 packages = d.getVar('PACKAGES', True) 1165 packages = d.getVar('PACKAGES', True)
1166 1166
1167 rpmdeps = d.expand("${RPMDEPS}") 1167 rpmdeps = d.expand("${RPMDEPS}")
1168 r = re.compile(r'[<>=]+ +[^ ]*') 1168 r = re.compile(r'[<>=]+ +[^ ]*')
1169 1169
1170 def file_translate(file): 1170 def file_translate(file):
1171 ft = file.replace("@", "@at@") 1171 ft = file.replace("@", "@at@")
1172 ft = ft.replace(" ", "@space@") 1172 ft = ft.replace(" ", "@space@")
1173 ft = ft.replace("\t", "@tab@") 1173 ft = ft.replace("\t", "@tab@")
1174 ft = ft.replace("[", "@openbrace@") 1174 ft = ft.replace("[", "@openbrace@")
1175 ft = ft.replace("]", "@closebrace@") 1175 ft = ft.replace("]", "@closebrace@")
1176 ft = ft.replace("_", "@underscore@") 1176 ft = ft.replace("_", "@underscore@")
1177 return ft 1177 return ft
1178 1178
1179 # Quick routine to process the results of the rpmdeps call... 1179 # Quick routine to process the results of the rpmdeps call...
1180 def process_deps(pipe, pkg, provides_files, requires_files): 1180 def process_deps(pipe, pkg, provides_files, requires_files):
1181 provides = {} 1181 provides = {}
1182 requires = {} 1182 requires = {}
1183 1183
1184 for line in pipe: 1184 for line in pipe:
1185 f = line.split(" ", 1)[0].strip() 1185 f = line.split(" ", 1)[0].strip()
1186 line = line.split(" ", 1)[1].strip() 1186 line = line.split(" ", 1)[1].strip()
1187 1187
1188 if line.startswith("Requires:"): 1188 if line.startswith("Requires:"):
1189 i = requires 1189 i = requires
1190 elif line.startswith("Provides:"): 1190 elif line.startswith("Provides:"):
1191 i = provides 1191 i = provides
1192 else: 1192 else:
1193 continue 1193 continue
1194 1194
1195 file = f.replace(pkgdest + "/" + pkg, "") 1195 file = f.replace(pkgdest + "/" + pkg, "")
1196 file = file_translate(file) 1196 file = file_translate(file)
1197 value = line.split(":", 1)[1].strip() 1197 value = line.split(":", 1)[1].strip()
1198 value = r.sub(r'(\g<0>)', value) 1198 value = r.sub(r'(\g<0>)', value)
1199 1199
1200 if value.startswith("rpmlib("): 1200 if value.startswith("rpmlib("):
1201 continue 1201 continue
1202 if value == "python": 1202 if value == "python":
1203 continue 1203 continue
1204 if file not in i: 1204 if file not in i:
1205 i[file] = [] 1205 i[file] = []
1206 i[file].append(value) 1206 i[file].append(value)
1207 1207
1208 for file in provides: 1208 for file in provides:
1209 provides_files.append(file) 1209 provides_files.append(file)
1210 key = "FILERPROVIDES_" + file + "_" + pkg 1210 key = "FILERPROVIDES_" + file + "_" + pkg
1211 d.setVar(key, " ".join(provides[file])) 1211 d.setVar(key, " ".join(provides[file]))
1212 1212
1213 for file in requires: 1213 for file in requires:
1214 requires_files.append(file) 1214 requires_files.append(file)
1215 key = "FILERDEPENDS_" + file + "_" + pkg 1215 key = "FILERDEPENDS_" + file + "_" + pkg
1216 d.setVar(key, " ".join(requires[file])) 1216 d.setVar(key, " ".join(requires[file]))
1217 1217
1218 def chunks(files, n): 1218 def chunks(files, n):
1219 return [files[i:i+n] for i in range(0, len(files), n)] 1219 return [files[i:i+n] for i in range(0, len(files), n)]
1220 1220
1221 # Determine dependencies 1221 # Determine dependencies
1222 for pkg in packages.split(): 1222 for pkg in packages.split():
1223 if pkg.endswith('-dbg') or pkg.endswith('-doc') or pkg.find('-locale-') != -1 or pkg.find('-localedata-') != -1 or pkg.find('-gconv-') != -1 or pkg.find('-charmap-') != -1 or pkg.startswith('kernel-module-'): 1223 if pkg.endswith('-dbg') or pkg.endswith('-doc') or pkg.find('-locale-') != -1 or pkg.find('-localedata-') != -1 or pkg.find('-gconv-') != -1 or pkg.find('-charmap-') != -1 or pkg.startswith('kernel-module-'):
1224 continue 1224 continue
1225 1225
1226 provides_files = [] 1226 provides_files = []
1227 requires_files = [] 1227 requires_files = []
1228 rpfiles = [] 1228 rpfiles = []
1229 for root, dirs, files in os.walk(pkgdest + "/" + pkg): 1229 for root, dirs, files in os.walk(pkgdest + "/" + pkg):
1230 for file in files: 1230 for file in files:
1231 rpfiles.append(os.path.join(root, file)) 1231 rpfiles.append(os.path.join(root, file))
1232 1232
1233 for files in chunks(rpfiles, 100): 1233 for files in chunks(rpfiles, 100):
1234 dep_pipe = os.popen(rpmdeps + " " + " ".join(files)) 1234 dep_pipe = os.popen(rpmdeps + " " + " ".join(files))
1235 1235
1236 process_deps(dep_pipe, pkg, provides_files, requires_files) 1236 process_deps(dep_pipe, pkg, provides_files, requires_files)
1237 1237
1238 d.setVar("FILERDEPENDSFLIST_" + pkg, " ".join(requires_files)) 1238 d.setVar("FILERDEPENDSFLIST_" + pkg, " ".join(requires_files))
1239 d.setVar("FILERPROVIDESFLIST_" + pkg, " ".join(provides_files)) 1239 d.setVar("FILERPROVIDESFLIST_" + pkg, " ".join(provides_files))
1240} 1240}
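
Because per-file results are stored in keys of the form FILERPROVIDES_<file>_<pkg> and FILERDEPENDS_<file>_<pkg>, file_translate() above first rewrites a fixed set of characters into @...@ tokens. The same mapping restated as a table, together with an inverse; the inverse is an illustration only and not part of the class:

    _FILE_ESCAPES = [
        ("@", "@at@"),
        (" ", "@space@"),
        ("\t", "@tab@"),
        ("[", "@openbrace@"),
        ("]", "@closebrace@"),
        ("_", "@underscore@"),
    ]

    def file_translate(path):
        # Same order as above: "@" is escaped first so the later tokens are
        # not themselves re-escaped.
        for char, token in _FILE_ESCAPES:
            path = path.replace(char, token)
        return path

    def file_untranslate(escaped):
        # Undo in reverse order so "@at@" is restored last.
        for char, token in reversed(_FILE_ESCAPES):
            escaped = escaped.replace(token, char)
        return escaped

    # file_translate("/usr/lib/my_lib [v2].so")
    #   -> "/usr/lib/my@underscore@lib@space@@openbrace@v2@closebrace@.so"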
1241 1241
1242SHLIBSDIR = "${STAGING_DIR_HOST}/shlibs" 1242SHLIBSDIR = "${STAGING_DIR_HOST}/shlibs"
1243SHLIBSWORKDIR = "${WORKDIR}/shlibs" 1243SHLIBSWORKDIR = "${WORKDIR}/shlibs"
1244 1244
1245python package_do_shlibs() { 1245python package_do_shlibs() {
1246 import re, pipes 1246 import re, pipes
1247 1247
1248 exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', 0) 1248 exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', 0)
1249 if exclude_shlibs: 1249 if exclude_shlibs:
1250 bb.note("not generating shlibs") 1250 bb.note("not generating shlibs")
1251 return 1251 return
1252 1252
1253 lib_re = re.compile("^.*\.so") 1253 lib_re = re.compile("^.*\.so")
1254 libdir_re = re.compile(".*/%s$" % d.getVar('baselib', True)) 1254 libdir_re = re.compile(".*/%s$" % d.getVar('baselib', True))
1255 1255
1256 packages = d.getVar('PACKAGES', True) 1256 packages = d.getVar('PACKAGES', True)
1257 targetos = d.getVar('TARGET_OS', True) 1257 targetos = d.getVar('TARGET_OS', True)
1258 1258
1259 workdir = d.getVar('WORKDIR', True) 1259 workdir = d.getVar('WORKDIR', True)
1260 1260
1261 ver = d.getVar('PKGV', True) 1261 ver = d.getVar('PKGV', True)
1262 if not ver: 1262 if not ver:
1263 bb.error("PKGV not defined") 1263 bb.error("PKGV not defined")
1264 return 1264 return
1265 1265
1266 pkgdest = d.getVar('PKGDEST', True) 1266 pkgdest = d.getVar('PKGDEST', True)
1267 1267
1268 shlibs_dir = d.getVar('SHLIBSDIR', True) 1268 shlibs_dir = d.getVar('SHLIBSDIR', True)
1269 shlibswork_dir = d.getVar('SHLIBSWORKDIR', True) 1269 shlibswork_dir = d.getVar('SHLIBSWORKDIR', True)
1270 1270
1271 # Take shared lock since we're only reading, not writing 1271 # Take shared lock since we're only reading, not writing
1272 lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}")) 1272 lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"))
1273 1273
1274 def linux_so(root, path, file): 1274 def linux_so(root, path, file):
1275 needs_ldconfig = False 1275 needs_ldconfig = False
1276 cmd = d.getVar('OBJDUMP', True) + " -p " + pipes.quote(os.path.join(root, file)) + " 2>/dev/null" 1276 cmd = d.getVar('OBJDUMP', True) + " -p " + pipes.quote(os.path.join(root, file)) + " 2>/dev/null"
1277 cmd = "PATH=\"%s\" %s" % (d.getVar('PATH', True), cmd) 1277 cmd = "PATH=\"%s\" %s" % (d.getVar('PATH', True), cmd)
1278 fd = os.popen(cmd) 1278 fd = os.popen(cmd)
1279 lines = fd.readlines() 1279 lines = fd.readlines()
1280 fd.close() 1280 fd.close()
1281 for l in lines: 1281 for l in lines:
1282 m = re.match("\s+NEEDED\s+([^\s]*)", l) 1282 m = re.match("\s+NEEDED\s+([^\s]*)", l)
1283 if m: 1283 if m:
1284 if m.group(1) not in needed[pkg]: 1284 if m.group(1) not in needed[pkg]:
1285 needed[pkg].append(m.group(1)) 1285 needed[pkg].append(m.group(1))
1286 m = re.match("\s+SONAME\s+([^\s]*)", l) 1286 m = re.match("\s+SONAME\s+([^\s]*)", l)
1287 if m: 1287 if m:
1288 this_soname = m.group(1) 1288 this_soname = m.group(1)
1289 if not this_soname in sonames: 1289 if not this_soname in sonames:
1290 # if library is private (only used by package) then do not build shlib for it 1290 # if library is private (only used by package) then do not build shlib for it
1291 if not private_libs or -1 == private_libs.find(this_soname): 1291 if not private_libs or -1 == private_libs.find(this_soname):
1292 sonames.append(this_soname) 1292 sonames.append(this_soname)
1293 if libdir_re.match(root): 1293 if libdir_re.match(root):
1294 needs_ldconfig = True 1294 needs_ldconfig = True
1295 if snap_symlinks and (file != this_soname): 1295 if snap_symlinks and (file != this_soname):
1296 renames.append((os.path.join(root, file), os.path.join(root, this_soname))) 1296 renames.append((os.path.join(root, file), os.path.join(root, this_soname)))
1297 return needs_ldconfig 1297 return needs_ldconfig
1298 1298
1299 def darwin_so(root, path, file): 1299 def darwin_so(root, path, file):
1300 fullpath = os.path.join(root, file) 1300 fullpath = os.path.join(root, file)
1301 if not os.path.exists(fullpath): 1301 if not os.path.exists(fullpath):
1302 return 1302 return
1303 1303
1304 def get_combinations(base): 1304 def get_combinations(base):
1305 # 1305 #
1306 # Given a base library name, find all combinations of this split by "." and "-" 1306 # Given a base library name, find all combinations of this split by "." and "-"
1307 # 1307 #
1308 combos = [] 1308 combos = []
1309 options = base.split(".") 1309 options = base.split(".")
1310 for i in range(1, len(options) + 1): 1310 for i in range(1, len(options) + 1):
1311 combos.append(".".join(options[0:i])) 1311 combos.append(".".join(options[0:i]))
1312 options = base.split("-") 1312 options = base.split("-")
1313 for i in range(1, len(options) + 1): 1313 for i in range(1, len(options) + 1):
1314 combos.append("-".join(options[0:i])) 1314 combos.append("-".join(options[0:i]))
1315 return combos 1315 return combos
1316 1316
1317 if (file.endswith('.dylib') or file.endswith('.so')) and not pkg.endswith('-dev') and not pkg.endswith('-dbg'): 1317 if (file.endswith('.dylib') or file.endswith('.so')) and not pkg.endswith('-dev') and not pkg.endswith('-dbg'):
1318 # Drop suffix 1318 # Drop suffix
1319 name = file.rsplit(".",1)[0] 1319 name = file.rsplit(".",1)[0]
1320 # Find all combinations 1320 # Find all combinations
1321 combos = get_combinations(name) 1321 combos = get_combinations(name)
1322 for combo in combos: 1322 for combo in combos:
1323 if not combo in sonames: 1323 if not combo in sonames:
1324 sonames.append(combo) 1324 sonames.append(combo)
1325 if file.endswith('.dylib') or file.endswith('.so'): 1325 if file.endswith('.dylib') or file.endswith('.so'):
1326 lafile = fullpath.replace(os.path.join(pkgdest, pkg), d.getVar('PKGD', True)) 1326 lafile = fullpath.replace(os.path.join(pkgdest, pkg), d.getVar('PKGD', True))
1327 # Drop suffix 1327 # Drop suffix
1328 lafile = lafile.rsplit(".",1)[0] 1328 lafile = lafile.rsplit(".",1)[0]
1329 lapath = os.path.dirname(lafile) 1329 lapath = os.path.dirname(lafile)
1330 lafile = os.path.basename(lafile) 1330 lafile = os.path.basename(lafile)
1331 # Find all combinations 1331 # Find all combinations
1332 combos = get_combinations(lafile) 1332 combos = get_combinations(lafile)
1333 for combo in combos: 1333 for combo in combos:
1334 if os.path.exists(lapath + '/' + combo + '.la'): 1334 if os.path.exists(lapath + '/' + combo + '.la'):
1335 break 1335 break
1336 lafile = lapath + '/' + combo + '.la' 1336 lafile = lapath + '/' + combo + '.la'
1337 1337
1338 #bb.note("Foo2: %s" % lafile) 1338 #bb.note("Foo2: %s" % lafile)
1339 #bb.note("Foo %s %s" % (file, fullpath)) 1339 #bb.note("Foo %s %s" % (file, fullpath))
1340 if os.path.exists(lafile): 1340 if os.path.exists(lafile):
1341 fd = open(lafile, 'r') 1341 fd = open(lafile, 'r')
1342 lines = fd.readlines() 1342 lines = fd.readlines()
1343 fd.close() 1343 fd.close()
1344 for l in lines: 1344 for l in lines:
1345 m = re.match("\s*dependency_libs=\s*'(.*)'", l) 1345 m = re.match("\s*dependency_libs=\s*'(.*)'", l)
1346 if m: 1346 if m:
1347 deps = m.group(1).split(" ") 1347 deps = m.group(1).split(" ")
1348 for dep in deps: 1348 for dep in deps:
1349 #bb.note("Trying %s for %s" % (dep, pkg)) 1349 #bb.note("Trying %s for %s" % (dep, pkg))
1350 name = None 1350 name = None
1351 if dep.endswith(".la"): 1351 if dep.endswith(".la"):
1352 name = os.path.basename(dep).replace(".la", "") 1352 name = os.path.basename(dep).replace(".la", "")
1353 elif dep.startswith("-l"): 1353 elif dep.startswith("-l"):
1354 name = dep.replace("-l", "lib") 1354 name = dep.replace("-l", "lib")
1355 if pkg not in needed: 1355 if pkg not in needed:
1356 needed[pkg] = [] 1356 needed[pkg] = []
1357 if name and name not in needed[pkg]: 1357 if name and name not in needed[pkg]:
1358 needed[pkg].append(name) 1358 needed[pkg].append(name)
1359 #bb.note("Adding %s for %s" % (name, pkg)) 1359 #bb.note("Adding %s for %s" % (name, pkg))
1360 1360
1361 if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS', True) == "1": 1361 if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS', True) == "1":
1362 snap_symlinks = True 1362 snap_symlinks = True
1363 else: 1363 else:
1364 snap_symlinks = False 1364 snap_symlinks = False
1365 1365
1366 if (d.getVar('USE_LDCONFIG', True) or "1") == "1": 1366 if (d.getVar('USE_LDCONFIG', True) or "1") == "1":
1367 use_ldconfig = True 1367 use_ldconfig = True
1368 else: 1368 else:
1369 use_ldconfig = False 1369 use_ldconfig = False
1370 1370
1371 needed = {} 1371 needed = {}
1372 shlib_provider = {} 1372 shlib_provider = {}
1373 for pkg in packages.split(): 1373 for pkg in packages.split():
1374 private_libs = d.getVar('PRIVATE_LIBS_' + pkg, True) or d.getVar('PRIVATE_LIBS', True) 1374 private_libs = d.getVar('PRIVATE_LIBS_' + pkg, True) or d.getVar('PRIVATE_LIBS', True)
1375 needs_ldconfig = False 1375 needs_ldconfig = False
1376 bb.debug(2, "calculating shlib provides for %s" % pkg) 1376 bb.debug(2, "calculating shlib provides for %s" % pkg)
1377 1377
1378 pkgver = d.getVar('PKGV_' + pkg, True) 1378 pkgver = d.getVar('PKGV_' + pkg, True)
1379 if not pkgver: 1379 if not pkgver:
1380 pkgver = d.getVar('PV_' + pkg, True) 1380 pkgver = d.getVar('PV_' + pkg, True)
1381 if not pkgver: 1381 if not pkgver:
1382 pkgver = ver 1382 pkgver = ver
1383 1383
1384 needed[pkg] = [] 1384 needed[pkg] = []
1385 sonames = list() 1385 sonames = list()
1386 renames = list() 1386 renames = list()
1387 top = os.path.join(pkgdest, pkg) 1387 top = os.path.join(pkgdest, pkg)
1388 for root, dirs, files in os.walk(top): 1388 for root, dirs, files in os.walk(top):
1389 for file in files: 1389 for file in files:
1390 soname = None 1390 soname = None
1391 path = os.path.join(root, file) 1391 path = os.path.join(root, file)
1392 if os.path.islink(path): 1392 if os.path.islink(path):
1393 continue 1393 continue
1394 if targetos == "darwin" or targetos == "darwin8": 1394 if targetos == "darwin" or targetos == "darwin8":
1395 darwin_so(root, dirs, file) 1395 darwin_so(root, dirs, file)
1396 elif os.access(path, os.X_OK) or lib_re.match(file): 1396 elif os.access(path, os.X_OK) or lib_re.match(file):
1397 ldconfig = linux_so(root, dirs, file) 1397 ldconfig = linux_so(root, dirs, file)
1398 needs_ldconfig = needs_ldconfig or ldconfig 1398 needs_ldconfig = needs_ldconfig or ldconfig
1399 for (old, new) in renames: 1399 for (old, new) in renames:
1400 bb.note("Renaming %s to %s" % (old, new)) 1400 bb.note("Renaming %s to %s" % (old, new))
1401 os.rename(old, new) 1401 os.rename(old, new)
1402 shlibs_file = os.path.join(shlibswork_dir, pkg + ".list") 1402 shlibs_file = os.path.join(shlibswork_dir, pkg + ".list")
1403 shver_file = os.path.join(shlibswork_dir, pkg + ".ver") 1403 shver_file = os.path.join(shlibswork_dir, pkg + ".ver")
1404 if len(sonames): 1404 if len(sonames):
1405 fd = open(shlibs_file, 'w') 1405 fd = open(shlibs_file, 'w')
1406 for s in sonames: 1406 for s in sonames:
1407 fd.write(s + '\n') 1407 fd.write(s + '\n')
1408 shlib_provider[s] = (pkg, pkgver) 1408 shlib_provider[s] = (pkg, pkgver)
1409 fd.close() 1409 fd.close()
1410 fd = open(shver_file, 'w') 1410 fd = open(shver_file, 'w')
1411 fd.write(pkgver + '\n') 1411 fd.write(pkgver + '\n')
1412 fd.close() 1412 fd.close()
1413 if needs_ldconfig and use_ldconfig: 1413 if needs_ldconfig and use_ldconfig:
1414 bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg) 1414 bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
1415 postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True) 1415 postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True)
1416 if not postinst: 1416 if not postinst:
1417 postinst = '#!/bin/sh\n' 1417 postinst = '#!/bin/sh\n'
1418 postinst += d.getVar('ldconfig_postinst_fragment', True) 1418 postinst += d.getVar('ldconfig_postinst_fragment', True)
1419 d.setVar('pkg_postinst_%s' % pkg, postinst) 1419 d.setVar('pkg_postinst_%s' % pkg, postinst)
1420 1420
1421 list_re = re.compile('^(.*)\.list$') 1421 list_re = re.compile('^(.*)\.list$')
1422 for dir in [shlibs_dir]: 1422 for dir in [shlibs_dir]:
1423 if not os.path.exists(dir): 1423 if not os.path.exists(dir):
1424 continue 1424 continue
1425 for file in os.listdir(dir): 1425 for file in os.listdir(dir):
1426 m = list_re.match(file) 1426 m = list_re.match(file)
1427 if m: 1427 if m:
1428 dep_pkg = m.group(1) 1428 dep_pkg = m.group(1)
1429 fd = open(os.path.join(dir, file)) 1429 fd = open(os.path.join(dir, file))
1430 lines = fd.readlines() 1430 lines = fd.readlines()
1431 fd.close() 1431 fd.close()
1432 ver_file = os.path.join(dir, dep_pkg + '.ver') 1432 ver_file = os.path.join(dir, dep_pkg + '.ver')
1433 lib_ver = None 1433 lib_ver = None
1434 if os.path.exists(ver_file): 1434 if os.path.exists(ver_file):
1435 fd = open(ver_file) 1435 fd = open(ver_file)
1436 lib_ver = fd.readline().rstrip() 1436 lib_ver = fd.readline().rstrip()
1437 fd.close() 1437 fd.close()
1438 for l in lines: 1438 for l in lines:
1439 shlib_provider[l.rstrip()] = (dep_pkg, lib_ver) 1439 shlib_provider[l.rstrip()] = (dep_pkg, lib_ver)
1440 1440
1441 bb.utils.unlockfile(lf) 1441 bb.utils.unlockfile(lf)
1442 1442
1443 assumed_libs = d.getVar('ASSUME_SHLIBS', True) 1443 assumed_libs = d.getVar('ASSUME_SHLIBS', True)
1444 if assumed_libs: 1444 if assumed_libs:
1445 for e in assumed_libs.split(): 1445 for e in assumed_libs.split():
1446 l, dep_pkg = e.split(":") 1446 l, dep_pkg = e.split(":")
1447 lib_ver = None 1447 lib_ver = None
1448 dep_pkg = dep_pkg.rsplit("_", 1) 1448 dep_pkg = dep_pkg.rsplit("_", 1)
1449 if len(dep_pkg) == 2: 1449 if len(dep_pkg) == 2:
1450 lib_ver = dep_pkg[1] 1450 lib_ver = dep_pkg[1]
1451 dep_pkg = dep_pkg[0] 1451 dep_pkg = dep_pkg[0]
1452 shlib_provider[l] = (dep_pkg, lib_ver) 1452 shlib_provider[l] = (dep_pkg, lib_ver)
1453 1453
1454 for pkg in packages.split(): 1454 for pkg in packages.split():
1455 bb.debug(2, "calculating shlib requirements for %s" % pkg) 1455 bb.debug(2, "calculating shlib requirements for %s" % pkg)
1456 1456
1457 deps = list() 1457 deps = list()
1458 for n in needed[pkg]: 1458 for n in needed[pkg]:
1459 if n in shlib_provider.keys(): 1459 if n in shlib_provider.keys():
1460 (dep_pkg, ver_needed) = shlib_provider[n] 1460 (dep_pkg, ver_needed) = shlib_provider[n]
1461 1461
1462 bb.debug(2, '%s: Dependency %s requires package %s' % (pkg, n, dep_pkg)) 1462 bb.debug(2, '%s: Dependency %s requires package %s' % (pkg, n, dep_pkg))
1463 1463
1464 if dep_pkg == pkg: 1464 if dep_pkg == pkg:
1465 continue 1465 continue
1466 1466
1467 if ver_needed: 1467 if ver_needed:
1468 dep = "%s (>= %s)" % (dep_pkg, ver_needed) 1468 dep = "%s (>= %s)" % (dep_pkg, ver_needed)
1469 else: 1469 else:
1470 dep = dep_pkg 1470 dep = dep_pkg
1471 if not dep in deps: 1471 if not dep in deps:
1472 deps.append(dep) 1472 deps.append(dep)
1473 else: 1473 else:
1474 bb.note("Couldn't find shared library provider for %s" % n) 1474 bb.note("Couldn't find shared library provider for %s" % n)
1475 1475
1476 deps_file = os.path.join(pkgdest, pkg + ".shlibdeps") 1476 deps_file = os.path.join(pkgdest, pkg + ".shlibdeps")
1477 if os.path.exists(deps_file): 1477 if os.path.exists(deps_file):
1478 os.remove(deps_file) 1478 os.remove(deps_file)
1479 if len(deps): 1479 if len(deps):
1480 fd = open(deps_file, 'w') 1480 fd = open(deps_file, 'w')
1481 for dep in deps: 1481 for dep in deps:
1482 fd.write(dep + '\n') 1482 fd.write(dep + '\n')
1483 fd.close() 1483 fd.close()
1484} 1484}
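
The ASSUME_SHLIBS handling above lets extra soname-to-package mappings be supplied by hand, for libraries whose providers are not discovered from the packages being built: each whitespace-separated entry is "soname:package", optionally with a trailing "_version". A standalone restatement of that parsing; the library and package names in the comments are examples only:

    def parse_assume_shlib(entry):
        soname, dep_pkg = entry.split(":")
        lib_ver = None
        dep_pkg = dep_pkg.rsplit("_", 1)
        if len(dep_pkg) == 2:
            lib_ver = dep_pkg[1]
        return soname, dep_pkg[0], lib_ver

    # parse_assume_shlib("libfoo.so.1:libfoo-bin")     -> ("libfoo.so.1", "libfoo-bin", None)
    # parse_assume_shlib("libfoo.so.1:libfoo-bin_1.2") -> ("libfoo.so.1", "libfoo-bin", "1.2")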
1485 1485
1486python package_do_pkgconfig () { 1486python package_do_pkgconfig () {
1487 import re 1487 import re
1488 1488
1489 packages = d.getVar('PACKAGES', True) 1489 packages = d.getVar('PACKAGES', True)
1490 workdir = d.getVar('WORKDIR', True) 1490 workdir = d.getVar('WORKDIR', True)
1491 pkgdest = d.getVar('PKGDEST', True) 1491 pkgdest = d.getVar('PKGDEST', True)
1492 1492
1493 shlibs_dir = d.getVar('SHLIBSDIR', True) 1493 shlibs_dir = d.getVar('SHLIBSDIR', True)
1494 shlibswork_dir = d.getVar('SHLIBSWORKDIR', True) 1494 shlibswork_dir = d.getVar('SHLIBSWORKDIR', True)
1495 1495
1496 pc_re = re.compile('(.*)\.pc$') 1496 pc_re = re.compile('(.*)\.pc$')
1497 var_re = re.compile('(.*)=(.*)') 1497 var_re = re.compile('(.*)=(.*)')
1498 field_re = re.compile('(.*): (.*)') 1498 field_re = re.compile('(.*): (.*)')
1499 1499
1500 pkgconfig_provided = {} 1500 pkgconfig_provided = {}
1501 pkgconfig_needed = {} 1501 pkgconfig_needed = {}
1502 for pkg in packages.split(): 1502 for pkg in packages.split():
1503 pkgconfig_provided[pkg] = [] 1503 pkgconfig_provided[pkg] = []
1504 pkgconfig_needed[pkg] = [] 1504 pkgconfig_needed[pkg] = []
1505 top = os.path.join(pkgdest, pkg) 1505 top = os.path.join(pkgdest, pkg)
1506 for root, dirs, files in os.walk(top): 1506 for root, dirs, files in os.walk(top):
1507 for file in files: 1507 for file in files:
1508 m = pc_re.match(file) 1508 m = pc_re.match(file)
1509 if m: 1509 if m:
1510 pd = bb.data.init() 1510 pd = bb.data.init()
1511 name = m.group(1) 1511 name = m.group(1)
1512 pkgconfig_provided[pkg].append(name) 1512 pkgconfig_provided[pkg].append(name)
1513 path = os.path.join(root, file) 1513 path = os.path.join(root, file)
1514 if not os.access(path, os.R_OK): 1514 if not os.access(path, os.R_OK):
1515 continue 1515 continue
1516 f = open(path, 'r') 1516 f = open(path, 'r')
1517 lines = f.readlines() 1517 lines = f.readlines()
1518 f.close() 1518 f.close()
1519 for l in lines: 1519 for l in lines:
1520 m = var_re.match(l) 1520 m = var_re.match(l)
1521 if m: 1521 if m:
1522 name = m.group(1) 1522 name = m.group(1)
1523 val = m.group(2) 1523 val = m.group(2)
1524 pd.setVar(name, pd.expand(val)) 1524 pd.setVar(name, pd.expand(val))
1525 continue 1525 continue
1526 m = field_re.match(l) 1526 m = field_re.match(l)
1527 if m: 1527 if m:
1528 hdr = m.group(1) 1528 hdr = m.group(1)
1529 exp = bb.data.expand(m.group(2), pd) 1529 exp = bb.data.expand(m.group(2), pd)
1530 if hdr == 'Requires': 1530 if hdr == 'Requires':
1531 pkgconfig_needed[pkg] += exp.replace(',', ' ').split() 1531 pkgconfig_needed[pkg] += exp.replace(',', ' ').split()
1532 1532
1533 # Take shared lock since we're only reading, not writing 1533 # Take shared lock since we're only reading, not writing
1534 lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}")) 1534 lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"))
1535 1535
1536 for pkg in packages.split(): 1536 for pkg in packages.split():
1537 pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist") 1537 pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist")
1538 if pkgconfig_provided[pkg] != []: 1538 if pkgconfig_provided[pkg] != []:
1539 f = open(pkgs_file, 'w') 1539 f = open(pkgs_file, 'w')
1540 for p in pkgconfig_provided[pkg]: 1540 for p in pkgconfig_provided[pkg]:
1541 f.write('%s\n' % p) 1541 f.write('%s\n' % p)
1542 f.close() 1542 f.close()
1543 1543
1544 for dir in [shlibs_dir]: 1544 for dir in [shlibs_dir]:
1545 if not os.path.exists(dir): 1545 if not os.path.exists(dir):
1546 continue 1546 continue
1547 for file in os.listdir(dir): 1547 for file in os.listdir(dir):
1548 m = re.match('^(.*)\.pclist$', file) 1548 m = re.match('^(.*)\.pclist$', file)
1549 if m: 1549 if m:
1550 pkg = m.group(1) 1550 pkg = m.group(1)
1551 fd = open(os.path.join(dir, file)) 1551 fd = open(os.path.join(dir, file))
1552 lines = fd.readlines() 1552 lines = fd.readlines()
1553 fd.close() 1553 fd.close()
1554 pkgconfig_provided[pkg] = [] 1554 pkgconfig_provided[pkg] = []
1555 for l in lines: 1555 for l in lines:
1556 pkgconfig_provided[pkg].append(l.rstrip()) 1556 pkgconfig_provided[pkg].append(l.rstrip())
1557 1557
1558 for pkg in packages.split(): 1558 for pkg in packages.split():
1559 deps = [] 1559 deps = []
1560 for n in pkgconfig_needed[pkg]: 1560 for n in pkgconfig_needed[pkg]:
1561 found = False 1561 found = False
1562 for k in pkgconfig_provided.keys(): 1562 for k in pkgconfig_provided.keys():
1563 if n in pkgconfig_provided[k]: 1563 if n in pkgconfig_provided[k]:
1564 if k != pkg and not (k in deps): 1564 if k != pkg and not (k in deps):
1565 deps.append(k) 1565 deps.append(k)
1566 found = True 1566 found = True
1567 if found == False: 1567 if found == False:
1568 bb.note("couldn't find pkgconfig module '%s' in any package" % n) 1568 bb.note("couldn't find pkgconfig module '%s' in any package" % n)
1569 deps_file = os.path.join(pkgdest, pkg + ".pcdeps") 1569 deps_file = os.path.join(pkgdest, pkg + ".pcdeps")
1570 if len(deps): 1570 if len(deps):
1571 fd = open(deps_file, 'w') 1571 fd = open(deps_file, 'w')
1572 for dep in deps: 1572 for dep in deps:
1573 fd.write(dep + '\n') 1573 fd.write(dep + '\n')
1574 fd.close() 1574 fd.close()
1575 1575
1576 bb.utils.unlockfile(lf) 1576 bb.utils.unlockfile(lf)
1577} 1577}
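
The scan above only needs two things from each shipped .pc file: the "name=value" variable lines, which are loaded into a temporary datastore so fields can be expanded, and the "Requires:" field, whose module names are later matched against the modules recorded in the per-package .pclist files. A trimmed sketch of the field extraction, assuming nothing beyond the re module and a hypothetical foo.pc as input:

    import re

    def pc_requires(lines):
        # e.g. the lines of a foo.pc containing "Requires: glib-2.0, gthread-2.0"
        field_re = re.compile(r'(.*): (.*)')
        needed = []
        for l in lines:
            m = field_re.match(l)
            if m and m.group(1) == 'Requires':
                needed += m.group(2).replace(',', ' ').split()
        return needed

    # pc_requires(["Name: foo", "Requires: glib-2.0, gthread-2.0"])
    #   -> ['glib-2.0', 'gthread-2.0']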
1578 1578
1579python read_shlibdeps () { 1579python read_shlibdeps () {
1580 packages = d.getVar('PACKAGES', True).split() 1580 packages = d.getVar('PACKAGES', True).split()
1581 for pkg in packages: 1581 for pkg in packages:
1582 rdepends = bb.utils.explode_dep_versions(d.getVar('RDEPENDS_' + pkg, False) or d.getVar('RDEPENDS', False) or "") 1582 rdepends = bb.utils.explode_dep_versions(d.getVar('RDEPENDS_' + pkg, False) or d.getVar('RDEPENDS', False) or "")
1583 1583
1584 for extension in ".shlibdeps", ".pcdeps", ".clilibdeps": 1584 for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
1585 depsfile = d.expand("${PKGDEST}/" + pkg + extension) 1585 depsfile = d.expand("${PKGDEST}/" + pkg + extension)
1586 if os.access(depsfile, os.R_OK): 1586 if os.access(depsfile, os.R_OK):
1587 fd = file(depsfile) 1587 fd = file(depsfile)
1588 lines = fd.readlines() 1588 lines = fd.readlines()
1589 fd.close() 1589 fd.close()
1590 for l in lines: 1590 for l in lines:
1591 rdepends[l.rstrip()] = "" 1591 rdepends[l.rstrip()] = ""
1592 d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False)) 1592 d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False))
1593} 1593}
1594 1594
1595python package_depchains() { 1595python package_depchains() {
1596 """ 1596 """
1597 For a given set of prefix and postfix modifiers, make those packages 1597 For a given set of prefix and postfix modifiers, make those packages
 1598    RRECOMMENDS on the corresponding packages for their RDEPENDS. 1598    RRECOMMENDS on the corresponding packages for their RDEPENDS.
1599 1599
1600 Example: If package A depends upon package B, and A's .bb emits an 1600 Example: If package A depends upon package B, and A's .bb emits an
1601 A-dev package, this would make A-dev Recommends: B-dev. 1601 A-dev package, this would make A-dev Recommends: B-dev.
1602 1602
1603 If only one of a given suffix is specified, it will take the RRECOMMENDS 1603 If only one of a given suffix is specified, it will take the RRECOMMENDS
1604 based on the RDEPENDS of *all* other packages. If more than one of a given 1604 based on the RDEPENDS of *all* other packages. If more than one of a given
 1605    suffix is specified, it will only use the RDEPENDS of the single parent 1605    suffix is specified, it will only use the RDEPENDS of the single parent
1606 package. 1606 package.
1607 """ 1607 """
1608 1608
1609 packages = d.getVar('PACKAGES', True) 1609 packages = d.getVar('PACKAGES', True)
1610 postfixes = (d.getVar('DEPCHAIN_POST', True) or '').split() 1610 postfixes = (d.getVar('DEPCHAIN_POST', True) or '').split()
1611 prefixes = (d.getVar('DEPCHAIN_PRE', True) or '').split() 1611 prefixes = (d.getVar('DEPCHAIN_PRE', True) or '').split()
1612 1612
1613 def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d): 1613 def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d):
1614 1614
1615 #bb.note('depends for %s is %s' % (base, depends)) 1615 #bb.note('depends for %s is %s' % (base, depends))
1616 rreclist = bb.utils.explode_dep_versions(d.getVar('RRECOMMENDS_' + pkg, True) or d.getVar('RRECOMMENDS', True) or "") 1616 rreclist = bb.utils.explode_dep_versions(d.getVar('RRECOMMENDS_' + pkg, True) or d.getVar('RRECOMMENDS', True) or "")
1617 1617
1618 for depend in depends: 1618 for depend in depends:
1619 if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'): 1619 if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'):
1620 #bb.note("Skipping %s" % depend) 1620 #bb.note("Skipping %s" % depend)
1621 continue 1621 continue
1622 if depend.endswith('-dev'): 1622 if depend.endswith('-dev'):
1623 depend = depend.replace('-dev', '') 1623 depend = depend.replace('-dev', '')
1624 if depend.endswith('-dbg'): 1624 if depend.endswith('-dbg'):
1625 depend = depend.replace('-dbg', '') 1625 depend = depend.replace('-dbg', '')
1626 pkgname = getname(depend, suffix) 1626 pkgname = getname(depend, suffix)
1627 #bb.note("Adding %s for %s" % (pkgname, depend)) 1627 #bb.note("Adding %s for %s" % (pkgname, depend))
1628 if pkgname not in rreclist: 1628 if pkgname not in rreclist:
1629 rreclist[pkgname] = "" 1629 rreclist[pkgname] = ""
1630 1630
1631 #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist))) 1631 #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist)))
1632 d.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False)) 1632 d.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))
1633 1633
1634 def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d): 1634 def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):
1635 1635
1636 #bb.note('rdepends for %s is %s' % (base, rdepends)) 1636 #bb.note('rdepends for %s is %s' % (base, rdepends))
1637 rreclist = bb.utils.explode_dep_versions(d.getVar('RRECOMMENDS_' + pkg, True) or d.getVar('RRECOMMENDS', True) or "") 1637 rreclist = bb.utils.explode_dep_versions(d.getVar('RRECOMMENDS_' + pkg, True) or d.getVar('RRECOMMENDS', True) or "")
1638 1638
1639 for depend in rdepends: 1639 for depend in rdepends:
1640 if depend.find('virtual-locale-') != -1: 1640 if depend.find('virtual-locale-') != -1:
1641 #bb.note("Skipping %s" % depend) 1641 #bb.note("Skipping %s" % depend)
1642 continue 1642 continue
1643 if depend.endswith('-dev'): 1643 if depend.endswith('-dev'):
1644 depend = depend.replace('-dev', '') 1644 depend = depend.replace('-dev', '')
1645 if depend.endswith('-dbg'): 1645 if depend.endswith('-dbg'):
1646 depend = depend.replace('-dbg', '') 1646 depend = depend.replace('-dbg', '')
1647 pkgname = getname(depend, suffix) 1647 pkgname = getname(depend, suffix)
1648 #bb.note("Adding %s for %s" % (pkgname, depend)) 1648 #bb.note("Adding %s for %s" % (pkgname, depend))
1649 if pkgname not in rreclist: 1649 if pkgname not in rreclist:
1650 rreclist[pkgname] = "" 1650 rreclist[pkgname] = ""
1651 1651
1652 #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist))) 1652 #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist)))
1653 d.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False)) 1653 d.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))
1654 1654
1655 def add_dep(list, dep): 1655 def add_dep(list, dep):
1656 dep = dep.split(' (')[0].strip() 1656 dep = dep.split(' (')[0].strip()
1657 if dep not in list: 1657 if dep not in list:
1658 list.append(dep) 1658 list.append(dep)
1659 1659
1660 depends = [] 1660 depends = []
1661 for dep in bb.utils.explode_deps(d.getVar('DEPENDS', True) or ""): 1661 for dep in bb.utils.explode_deps(d.getVar('DEPENDS', True) or ""):
1662 add_dep(depends, dep) 1662 add_dep(depends, dep)
1663 1663
1664 rdepends = [] 1664 rdepends = []
1665 for dep in bb.utils.explode_deps(d.getVar('RDEPENDS', True) or ""): 1665 for dep in bb.utils.explode_deps(d.getVar('RDEPENDS', True) or ""):
1666 add_dep(rdepends, dep) 1666 add_dep(rdepends, dep)
1667 1667
1668 for pkg in packages.split(): 1668 for pkg in packages.split():
1669 for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + pkg, True) or ""): 1669 for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + pkg, True) or ""):
1670 add_dep(rdepends, dep) 1670 add_dep(rdepends, dep)
1671 1671
1672 #bb.note('rdepends is %s' % rdepends) 1672 #bb.note('rdepends is %s' % rdepends)
1673 1673
1674 def post_getname(name, suffix): 1674 def post_getname(name, suffix):
1675 return '%s%s' % (name, suffix) 1675 return '%s%s' % (name, suffix)
1676 def pre_getname(name, suffix): 1676 def pre_getname(name, suffix):
1677 return '%s%s' % (suffix, name) 1677 return '%s%s' % (suffix, name)
1678 1678
1679 pkgs = {} 1679 pkgs = {}
1680 for pkg in packages.split(): 1680 for pkg in packages.split():
1681 for postfix in postfixes: 1681 for postfix in postfixes:
1682 if pkg.endswith(postfix): 1682 if pkg.endswith(postfix):
1683 if not postfix in pkgs: 1683 if not postfix in pkgs:
1684 pkgs[postfix] = {} 1684 pkgs[postfix] = {}
1685 pkgs[postfix][pkg] = (pkg[:-len(postfix)], post_getname) 1685 pkgs[postfix][pkg] = (pkg[:-len(postfix)], post_getname)
1686 1686
1687 for prefix in prefixes: 1687 for prefix in prefixes:
1688 if pkg.startswith(prefix): 1688 if pkg.startswith(prefix):
1689 if not prefix in pkgs: 1689 if not prefix in pkgs:
1690 pkgs[prefix] = {} 1690 pkgs[prefix] = {}
1691 pkgs[prefix][pkg] = (pkg[:-len(prefix)], pre_getname) 1691 pkgs[prefix][pkg] = (pkg[:-len(prefix)], pre_getname)
1692 1692
1693 for suffix in pkgs: 1693 for suffix in pkgs:
1694 for pkg in pkgs[suffix]: 1694 for pkg in pkgs[suffix]:
1695 if d.getVarFlag('RRECOMMENDS_' + pkg, 'nodeprrecs'): 1695 if d.getVarFlag('RRECOMMENDS_' + pkg, 'nodeprrecs'):
1696 continue 1696 continue
1697 (base, func) = pkgs[suffix][pkg] 1697 (base, func) = pkgs[suffix][pkg]
1698 if suffix == "-dev": 1698 if suffix == "-dev":
1699 pkg_adddeprrecs(pkg, base, suffix, func, depends, d) 1699 pkg_adddeprrecs(pkg, base, suffix, func, depends, d)
1700 if len(pkgs[suffix]) == 1: 1700 if len(pkgs[suffix]) == 1:
1701 pkg_addrrecs(pkg, base, suffix, func, rdepends, d) 1701 pkg_addrrecs(pkg, base, suffix, func, rdepends, d)
1702 else: 1702 else:
1703 rdeps = [] 1703 rdeps = []
1704 for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + base, True) or d.getVar('RDEPENDS', True) or ""): 1704 for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + base, True) or d.getVar('RDEPENDS', True) or ""):
1705 add_dep(rdeps, dep) 1705 add_dep(rdeps, dep)
1706 pkg_addrrecs(pkg, base, suffix, func, rdeps, d) 1706 pkg_addrrecs(pkg, base, suffix, func, rdeps, d)
1707} 1707}
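
As a concrete illustration of the docstring above: the helpers map each dependency name onto its same-suffix (or same-prefix) neighbour via getname(), so a "-dev" package collects RRECOMMENDS on the "-dev" counterparts of its base package's dependencies. A minimal standalone sketch of that name mapping, using hypothetical dependency names:

    def post_getname(name, suffix):
        return '%s%s' % (name, suffix)

    rdepends = ["bar", "baz"]          # hypothetical deps of a package "foo"
    suffix = "-dev"
    recommends = [post_getname(dep, suffix) for dep in rdepends]
    print(recommends)                  # ['bar-dev', 'baz-dev']
    # these are the names merged into the -dev package's RRECOMMENDS above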
1708 1708
1709# Since bitbake can't determine which variables are accessed during package 1709# Since bitbake can't determine which variables are accessed during package
1710# iteration, we need to list them here: 1710# iteration, we need to list them here:
1711PACKAGEVARS = "FILES RDEPENDS RRECOMMENDS SUMMARY DESCRIPTION RSUGGESTS RPROVIDES RCONFLICTS PKG ALLOW_EMPTY pkg_postinst pkg_postrm INITSCRIPT_NAME INITSCRIPT_PARAMS DEBIAN_NOAUTONAME ALTERNATIVE PKGE PKGV PKGR" 1711PACKAGEVARS = "FILES RDEPENDS RRECOMMENDS SUMMARY DESCRIPTION RSUGGESTS RPROVIDES RCONFLICTS PKG ALLOW_EMPTY pkg_postinst pkg_postrm INITSCRIPT_NAME INITSCRIPT_PARAMS DEBIAN_NOAUTONAME ALTERNATIVE PKGE PKGV PKGR"
1712 1712
@@ -1720,44 +1720,44 @@ def gen_packagevar(d):
1720 return " ".join(ret) 1720 return " ".join(ret)
1721 1721
1722PACKAGE_PREPROCESS_FUNCS ?= "" 1722PACKAGE_PREPROCESS_FUNCS ?= ""
1723PACKAGEFUNCS ?= "package_get_auto_pr \ 1723PACKAGEFUNCS ?= "package_get_auto_pr \
1724 perform_packagecopy \ 1724 perform_packagecopy \
1725 ${PACKAGE_PREPROCESS_FUNCS} \ 1725 ${PACKAGE_PREPROCESS_FUNCS} \
1726 package_do_split_locales \ 1726 package_do_split_locales \
1727 split_and_strip_files \ 1727 split_and_strip_files \
1728 fixup_perms \ 1728 fixup_perms \
1729 populate_packages \ 1729 populate_packages \
1730 package_do_filedeps \ 1730 package_do_filedeps \
1731 package_do_shlibs \ 1731 package_do_shlibs \
1732 package_do_pkgconfig \ 1732 package_do_pkgconfig \
1733 read_shlibdeps \ 1733 read_shlibdeps \
1734 package_depchains \ 1734 package_depchains \
1735 emit_pkgdata" 1735 emit_pkgdata"
1736 1736
1737python do_package () { 1737python do_package () {
1738 # Change the following version to cause sstate to invalidate the package 1738 # Change the following version to cause sstate to invalidate the package
1739 # cache. This is useful if an item this class depends on changes in a 1739 # cache. This is useful if an item this class depends on changes in a
1740 # way that the output of this class changes. rpmdeps is a good example 1740 # way that the output of this class changes. rpmdeps is a good example
1741 # as any change to rpmdeps requires this to be rerun. 1741 # as any change to rpmdeps requires this to be rerun.
1742 # PACKAGE_BBCLASS_VERSION = "1" 1742 # PACKAGE_BBCLASS_VERSION = "1"
1743 1743
1744 packages = (d.getVar('PACKAGES', True) or "").split() 1744 packages = (d.getVar('PACKAGES', True) or "").split()
1745 if len(packages) < 1: 1745 if len(packages) < 1:
1746 bb.debug(1, "No packages to build, skipping do_package") 1746 bb.debug(1, "No packages to build, skipping do_package")
1747 return 1747 return
1748 1748
1749 workdir = d.getVar('WORKDIR', True) 1749 workdir = d.getVar('WORKDIR', True)
1750 outdir = d.getVar('DEPLOY_DIR', True) 1750 outdir = d.getVar('DEPLOY_DIR', True)
1751 dest = d.getVar('D', True) 1751 dest = d.getVar('D', True)
1752 dvar = d.getVar('PKGD', True) 1752 dvar = d.getVar('PKGD', True)
1753 pn = d.getVar('PN', True) 1753 pn = d.getVar('PN', True)
1754 1754
1755 if not workdir or not outdir or not dest or not dvar or not pn or not packages: 1755 if not workdir or not outdir or not dest or not dvar or not pn or not packages:
1756 bb.error("WORKDIR, DEPLOY_DIR, D, PN and PKGD all must be defined, unable to package") 1756 bb.error("WORKDIR, DEPLOY_DIR, D, PN and PKGD all must be defined, unable to package")
1757 return 1757 return
1758 1758
1759 for f in (d.getVar('PACKAGEFUNCS', True) or '').split(): 1759 for f in (d.getVar('PACKAGEFUNCS', True) or '').split():
1760 bb.build.exec_func(f, d) 1760 bb.build.exec_func(f, d)
1761} 1761}
1762 1762
1763do_package[dirs] = "${SHLIBSWORKDIR} ${PKGDESTWORK} ${D}" 1763do_package[dirs] = "${SHLIBSWORKDIR} ${PKGDESTWORK} ${D}"
@@ -1775,7 +1775,7 @@ do_package[stamp-extra-info] = "${MACHINE}"
1775do_package_setscene[dirs] = "${STAGING_DIR}" 1775do_package_setscene[dirs] = "${STAGING_DIR}"
1776 1776
1777python do_package_setscene () { 1777python do_package_setscene () {
1778 sstate_setscene(d) 1778 sstate_setscene(d)
1779} 1779}
1780addtask do_package_setscene 1780addtask do_package_setscene
1781 1781
@@ -1793,14 +1793,14 @@ addtask package_write before do_build after do_package
1793# 1793#
1794 1794
1795def mapping_rename_hook(d): 1795def mapping_rename_hook(d):
1796 """ 1796 """
1797 Rewrite variables to account for package renaming in things 1797 Rewrite variables to account for package renaming in things
1798 like debian.bbclass or manual PKG variable name changes 1798 like debian.bbclass or manual PKG variable name changes
1799 """ 1799 """
1800 runtime_mapping_rename("RDEPENDS", d) 1800 runtime_mapping_rename("RDEPENDS", d)
1801 runtime_mapping_rename("RRECOMMENDS", d) 1801 runtime_mapping_rename("RRECOMMENDS", d)
1802 runtime_mapping_rename("RSUGGESTS", d) 1802 runtime_mapping_rename("RSUGGESTS", d)
1803 runtime_mapping_rename("RPROVIDES", d) 1803 runtime_mapping_rename("RPROVIDES", d)
1804 runtime_mapping_rename("RREPLACES", d) 1804 runtime_mapping_rename("RREPLACES", d)
1805 runtime_mapping_rename("RCONFLICTS", d) 1805 runtime_mapping_rename("RCONFLICTS", d)
1806 1806