Diffstat (limited to 'meta/classes/package.bbclass')
-rw-r--r--  meta/classes/package.bbclass  282
1 file changed, 141 insertions(+), 141 deletions(-)
diff --git a/meta/classes/package.bbclass b/meta/classes/package.bbclass
index a6f0a7a63d..27034653b8 100644
--- a/meta/classes/package.bbclass
+++ b/meta/classes/package.bbclass
@@ -120,7 +120,7 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
 
     """
 
-    dvar = d.getVar('PKGD', True)
+    dvar = d.getVar('PKGD')
     root = d.expand(root)
     output_pattern = d.expand(output_pattern)
     extra_depends = d.expand(extra_depends)
@@ -130,7 +130,7 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
     if not os.path.exists(dvar + root):
         return []
 
-    ml = d.getVar("MLPREFIX", True)
+    ml = d.getVar("MLPREFIX")
     if ml:
         if not output_pattern.startswith(ml):
             output_pattern = ml + output_pattern
@@ -145,7 +145,7 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
         extra_depends = " ".join(newdeps)
 
 
-    packages = d.getVar('PACKAGES', True).split()
+    packages = d.getVar('PACKAGES').split()
     split_packages = set()
 
     if postinst:
@@ -163,7 +163,7 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
                 objs.append(relpath)
 
     if extra_depends == None:
-        extra_depends = d.getVar("PN", True)
+        extra_depends = d.getVar("PN")
 
     if not summary:
         summary = description
@@ -189,7 +189,7 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
                 packages = [pkg] + packages
             else:
                 packages.append(pkg)
-        oldfiles = d.getVar('FILES_' + pkg, True)
+        oldfiles = d.getVar('FILES_' + pkg)
         newfile = os.path.join(root, o)
         # These names will be passed through glob() so if the filename actually
         # contains * or ? (rare, but possible) we need to handle that specially
@@ -214,9 +214,9 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
             d.setVar('FILES_' + pkg, oldfiles + " " + newfile)
         if extra_depends != '':
             d.appendVar('RDEPENDS_' + pkg, ' ' + extra_depends)
-        if not d.getVar('DESCRIPTION_' + pkg, True):
+        if not d.getVar('DESCRIPTION_' + pkg):
             d.setVar('DESCRIPTION_' + pkg, description % on)
-        if not d.getVar('SUMMARY_' + pkg, True):
+        if not d.getVar('SUMMARY_' + pkg):
             d.setVar('SUMMARY_' + pkg, summary % on)
         if postinst:
             d.setVar('pkg_postinst_' + pkg, postinst)
@@ -231,9 +231,9 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
 PACKAGE_DEPENDS += "file-native"
 
 python () {
-    if d.getVar('PACKAGES', True) != '':
+    if d.getVar('PACKAGES') != '':
         deps = ""
-        for dep in (d.getVar('PACKAGE_DEPENDS', True) or "").split():
+        for dep in (d.getVar('PACKAGE_DEPENDS') or "").split():
             deps += " %s:do_populate_sysroot" % dep
         d.appendVarFlag('do_package', 'depends', deps)
 
@@ -286,14 +286,14 @@ def files_from_filevars(filevars):
 
 # Called in package_<rpm,ipk,deb>.bbclass to get the correct list of configuration files
 def get_conffiles(pkg, d):
-    pkgdest = d.getVar('PKGDEST', True)
+    pkgdest = d.getVar('PKGDEST')
     root = os.path.join(pkgdest, pkg)
     cwd = os.getcwd()
     os.chdir(root)
 
-    conffiles = d.getVar('CONFFILES_%s' % pkg, True);
+    conffiles = d.getVar('CONFFILES_%s' % pkg);
     if conffiles == None:
-        conffiles = d.getVar('CONFFILES', True)
+        conffiles = d.getVar('CONFFILES')
     if conffiles == None:
         conffiles = ""
     conffiles = conffiles.split()
@@ -318,7 +318,7 @@ def get_conffiles(pkg, d):
     return conf_list
 
 def checkbuildpath(file, d):
-    tmpdir = d.getVar('TMPDIR', True)
+    tmpdir = d.getVar('TMPDIR')
     with open(file) as f:
         file_content = f.read()
         if tmpdir in file_content:
@@ -335,8 +335,8 @@ def splitdebuginfo(file, debugfile, debugsrcdir, sourcefile, d):
 
     import stat
 
-    dvar = d.getVar('PKGD', True)
-    objcopy = d.getVar("OBJCOPY", True)
+    dvar = d.getVar('PKGD')
+    objcopy = d.getVar("OBJCOPY")
     debugedit = d.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit")
 
     # We ignore kernel modules, we don't generate debug info files.
@@ -382,11 +382,11 @@ def copydebugsources(debugsrcdir, d):
 
     sourcefile = d.expand("${WORKDIR}/debugsources.list")
     if debugsrcdir and os.path.isfile(sourcefile):
-        dvar = d.getVar('PKGD', True)
-        strip = d.getVar("STRIP", True)
-        objcopy = d.getVar("OBJCOPY", True)
+        dvar = d.getVar('PKGD')
+        strip = d.getVar("STRIP")
+        objcopy = d.getVar("OBJCOPY")
         debugedit = d.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit")
-        workdir = d.getVar("WORKDIR", True)
+        workdir = d.getVar("WORKDIR")
         workparentdir = os.path.dirname(os.path.dirname(workdir))
         workbasedir = os.path.basename(os.path.dirname(workdir)) + "/" + os.path.basename(workdir)
 
@@ -468,20 +468,20 @@ def get_package_additional_metadata (pkg_type, d):
     return "\n".join(metadata_fields).strip()
 
 def runtime_mapping_rename (varname, pkg, d):
-    #bb.note("%s before: %s" % (varname, d.getVar(varname, True)))
+    #bb.note("%s before: %s" % (varname, d.getVar(varname)))
 
     if bb.data.inherits_class('packagegroup', d):
         return
 
     new_depends = {}
-    deps = bb.utils.explode_dep_versions2(d.getVar(varname, True) or "")
+    deps = bb.utils.explode_dep_versions2(d.getVar(varname) or "")
     for depend in deps:
         new_depend = get_package_mapping(depend, pkg, d)
         new_depends[new_depend] = deps[depend]
 
     d.setVar(varname, bb.utils.join_deps(new_depends, commasep=False))
 
-    #bb.note("%s after: %s" % (varname, d.getVar(varname, True)))
+    #bb.note("%s after: %s" % (varname, d.getVar(varname)))
 
 #
 # Package functions suitable for inclusion in PACKAGEFUNCS
@@ -492,34 +492,34 @@ python package_get_auto_pr() {
     import re
 
     # Support per recipe PRSERV_HOST
-    pn = d.getVar('PN', True)
-    host = d.getVar("PRSERV_HOST_" + pn, True)
+    pn = d.getVar('PN')
+    host = d.getVar("PRSERV_HOST_" + pn)
     if not (host is None):
         d.setVar("PRSERV_HOST", host)
 
-    pkgv = d.getVar("PKGV", True)
+    pkgv = d.getVar("PKGV")
 
     # PR Server not active, handle AUTOINC
-    if not d.getVar('PRSERV_HOST', True):
+    if not d.getVar('PRSERV_HOST'):
         if 'AUTOINC' in pkgv:
             d.setVar("PKGV", pkgv.replace("AUTOINC", "0"))
         return
 
     auto_pr = None
-    pv = d.getVar("PV", True)
-    version = d.getVar("PRAUTOINX", True)
-    pkgarch = d.getVar("PACKAGE_ARCH", True)
-    checksum = d.getVar("BB_TASKHASH", True)
+    pv = d.getVar("PV")
+    version = d.getVar("PRAUTOINX")
+    pkgarch = d.getVar("PACKAGE_ARCH")
+    checksum = d.getVar("BB_TASKHASH")
 
-    if d.getVar('PRSERV_LOCKDOWN', True):
-        auto_pr = d.getVar('PRAUTO_' + version + '_' + pkgarch, True) or d.getVar('PRAUTO_' + version, True) or None
+    if d.getVar('PRSERV_LOCKDOWN'):
+        auto_pr = d.getVar('PRAUTO_' + version + '_' + pkgarch) or d.getVar('PRAUTO_' + version) or None
         if auto_pr is None:
             bb.fatal("Can NOT get PRAUTO from lockdown exported file")
         d.setVar('PRAUTO',str(auto_pr))
         return
 
     try:
-        conn = d.getVar("__PRSERV_CONN", True)
+        conn = d.getVar("__PRSERV_CONN")
         if conn is None:
             conn = oe.prservice.prserv_make_conn(d)
         if conn is not None:
@@ -540,19 +540,19 @@ python package_get_auto_pr() {
 LOCALEBASEPN ??= "${PN}"
 
 python package_do_split_locales() {
-    if (d.getVar('PACKAGE_NO_LOCALE', True) == '1'):
+    if (d.getVar('PACKAGE_NO_LOCALE') == '1'):
         bb.debug(1, "package requested not splitting locales")
         return
 
-    packages = (d.getVar('PACKAGES', True) or "").split()
+    packages = (d.getVar('PACKAGES') or "").split()
 
-    datadir = d.getVar('datadir', True)
+    datadir = d.getVar('datadir')
     if not datadir:
         bb.note("datadir not defined")
         return
 
-    dvar = d.getVar('PKGD', True)
-    pn = d.getVar('LOCALEBASEPN', True)
+    dvar = d.getVar('PKGD')
+    pn = d.getVar('LOCALEBASEPN')
 
     if pn + '-locale' in packages:
         packages.remove(pn + '-locale')
@@ -565,10 +565,10 @@ python package_do_split_locales() {
 
     locales = os.listdir(localedir)
 
-    summary = d.getVar('SUMMARY', True) or pn
-    description = d.getVar('DESCRIPTION', True) or ""
-    locale_section = d.getVar('LOCALE_SECTION', True)
-    mlprefix = d.getVar('MLPREFIX', True) or ""
+    summary = d.getVar('SUMMARY') or pn
+    description = d.getVar('DESCRIPTION') or ""
+    locale_section = d.getVar('LOCALE_SECTION')
+    mlprefix = d.getVar('MLPREFIX') or ""
     for l in sorted(locales):
         ln = legitimize_package_name(l)
         pkg = pn + '-locale-' + ln
@@ -589,14 +589,14 @@ python package_do_split_locales() {
     # glibc-localedata-translit* won't install as a dependency
    # for some other package which breaks meta-toolchain
     # Probably breaks since virtual-locale- isn't provided anywhere
-    #rdep = (d.getVar('RDEPENDS_%s' % pn, True) or "").split()
+    #rdep = (d.getVar('RDEPENDS_%s' % pn) or "").split()
     #rdep.append('%s-locale*' % pn)
     #d.setVar('RDEPENDS_%s' % pn, ' '.join(rdep))
 }
 
 python perform_packagecopy () {
-    dest = d.getVar('D', True)
-    dvar = d.getVar('PKGD', True)
+    dest = d.getVar('D')
+    dvar = d.getVar('PKGD')
 
     # Start by package population by taking a copy of the installed
     # files to operate on
@@ -730,8 +730,8 @@ python fixup_perms () {
     # paths are resolved via BBPATH
     def get_fs_perms_list(d):
         str = ""
-        bbpath = d.getVar('BBPATH', True)
-        fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES', True)
+        bbpath = d.getVar('BBPATH')
+        fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES')
         if not fs_perms_tables:
             fs_perms_tables = 'files/fs-perms.txt'
         for conf_file in fs_perms_tables.split():
@@ -740,7 +740,7 @@ python fixup_perms () {
 
 
 
-    dvar = d.getVar('PKGD', True)
+    dvar = d.getVar('PKGD')
 
     fs_perms_table = {}
     fs_link_table = {}
@@ -769,7 +769,7 @@ python fixup_perms () {
                 'oldincludedir' ]
 
     for path in target_path_vars:
-        dir = d.getVar(path, True) or ""
+        dir = d.getVar(path) or ""
         if dir == "":
             continue
         fs_perms_table[dir] = fs_perms_entry(bb.data.expand("%s 0755 root root false - - -" % (dir), d))
@@ -854,20 +854,20 @@ python fixup_perms () {
 python split_and_strip_files () {
     import stat, errno
 
-    dvar = d.getVar('PKGD', True)
-    pn = d.getVar('PN', True)
+    dvar = d.getVar('PKGD')
+    pn = d.getVar('PN')
 
     oldcwd = os.getcwd()
     os.chdir(dvar)
 
     # We default to '.debug' style
-    if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', True) == 'debug-file-directory':
+    if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-file-directory':
         # Single debug-file-directory style debug info
         debugappend = ".debug"
         debugdir = ""
         debuglibdir = "/usr/lib/debug"
         debugsrcdir = "/usr/src/debug"
-    elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', True) == 'debug-without-src':
+    elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-without-src':
         # Original OE-core, a.k.a. ".debug", style debug info, but without sources in /usr/src/debug
         debugappend = ""
         debugdir = "/.debug"
@@ -918,10 +918,10 @@ python split_and_strip_files () {
     symlinks = {}
     kernmods = []
     inodes = {}
-    libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir", True))
-    baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir", True))
-    if (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1' or \
-            d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', True) != '1'):
+    libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir"))
+    baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir"))
+    if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1' or \
+            d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
         for root, dirs, files in cpath.walk(dvar):
             for f in files:
                 file = os.path.join(root, f)
@@ -962,7 +962,7 @@ python split_and_strip_files () {
                 elf_file = isELF(file)
                 if elf_file & 1:
                     if elf_file & 2:
-                        if 'already-stripped' in (d.getVar('INSANE_SKIP_' + pn, True) or "").split():
+                        if 'already-stripped' in (d.getVar('INSANE_SKIP_' + pn) or "").split():
                             bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn))
                         else:
                             msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn)
@@ -991,7 +991,7 @@ python split_and_strip_files () {
     #
     # First lets process debug splitting
     #
-    if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', True) != '1'):
+    if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
         for file in elffiles:
             src = file[len(dvar):]
             dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
@@ -1054,8 +1054,8 @@ python split_and_strip_files () {
     #
     # Now lets go back over things and strip them
     #
-    if (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1'):
-        strip = d.getVar("STRIP", True)
+    if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1'):
+        strip = d.getVar("STRIP")
         sfiles = []
         for file in elffiles:
             elf_file = int(elffiles[file])
@@ -1075,16 +1075,16 @@ python split_and_strip_files () {
 python populate_packages () {
     import glob, re
 
-    workdir = d.getVar('WORKDIR', True)
-    outdir = d.getVar('DEPLOY_DIR', True)
-    dvar = d.getVar('PKGD', True)
-    packages = d.getVar('PACKAGES', True)
-    pn = d.getVar('PN', True)
+    workdir = d.getVar('WORKDIR')
+    outdir = d.getVar('DEPLOY_DIR')
+    dvar = d.getVar('PKGD')
+    packages = d.getVar('PACKAGES')
+    pn = d.getVar('PN')
 
     bb.utils.mkdirhier(outdir)
     os.chdir(dvar)
 
-    autodebug = not (d.getVar("NOAUTOPACKAGEDEBUG", True) or False)
+    autodebug = not (d.getVar("NOAUTOPACKAGEDEBUG") or False)
 
     # Sanity check PACKAGES for duplicates
     # Sanity should be moved to sanity.bbclass once we have the infrastucture
@@ -1099,7 +1099,7 @@ python populate_packages () {
         else:
             package_list.append(pkg)
     d.setVar('PACKAGES', ' '.join(package_list))
-    pkgdest = d.getVar('PKGDEST', True)
+    pkgdest = d.getVar('PKGDEST')
 
     seen = []
 
@@ -1120,7 +1120,7 @@ python populate_packages () {
         root = os.path.join(pkgdest, pkg)
         bb.utils.mkdirhier(root)
 
-        filesvar = d.getVar('FILES_%s' % pkg, True) or ""
+        filesvar = d.getVar('FILES_%s' % pkg) or ""
         if "//" in filesvar:
             msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg
             package_qa_handle_error("files-invalid", msg, d)
@@ -1188,7 +1188,7 @@ python populate_packages () {
     # Handle LICENSE_EXCLUSION
     package_list = []
     for pkg in packages.split():
-        if d.getVar('LICENSE_EXCLUSION-' + pkg, True):
+        if d.getVar('LICENSE_EXCLUSION-' + pkg):
             msg = "%s has an incompatible license. Excluding from packaging." % pkg
             package_qa_handle_error("incompatible-license", msg, d)
         else:
@@ -1207,7 +1207,7 @@ python populate_packages () {
 
     if unshipped != []:
         msg = pn + ": Files/directories were installed but not shipped in any package:"
-        if "installed-vs-shipped" in (d.getVar('INSANE_SKIP_' + pn, True) or "").split():
+        if "installed-vs-shipped" in (d.getVar('INSANE_SKIP_' + pn) or "").split():
             bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn)
         else:
             for f in unshipped:
@@ -1220,7 +1220,7 @@ populate_packages[dirs] = "${D}"
 
 python package_fixsymlinks () {
     import errno
-    pkgdest = d.getVar('PKGDEST', True)
+    pkgdest = d.getVar('PKGDEST')
     packages = d.getVar("PACKAGES", False).split()
 
     dangling_links = {}
@@ -1255,7 +1255,7 @@ python package_fixsymlinks () {
                 bb.note("%s contains dangling symlink to %s" % (pkg, l))
 
     for pkg in newrdepends:
-        rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg, True) or "")
+        rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg) or "")
         for p in newrdepends[pkg]:
             if p not in rdepends:
                 rdepends[p] = []
@@ -1309,9 +1309,9 @@ python emit_pkgdata() {
                 with open(subdata_file, 'w') as fd:
                     fd.write("PKG_%s: %s" % (ml_pkg, pkg))
 
-    packages = d.getVar('PACKAGES', True)
-    pkgdest = d.getVar('PKGDEST', True)
-    pkgdatadir = d.getVar('PKGDESTWORK', True)
+    packages = d.getVar('PACKAGES')
+    pkgdest = d.getVar('PKGDEST')
+    pkgdatadir = d.getVar('PKGDESTWORK')
 
     # Take shared lock since we're only reading, not writing
     lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"), True)
@@ -1321,9 +1321,9 @@ python emit_pkgdata() {
     f.write("PACKAGES: %s\n" % packages)
     f.close()
 
-    pn = d.getVar('PN', True)
-    global_variants = (d.getVar('MULTILIB_GLOBAL_VARIANTS', True) or "").split()
-    variants = (d.getVar('MULTILIB_VARIANTS', True) or "").split()
+    pn = d.getVar('PN')
+    global_variants = (d.getVar('MULTILIB_GLOBAL_VARIANTS') or "").split()
+    variants = (d.getVar('MULTILIB_VARIANTS') or "").split()
 
     if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
         write_extra_pkgs(variants, pn, packages, pkgdatadir)
@@ -1331,10 +1331,10 @@ python emit_pkgdata() {
     if (bb.data.inherits_class('allarch', d) and not bb.data.inherits_class('packagegroup', d)):
         write_extra_pkgs(global_variants, pn, packages, pkgdatadir)
 
-    workdir = d.getVar('WORKDIR', True)
+    workdir = d.getVar('WORKDIR')
 
     for pkg in packages.split():
-        pkgval = d.getVar('PKG_%s' % pkg, True)
+        pkgval = d.getVar('PKG_%s' % pkg)
         if pkgval is None:
             pkgval = pkg
             d.setVar('PKG_%s' % pkg, pkg)
@@ -1377,11 +1377,11 @@ python emit_pkgdata() {
         write_if_exists(sf, pkg, 'pkg_prerm')
         write_if_exists(sf, pkg, 'FILERPROVIDESFLIST')
         write_if_exists(sf, pkg, 'FILES_INFO')
-        for dfile in (d.getVar('FILERPROVIDESFLIST_' + pkg, True) or "").split():
+        for dfile in (d.getVar('FILERPROVIDESFLIST_' + pkg) or "").split():
             write_if_exists(sf, pkg, 'FILERPROVIDES_' + dfile)
 
         write_if_exists(sf, pkg, 'FILERDEPENDSFLIST')
-        for dfile in (d.getVar('FILERDEPENDSFLIST_' + pkg, True) or "").split():
+        for dfile in (d.getVar('FILERDEPENDSFLIST_' + pkg) or "").split():
             write_if_exists(sf, pkg, 'FILERDEPENDS_' + dfile)
 
         sf.write('%s_%s: %d\n' % ('PKGSIZE', pkg, total_size))
@@ -1394,9 +1394,9 @@ python emit_pkgdata() {
             bb.utils.mkdirhier(os.path.dirname(subdata_sym))
             oe.path.symlink("../../runtime/%s" % pkg, subdata_sym, True)
 
-        allow_empty = d.getVar('ALLOW_EMPTY_%s' % pkg, True)
+        allow_empty = d.getVar('ALLOW_EMPTY_%s' % pkg)
         if not allow_empty:
-            allow_empty = d.getVar('ALLOW_EMPTY', True)
+            allow_empty = d.getVar('ALLOW_EMPTY')
         root = "%s/%s" % (pkgdest, pkg)
         os.chdir(root)
         g = glob('*')
@@ -1435,19 +1435,19 @@ RPMDEPS = "${STAGING_LIBDIR_NATIVE}/rpm/bin/rpmdeps-oecore --macros ${STAGING_LI
 # FILERDEPENDS_filepath_pkg - per file dep
 
 python package_do_filedeps() {
-    if d.getVar('SKIP_FILEDEPS', True) == '1':
+    if d.getVar('SKIP_FILEDEPS') == '1':
         return
 
-    pkgdest = d.getVar('PKGDEST', True)
-    packages = d.getVar('PACKAGES', True)
-    rpmdeps = d.getVar('RPMDEPS', True)
+    pkgdest = d.getVar('PKGDEST')
+    packages = d.getVar('PACKAGES')
+    rpmdeps = d.getVar('RPMDEPS')
 
     def chunks(files, n):
         return [files[i:i+n] for i in range(0, len(files), n)]
 
     pkglist = []
     for pkg in packages.split():
-        if d.getVar('SKIP_FILEDEPS_' + pkg, True) == '1':
+        if d.getVar('SKIP_FILEDEPS_' + pkg) == '1':
             continue
         if pkg.endswith('-dbg') or pkg.endswith('-doc') or pkg.find('-locale-') != -1 or pkg.find('-localedata-') != -1 or pkg.find('-gconv-') != -1 or pkg.find('-charmap-') != -1 or pkg.startswith('kernel-module-'):
             continue
@@ -1496,22 +1496,22 @@ python package_do_shlibs() {
         return
 
     lib_re = re.compile("^.*\.so")
-    libdir_re = re.compile(".*/%s$" % d.getVar('baselib', True))
+    libdir_re = re.compile(".*/%s$" % d.getVar('baselib'))
 
-    packages = d.getVar('PACKAGES', True)
-    targetos = d.getVar('TARGET_OS', True)
+    packages = d.getVar('PACKAGES')
+    targetos = d.getVar('TARGET_OS')
 
-    workdir = d.getVar('WORKDIR', True)
+    workdir = d.getVar('WORKDIR')
 
-    ver = d.getVar('PKGV', True)
+    ver = d.getVar('PKGV')
     if not ver:
         msg = "PKGV not defined"
         package_qa_handle_error("pkgv-undefined", msg, d)
         return
 
-    pkgdest = d.getVar('PKGDEST', True)
+    pkgdest = d.getVar('PKGDEST')
 
-    shlibswork_dir = d.getVar('SHLIBSWORKDIR', True)
+    shlibswork_dir = d.getVar('SHLIBSWORKDIR')
 
     # Take shared lock since we're only reading, not writing
     lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"))
@@ -1519,7 +1519,7 @@ python package_do_shlibs() {
     def linux_so(file, needed, sonames, renames, pkgver):
         needs_ldconfig = False
         ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
-        cmd = d.getVar('OBJDUMP', True) + " -p " + pipes.quote(file) + " 2>/dev/null"
+        cmd = d.getVar('OBJDUMP') + " -p " + pipes.quote(file) + " 2>/dev/null"
         fd = os.popen(cmd)
         lines = fd.readlines()
         fd.close()
@@ -1601,12 +1601,12 @@ python package_do_shlibs() {
                 if name and name not in needed[pkg]:
                     needed[pkg].append((name, file, []))
 
-    if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS', True) == "1":
+    if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS') == "1":
         snap_symlinks = True
     else:
         snap_symlinks = False
 
-    if (d.getVar('USE_LDCONFIG', True) or "1") == "1":
+    if (d.getVar('USE_LDCONFIG') or "1") == "1":
         use_ldconfig = True
     else:
         use_ldconfig = False
@@ -1615,14 +1615,14 @@ python package_do_shlibs() {
     shlib_provider = oe.package.read_shlib_providers(d)
 
     for pkg in packages.split():
-        private_libs = d.getVar('PRIVATE_LIBS_' + pkg, True) or d.getVar('PRIVATE_LIBS', True) or ""
+        private_libs = d.getVar('PRIVATE_LIBS_' + pkg) or d.getVar('PRIVATE_LIBS') or ""
         private_libs = private_libs.split()
         needs_ldconfig = False
         bb.debug(2, "calculating shlib provides for %s" % pkg)
 
-        pkgver = d.getVar('PKGV_' + pkg, True)
+        pkgver = d.getVar('PKGV_' + pkg)
         if not pkgver:
-            pkgver = d.getVar('PV_' + pkg, True)
+            pkgver = d.getVar('PV_' + pkg)
         if not pkgver:
             pkgver = ver
 
@@ -1659,18 +1659,18 @@ python package_do_shlibs() {
         fd.close()
         if needs_ldconfig and use_ldconfig:
             bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
-            postinst = d.getVar('pkg_postinst_%s' % pkg, True)
+            postinst = d.getVar('pkg_postinst_%s' % pkg)
             if not postinst:
                 postinst = '#!/bin/sh\n'
-            postinst += d.getVar('ldconfig_postinst_fragment', True)
+            postinst += d.getVar('ldconfig_postinst_fragment')
             d.setVar('pkg_postinst_%s' % pkg, postinst)
         bb.debug(1, 'LIBNAMES: pkg %s sonames %s' % (pkg, sonames))
 
     bb.utils.unlockfile(lf)
 
-    assumed_libs = d.getVar('ASSUME_SHLIBS', True)
+    assumed_libs = d.getVar('ASSUME_SHLIBS')
     if assumed_libs:
-        libdir = d.getVar("libdir", True)
+        libdir = d.getVar("libdir")
         for e in assumed_libs.split():
             l, dep_pkg = e.split(":")
             lib_ver = None
@@ -1682,7 +1682,7 @@ python package_do_shlibs() {
                 shlib_provider[l] = {}
             shlib_provider[l][libdir] = (dep_pkg, lib_ver)
 
-    libsearchpath = [d.getVar('libdir', True), d.getVar('base_libdir', True)]
+    libsearchpath = [d.getVar('libdir'), d.getVar('base_libdir')]
 
     for pkg in packages.split():
         bb.debug(2, "calculating shlib requirements for %s" % pkg)
@@ -1736,12 +1736,12 @@ python package_do_shlibs() {
 python package_do_pkgconfig () {
     import re
 
-    packages = d.getVar('PACKAGES', True)
-    workdir = d.getVar('WORKDIR', True)
-    pkgdest = d.getVar('PKGDEST', True)
+    packages = d.getVar('PACKAGES')
+    workdir = d.getVar('WORKDIR')
+    pkgdest = d.getVar('PKGDEST')
 
-    shlibs_dirs = d.getVar('SHLIBSDIRS', True).split()
-    shlibswork_dir = d.getVar('SHLIBSWORKDIR', True)
+    shlibs_dirs = d.getVar('SHLIBSDIRS').split()
+    shlibswork_dir = d.getVar('SHLIBSWORKDIR')
 
     pc_re = re.compile('(.*)\.pc$')
     var_re = re.compile('(.*)=(.*)')
@@ -1826,7 +1826,7 @@ python package_do_pkgconfig () {
 
 def read_libdep_files(d):
     pkglibdeps = {}
-    packages = d.getVar('PACKAGES', True).split()
+    packages = d.getVar('PACKAGES').split()
     for pkg in packages:
         pkglibdeps[pkg] = {}
         for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
@@ -1846,9 +1846,9 @@ def read_libdep_files(d):
 python read_shlibdeps () {
     pkglibdeps = read_libdep_files(d)
 
-    packages = d.getVar('PACKAGES', True).split()
+    packages = d.getVar('PACKAGES').split()
     for pkg in packages:
-        rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg, True) or "")
+        rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg) or "")
         for dep in pkglibdeps[pkg]:
             # Add the dep if it's not already there, or if no comparison is set
             if dep not in rdepends:
@@ -1873,14 +1873,14 @@ python package_depchains() {
     package.
     """
 
-    packages = d.getVar('PACKAGES', True)
-    postfixes = (d.getVar('DEPCHAIN_POST', True) or '').split()
-    prefixes = (d.getVar('DEPCHAIN_PRE', True) or '').split()
+    packages = d.getVar('PACKAGES')
+    postfixes = (d.getVar('DEPCHAIN_POST') or '').split()
+    prefixes = (d.getVar('DEPCHAIN_PRE') or '').split()
 
     def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d):
 
         #bb.note('depends for %s is %s' % (base, depends))
-        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg, True) or "")
+        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg) or "")
 
         for depend in depends:
             if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'):
@@ -1901,7 +1901,7 @@ python package_depchains() {
     def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):
 
         #bb.note('rdepends for %s is %s' % (base, rdepends))
-        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg, True) or "")
+        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg) or "")
 
         for depend in rdepends:
             if depend.find('virtual-locale-') != -1:
@@ -1924,12 +1924,12 @@ python package_depchains() {
             list.append(dep)
 
     depends = []
-    for dep in bb.utils.explode_deps(d.getVar('DEPENDS', True) or ""):
+    for dep in bb.utils.explode_deps(d.getVar('DEPENDS') or ""):
         add_dep(depends, dep)
 
     rdepends = []
     for pkg in packages.split():
-        for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + pkg, True) or ""):
+        for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + pkg) or ""):
             add_dep(rdepends, dep)
 
     #bb.note('rdepends is %s' % rdepends)
@@ -1959,7 +1959,7 @@ python package_depchains() {
         for pkg in pkglibdeps:
             for k in pkglibdeps[pkg]:
                 add_dep(pkglibdeplist, k)
-        dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS', True) == '1') or (bb.data.inherits_class('packagegroup', d)))
+        dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS') == '1') or (bb.data.inherits_class('packagegroup', d)))
 
     for suffix in pkgs:
         for pkg in pkgs[suffix]:
@@ -1976,7 +1976,7 @@ python package_depchains() {
                 pkg_addrrecs(pkg, base, suffix, func, rdepends, d)
             else:
                 rdeps = []
-                for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + base, True) or ""):
+                for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + base) or ""):
                     add_dep(rdeps, dep)
                 pkg_addrrecs(pkg, base, suffix, func, rdeps, d)
 }
@@ -1987,8 +1987,8 @@ PACKAGEVARS = "FILES RDEPENDS RRECOMMENDS SUMMARY DESCRIPTION RSUGGESTS RPROVIDE
 
 def gen_packagevar(d):
     ret = []
-    pkgs = (d.getVar("PACKAGES", True) or "").split()
-    vars = (d.getVar("PACKAGEVARS", True) or "").split()
+    pkgs = (d.getVar("PACKAGES") or "").split()
+    vars = (d.getVar("PACKAGEVARS") or "").split()
     for p in pkgs:
         for v in vars:
             ret.append(v + "_" + p)
@@ -2036,16 +2036,16 @@ python do_package () {
     # Sanity test the setup
     ###########################################################################
 
-    packages = (d.getVar('PACKAGES', True) or "").split()
+    packages = (d.getVar('PACKAGES') or "").split()
    if len(packages) < 1:
         bb.debug(1, "No packages to build, skipping do_package")
         return
 
-    workdir = d.getVar('WORKDIR', True)
-    outdir = d.getVar('DEPLOY_DIR', True)
-    dest = d.getVar('D', True)
-    dvar = d.getVar('PKGD', True)
-    pn = d.getVar('PN', True)
+    workdir = d.getVar('WORKDIR')
+    outdir = d.getVar('DEPLOY_DIR')
+    dest = d.getVar('D')
+    dvar = d.getVar('PKGD')
+    pn = d.getVar('PN')
 
     if not workdir or not outdir or not dest or not dvar or not pn:
         msg = "WORKDIR, DEPLOY_DIR, D, PN and PKGD all must be defined, unable to package"
@@ -2063,7 +2063,7 @@ python do_package () {
     # code pre-expands some frequently used variables
 
     def expandVar(x, d):
-        d.setVar(x, d.getVar(x, True))
+        d.setVar(x, d.getVar(x))
 
     for x in 'PN', 'PV', 'BPN', 'TARGET_SYS', 'EXTENDPRAUTO':
         expandVar(x, d)
@@ -2072,7 +2072,7 @@ python do_package () {
     # Setup PKGD (from D)
     ###########################################################################
 
-    for f in (d.getVar('PACKAGEBUILDPKGD', True) or '').split():
+    for f in (d.getVar('PACKAGEBUILDPKGD') or '').split():
         bb.build.exec_func(f, d)
 
     ###########################################################################
@@ -2081,7 +2081,7 @@ python do_package () {
 
     cpath = oe.cachedpath.CachedPath()
 
-    for f in (d.getVar('PACKAGESPLITFUNCS', True) or '').split():
+    for f in (d.getVar('PACKAGESPLITFUNCS') or '').split():
         bb.build.exec_func(f, d)
 
     ###########################################################################
@@ -2091,18 +2091,18 @@ python do_package () {
     # Build global list of files in each split package
     global pkgfiles
     pkgfiles = {}
-    packages = d.getVar('PACKAGES', True).split()
-    pkgdest = d.getVar('PKGDEST', True)
+    packages = d.getVar('PACKAGES').split()
+    pkgdest = d.getVar('PKGDEST')
     for pkg in packages:
         pkgfiles[pkg] = []
        for walkroot, dirs, files in cpath.walk(pkgdest + "/" + pkg):
             for file in files:
                 pkgfiles[pkg].append(walkroot + os.sep + file)
 
-    for f in (d.getVar('PACKAGEFUNCS', True) or '').split():
+    for f in (d.getVar('PACKAGEFUNCS') or '').split():
         bb.build.exec_func(f, d)
 
-    qa_sane = d.getVar("QA_SANE", True)
+    qa_sane = d.getVar("QA_SANE")
     if not qa_sane:
         bb.fatal("Fatal QA errors found, failing task.")
 }
@@ -2149,7 +2149,7 @@ def mapping_rename_hook(d):
     Rewrite variables to account for package renaming in things
     like debian.bbclass or manual PKG variable name changes
     """
-    pkg = d.getVar("PKG", True)
+    pkg = d.getVar("PKG")
     runtime_mapping_rename("RDEPENDS", pkg, d)
     runtime_mapping_rename("RRECOMMENDS", pkg, d)
     runtime_mapping_rename("RSUGGESTS", pkg, d)