| field | value |
|---|---|
| author | Richard Purdie <richard.purdie@linuxfoundation.org> (2021-07-28 23:28:15 +0100) |
| committer | Richard Purdie <richard.purdie@linuxfoundation.org> (2021-08-02 15:44:10 +0100) |
| commit | bb6ddc3691ab04162ec5fd69a2d5e7876713fd15 (patch) |
| tree | 76e376b01253c3aace1a98a5021bcaad3c92e861 /meta/classes |
| parent | fcc456ee4b8f619134abb4649db53c638074082c (diff) |
| download | poky-bb6ddc3691ab04162ec5fd69a2d5e7876713fd15.tar.gz |
Convert to new override syntax
This is the result of automated script conversion:
scripts/contrib/convert-overrides.py <oe-core directory>
converting the metadata to use ":" as the override character instead of "_".
(From OE-Core rev: 42344347be29f0997cc2f7636d9603b1fe1875ae)
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Diffstat (limited to 'meta/classes')
111 files changed, 637 insertions, 636 deletions
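The conversion is mechanical: wherever "_" attached an override, a package name, or an append/prepend operation to a variable or task name, it becomes ":". Ordinary underscores inside variable names themselves (for example DEPENDS_GOLANG or EXTRA_OECONF) are left alone. As a quick before/after reference, the lines below are taken verbatim from the classes in the diff that follows; this is only an illustrative excerpt, not an exhaustive list of affected constructs:

```
# old syntax ("_" as the override separator)
DEPENDS_prepend = "cmake-native "
FILES_${PN}-dev += "${bindir}/*-config"
ACLOCALEXTRAPATH_class-target = " -I ${STAGING_DATADIR_NATIVE}/aclocal/"
do_compile_prepend() {
    ( cd ${B}/src/${GO_IMPORT} && glide install )
}

# new syntax (":" as the override separator)
DEPENDS:prepend = "cmake-native "
FILES:${PN}-dev += "${bindir}/*-config"
ACLOCALEXTRAPATH:class-target = " -I ${STAGING_DATADIR_NATIVE}/aclocal/"
do_compile:prepend() {
    ( cd ${B}/src/${GO_IMPORT} && glide install )
}
```

Python code that builds variable names as strings changes in the same way, e.g. d.getVar('RDEPENDS_' + pkg) becomes d.getVar('RDEPENDS:' + pkg), as can be seen in the debian.bbclass and fontcache.bbclass hunks below.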
diff --git a/meta/classes/autotools.bbclass b/meta/classes/autotools.bbclass
index 77d90a3df5..2c7968e659 100644
--- a/meta/classes/autotools.bbclass
+++ b/meta/classes/autotools.bbclass
| @@ -19,7 +19,8 @@ def get_autotools_dep(d): | |||
| 19 | 19 | ||
| 20 | return deps | 20 | return deps |
| 21 | 21 | ||
| 22 | DEPENDS_prepend = "${@get_autotools_dep(d)} " | 22 | |
| 23 | DEPENDS:prepend = "${@get_autotools_dep(d)} " | ||
| 23 | 24 | ||
| 24 | inherit siteinfo | 25 | inherit siteinfo |
| 25 | 26 | ||
| @@ -131,7 +132,7 @@ autotools_postconfigure(){ | |||
| 131 | 132 | ||
| 132 | EXTRACONFFUNCS ??= "" | 133 | EXTRACONFFUNCS ??= "" |
| 133 | 134 | ||
| 134 | EXTRA_OECONF_append = " ${PACKAGECONFIG_CONFARGS}" | 135 | EXTRA_OECONF:append = " ${PACKAGECONFIG_CONFARGS}" |
| 135 | 136 | ||
| 136 | do_configure[prefuncs] += "autotools_preconfigure autotools_aclocals ${EXTRACONFFUNCS}" | 137 | do_configure[prefuncs] += "autotools_preconfigure autotools_aclocals ${EXTRACONFFUNCS}" |
| 137 | do_compile[prefuncs] += "autotools_aclocals" | 138 | do_compile[prefuncs] += "autotools_aclocals" |
| @@ -140,8 +141,8 @@ do_configure[postfuncs] += "autotools_postconfigure" | |||
| 140 | 141 | ||
| 141 | ACLOCALDIR = "${STAGING_DATADIR}/aclocal" | 142 | ACLOCALDIR = "${STAGING_DATADIR}/aclocal" |
| 142 | ACLOCALEXTRAPATH = "" | 143 | ACLOCALEXTRAPATH = "" |
| 143 | ACLOCALEXTRAPATH_class-target = " -I ${STAGING_DATADIR_NATIVE}/aclocal/" | 144 | ACLOCALEXTRAPATH:class-target = " -I ${STAGING_DATADIR_NATIVE}/aclocal/" |
| 144 | ACLOCALEXTRAPATH_class-nativesdk = " -I ${STAGING_DATADIR_NATIVE}/aclocal/" | 145 | ACLOCALEXTRAPATH:class-nativesdk = " -I ${STAGING_DATADIR_NATIVE}/aclocal/" |
| 145 | 146 | ||
| 146 | python autotools_aclocals () { | 147 | python autotools_aclocals () { |
| 147 | d.setVar("CONFIG_SITE", siteinfo_get_files(d, sysrootcache=True)) | 148 | d.setVar("CONFIG_SITE", siteinfo_get_files(d, sysrootcache=True)) |
diff --git a/meta/classes/baremetal-image.bbclass b/meta/classes/baremetal-image.bbclass
index 8708a54301..9ec3f1460b 100644
--- a/meta/classes/baremetal-image.bbclass
+++ b/meta/classes/baremetal-image.bbclass
| @@ -12,8 +12,8 @@ | |||
| 12 | 12 | ||
| 13 | # Toolchain should be baremetal or newlib based. | 13 | # Toolchain should be baremetal or newlib based. |
| 14 | # TCLIBC="baremetal" or TCLIBC="newlib" | 14 | # TCLIBC="baremetal" or TCLIBC="newlib" |
| 15 | COMPATIBLE_HOST_libc-musl_class-target = "null" | 15 | COMPATIBLE_HOST:libc-musl:class-target = "null" |
| 16 | COMPATIBLE_HOST_libc-glibc_class-target = "null" | 16 | COMPATIBLE_HOST:libc-glibc:class-target = "null" |
| 17 | 17 | ||
| 18 | 18 | ||
| 19 | inherit rootfs-postcommands | 19 | inherit rootfs-postcommands |
| @@ -61,7 +61,7 @@ python do_rootfs(){ | |||
| 61 | # Assure binaries, manifest and qemubootconf are populated on DEPLOY_DIR_IMAGE | 61 | # Assure binaries, manifest and qemubootconf are populated on DEPLOY_DIR_IMAGE |
| 62 | do_image_complete[dirs] = "${TOPDIR}" | 62 | do_image_complete[dirs] = "${TOPDIR}" |
| 63 | SSTATETASKS += "do_image_complete" | 63 | SSTATETASKS += "do_image_complete" |
| 64 | SSTATE_SKIP_CREATION_task-image-complete = '1' | 64 | SSTATE_SKIP_CREATION:task-image-complete = '1' |
| 65 | do_image_complete[sstate-inputdirs] = "${IMGDEPLOYDIR}" | 65 | do_image_complete[sstate-inputdirs] = "${IMGDEPLOYDIR}" |
| 66 | do_image_complete[sstate-outputdirs] = "${DEPLOY_DIR_IMAGE}" | 66 | do_image_complete[sstate-outputdirs] = "${DEPLOY_DIR_IMAGE}" |
| 67 | do_image_complete[stamp-extra-info] = "${MACHINE_ARCH}" | 67 | do_image_complete[stamp-extra-info] = "${MACHINE_ARCH}" |
| @@ -77,18 +77,18 @@ QB_DEFAULT_KERNEL ?= "${IMAGE_LINK_NAME}.bin" | |||
| 77 | QB_MEM ?= "-m 256" | 77 | QB_MEM ?= "-m 256" |
| 78 | QB_DEFAULT_FSTYPE ?= "bin" | 78 | QB_DEFAULT_FSTYPE ?= "bin" |
| 79 | QB_DTB ?= "" | 79 | QB_DTB ?= "" |
| 80 | QB_OPT_APPEND_append = " -nographic" | 80 | QB_OPT_APPEND:append = " -nographic" |
| 81 | 81 | ||
| 82 | # RISC-V tunes set the BIOS, unset, and instruct QEMU to | 82 | # RISC-V tunes set the BIOS, unset, and instruct QEMU to |
| 83 | # ignore the BIOS and boot from -kernel | 83 | # ignore the BIOS and boot from -kernel |
| 84 | QB_DEFAULT_BIOS_qemuriscv64 = "" | 84 | QB_DEFAULT_BIOS:qemuriscv64 = "" |
| 85 | QB_OPT_APPEND_append_qemuriscv64 = " -bios none" | 85 | QB_OPT_APPEND:append:qemuriscv64 = " -bios none" |
| 86 | 86 | ||
| 87 | 87 | ||
| 88 | # Use the medium-any code model for the RISC-V 64 bit implementation, | 88 | # Use the medium-any code model for the RISC-V 64 bit implementation, |
| 89 | # since medlow can only access addresses below 0x80000000 and RAM | 89 | # since medlow can only access addresses below 0x80000000 and RAM |
| 90 | # starts at 0x80000000 on RISC-V 64 | 90 | # starts at 0x80000000 on RISC-V 64 |
| 91 | CFLAGS_append_qemuriscv64 = " -mcmodel=medany" | 91 | CFLAGS:append:qemuriscv64 = " -mcmodel=medany" |
| 92 | 92 | ||
| 93 | 93 | ||
| 94 | # This next part is necessary to trick the build system into thinking | 94 | # This next part is necessary to trick the build system into thinking |
diff --git a/meta/classes/base.bbclass b/meta/classes/base.bbclass
index ff0579c7d6..11b65171d9 100644
--- a/meta/classes/base.bbclass
+++ b/meta/classes/base.bbclass
| @@ -74,10 +74,10 @@ def get_base_dep(d): | |||
| 74 | BASE_DEFAULT_DEPS = "virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc" | 74 | BASE_DEFAULT_DEPS = "virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc" |
| 75 | 75 | ||
| 76 | BASEDEPENDS = "" | 76 | BASEDEPENDS = "" |
| 77 | BASEDEPENDS_class-target = "${@get_base_dep(d)}" | 77 | BASEDEPENDS:class-target = "${@get_base_dep(d)}" |
| 78 | BASEDEPENDS_class-nativesdk = "${@get_base_dep(d)}" | 78 | BASEDEPENDS:class-nativesdk = "${@get_base_dep(d)}" |
| 79 | 79 | ||
| 80 | DEPENDS_prepend="${BASEDEPENDS} " | 80 | DEPENDS:prepend="${BASEDEPENDS} " |
| 81 | 81 | ||
| 82 | FILESPATH = "${@base_set_filespath(["${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files"], d)}" | 82 | FILESPATH = "${@base_set_filespath(["${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files"], d)}" |
| 83 | # THISDIR only works properly with imediate expansion as it has to run | 83 | # THISDIR only works properly with imediate expansion as it has to run |
| @@ -91,7 +91,7 @@ def extra_path_elements(d): | |||
| 91 | path = path + "${STAGING_BINDIR_NATIVE}/" + e + ":" | 91 | path = path + "${STAGING_BINDIR_NATIVE}/" + e + ":" |
| 92 | return path | 92 | return path |
| 93 | 93 | ||
| 94 | PATH_prepend = "${@extra_path_elements(d)}" | 94 | PATH:prepend = "${@extra_path_elements(d)}" |
| 95 | 95 | ||
| 96 | def get_lic_checksum_file_list(d): | 96 | def get_lic_checksum_file_list(d): |
| 97 | filelist = [] | 97 | filelist = [] |
| @@ -481,8 +481,8 @@ python () { | |||
| 481 | % (d.getVar('PN'), flag, 's' if len(intersec) > 1 else '', ' '.join(intersec))) | 481 | % (d.getVar('PN'), flag, 's' if len(intersec) > 1 else '', ' '.join(intersec))) |
| 482 | 482 | ||
| 483 | appendVar('DEPENDS', extradeps) | 483 | appendVar('DEPENDS', extradeps) |
| 484 | appendVar('RDEPENDS_${PN}', extrardeps) | 484 | appendVar('RDEPENDS:${PN}', extrardeps) |
| 485 | appendVar('RRECOMMENDS_${PN}', extrarrecs) | 485 | appendVar('RRECOMMENDS:${PN}', extrarrecs) |
| 486 | appendVar('PACKAGECONFIG_CONFARGS', extraconf) | 486 | appendVar('PACKAGECONFIG_CONFARGS', extraconf) |
| 487 | 487 | ||
| 488 | pn = d.getVar('PN') | 488 | pn = d.getVar('PN') |
diff --git a/meta/classes/bash-completion.bbclass b/meta/classes/bash-completion.bbclass
index 80ee9b4874..803b2cae4d 100644
--- a/meta/classes/bash-completion.bbclass
+++ b/meta/classes/bash-completion.bbclass
| @@ -1,7 +1,7 @@ | |||
| 1 | DEPENDS_append_class-target = " bash-completion" | 1 | DEPENDS:append:class-target = " bash-completion" |
| 2 | 2 | ||
| 3 | PACKAGES += "${PN}-bash-completion" | 3 | PACKAGES += "${PN}-bash-completion" |
| 4 | 4 | ||
| 5 | FILES_${PN}-bash-completion = "${datadir}/bash-completion ${sysconfdir}/bash_completion.d" | 5 | FILES:${PN}-bash-completion = "${datadir}/bash-completion ${sysconfdir}/bash_completion.d" |
| 6 | 6 | ||
| 7 | RDEPENDS_${PN}-bash-completion = "bash-completion" | 7 | RDEPENDS:${PN}-bash-completion = "bash-completion" |
diff --git a/meta/classes/bin_package.bbclass b/meta/classes/bin_package.bbclass
index cbc9b1fa13..c3aca20443 100644
--- a/meta/classes/bin_package.bbclass
+++ b/meta/classes/bin_package.bbclass
| @@ -34,6 +34,6 @@ bin_package_do_install () { | |||
| 34 | | tar --no-same-owner -xpf - -C ${D} | 34 | | tar --no-same-owner -xpf - -C ${D} |
| 35 | } | 35 | } |
| 36 | 36 | ||
| 37 | FILES_${PN} = "/" | 37 | FILES:${PN} = "/" |
| 38 | 38 | ||
| 39 | EXPORT_FUNCTIONS do_install | 39 | EXPORT_FUNCTIONS do_install |
diff --git a/meta/classes/binconfig-disabled.bbclass b/meta/classes/binconfig-disabled.bbclass
index 096b670e12..e8ac41b2d4 100644
--- a/meta/classes/binconfig-disabled.bbclass
+++ b/meta/classes/binconfig-disabled.bbclass
| @@ -5,9 +5,9 @@ | |||
| 5 | # The list of scripts which should be disabled. | 5 | # The list of scripts which should be disabled. |
| 6 | BINCONFIG ?= "" | 6 | BINCONFIG ?= "" |
| 7 | 7 | ||
| 8 | FILES_${PN}-dev += "${bindir}/*-config" | 8 | FILES:${PN}-dev += "${bindir}/*-config" |
| 9 | 9 | ||
| 10 | do_install_append () { | 10 | do_install:append () { |
| 11 | for x in ${BINCONFIG}; do | 11 | for x in ${BINCONFIG}; do |
| 12 | # Make the disabled script emit invalid parameters for those configure | 12 | # Make the disabled script emit invalid parameters for those configure |
| 13 | # scripts which call it without checking the return code. | 13 | # scripts which call it without checking the return code. |
diff --git a/meta/classes/binconfig.bbclass b/meta/classes/binconfig.bbclass
index 9112ed4608..6e0c88269a 100644
--- a/meta/classes/binconfig.bbclass
+++ b/meta/classes/binconfig.bbclass
| @@ -1,4 +1,4 @@ | |||
| 1 | FILES_${PN}-dev += "${bindir}/*-config" | 1 | FILES:${PN}-dev += "${bindir}/*-config" |
| 2 | 2 | ||
| 3 | # The namespaces can clash here hence the two step replace | 3 | # The namespaces can clash here hence the two step replace |
| 4 | def get_binconfig_mangle(d): | 4 | def get_binconfig_mangle(d): |
diff --git a/meta/classes/buildhistory.bbclass b/meta/classes/buildhistory.bbclass
index 55b12d7893..5099e70fb7 100644
--- a/meta/classes/buildhistory.bbclass
+++ b/meta/classes/buildhistory.bbclass
| @@ -45,14 +45,14 @@ BUILDHISTORY_PUSH_REPO ?= "" | |||
| 45 | BUILDHISTORY_TAG ?= "build" | 45 | BUILDHISTORY_TAG ?= "build" |
| 46 | BUILDHISTORY_PATH_PREFIX_STRIP ?= "" | 46 | BUILDHISTORY_PATH_PREFIX_STRIP ?= "" |
| 47 | 47 | ||
| 48 | SSTATEPOSTINSTFUNCS_append = " buildhistory_emit_pkghistory" | 48 | SSTATEPOSTINSTFUNCS:append = " buildhistory_emit_pkghistory" |
| 49 | # We want to avoid influencing the signatures of sstate tasks - first the function itself: | 49 | # We want to avoid influencing the signatures of sstate tasks - first the function itself: |
| 50 | sstate_install[vardepsexclude] += "buildhistory_emit_pkghistory" | 50 | sstate_install[vardepsexclude] += "buildhistory_emit_pkghistory" |
| 51 | # then the value added to SSTATEPOSTINSTFUNCS: | 51 | # then the value added to SSTATEPOSTINSTFUNCS: |
| 52 | SSTATEPOSTINSTFUNCS[vardepvalueexclude] .= "| buildhistory_emit_pkghistory" | 52 | SSTATEPOSTINSTFUNCS[vardepvalueexclude] .= "| buildhistory_emit_pkghistory" |
| 53 | 53 | ||
| 54 | # Similarly for our function that gets the output signatures | 54 | # Similarly for our function that gets the output signatures |
| 55 | SSTATEPOSTUNPACKFUNCS_append = " buildhistory_emit_outputsigs" | 55 | SSTATEPOSTUNPACKFUNCS:append = " buildhistory_emit_outputsigs" |
| 56 | sstate_installpkgdir[vardepsexclude] += "buildhistory_emit_outputsigs" | 56 | sstate_installpkgdir[vardepsexclude] += "buildhistory_emit_outputsigs" |
| 57 | SSTATEPOSTUNPACKFUNCS[vardepvalueexclude] .= "| buildhistory_emit_outputsigs" | 57 | SSTATEPOSTUNPACKFUNCS[vardepvalueexclude] .= "| buildhistory_emit_outputsigs" |
| 58 | 58 | ||
| @@ -676,17 +676,17 @@ IMAGE_POSTPROCESS_COMMAND[vardepvalueexclude] .= "| buildhistory_get_imageinfo ; | |||
| 676 | IMAGE_POSTPROCESS_COMMAND[vardepsexclude] += "buildhistory_get_imageinfo" | 676 | IMAGE_POSTPROCESS_COMMAND[vardepsexclude] += "buildhistory_get_imageinfo" |
| 677 | 677 | ||
| 678 | # We want these to be the last run so that we get called after complementary package installation | 678 | # We want these to be the last run so that we get called after complementary package installation |
| 679 | POPULATE_SDK_POST_TARGET_COMMAND_append = " buildhistory_list_installed_sdk_target;" | 679 | POPULATE_SDK_POST_TARGET_COMMAND:append = " buildhistory_list_installed_sdk_target;" |
| 680 | POPULATE_SDK_POST_TARGET_COMMAND_append = " buildhistory_get_sdk_installed_target;" | 680 | POPULATE_SDK_POST_TARGET_COMMAND:append = " buildhistory_get_sdk_installed_target;" |
| 681 | POPULATE_SDK_POST_TARGET_COMMAND[vardepvalueexclude] .= "| buildhistory_list_installed_sdk_target;| buildhistory_get_sdk_installed_target;" | 681 | POPULATE_SDK_POST_TARGET_COMMAND[vardepvalueexclude] .= "| buildhistory_list_installed_sdk_target;| buildhistory_get_sdk_installed_target;" |
| 682 | POPULATE_SDK_POST_TARGET_COMMAND[vardepsexclude] += "buildhistory_list_installed_sdk_target buildhistory_get_sdk_installed_target" | 682 | POPULATE_SDK_POST_TARGET_COMMAND[vardepsexclude] += "buildhistory_list_installed_sdk_target buildhistory_get_sdk_installed_target" |
| 683 | 683 | ||
| 684 | POPULATE_SDK_POST_HOST_COMMAND_append = " buildhistory_list_installed_sdk_host;" | 684 | POPULATE_SDK_POST_HOST_COMMAND:append = " buildhistory_list_installed_sdk_host;" |
| 685 | POPULATE_SDK_POST_HOST_COMMAND_append = " buildhistory_get_sdk_installed_host;" | 685 | POPULATE_SDK_POST_HOST_COMMAND:append = " buildhistory_get_sdk_installed_host;" |
| 686 | POPULATE_SDK_POST_HOST_COMMAND[vardepvalueexclude] .= "| buildhistory_list_installed_sdk_host;| buildhistory_get_sdk_installed_host;" | 686 | POPULATE_SDK_POST_HOST_COMMAND[vardepvalueexclude] .= "| buildhistory_list_installed_sdk_host;| buildhistory_get_sdk_installed_host;" |
| 687 | POPULATE_SDK_POST_HOST_COMMAND[vardepsexclude] += "buildhistory_list_installed_sdk_host buildhistory_get_sdk_installed_host" | 687 | POPULATE_SDK_POST_HOST_COMMAND[vardepsexclude] += "buildhistory_list_installed_sdk_host buildhistory_get_sdk_installed_host" |
| 688 | 688 | ||
| 689 | SDK_POSTPROCESS_COMMAND_append = " buildhistory_get_sdkinfo ; buildhistory_get_extra_sdkinfo; " | 689 | SDK_POSTPROCESS_COMMAND:append = " buildhistory_get_sdkinfo ; buildhistory_get_extra_sdkinfo; " |
| 690 | SDK_POSTPROCESS_COMMAND[vardepvalueexclude] .= "| buildhistory_get_sdkinfo ; buildhistory_get_extra_sdkinfo; " | 690 | SDK_POSTPROCESS_COMMAND[vardepvalueexclude] .= "| buildhistory_get_sdkinfo ; buildhistory_get_extra_sdkinfo; " |
| 691 | SDK_POSTPROCESS_COMMAND[vardepsexclude] += "buildhistory_get_sdkinfo buildhistory_get_extra_sdkinfo" | 691 | SDK_POSTPROCESS_COMMAND[vardepsexclude] += "buildhistory_get_sdkinfo buildhistory_get_extra_sdkinfo" |
| 692 | 692 | ||
diff --git a/meta/classes/cmake.bbclass b/meta/classes/cmake.bbclass
index f01db7480b..92b9197c48 100644
--- a/meta/classes/cmake.bbclass
+++ b/meta/classes/cmake.bbclass
| @@ -1,7 +1,7 @@ | |||
| 1 | # Path to the CMake file to process. | 1 | # Path to the CMake file to process. |
| 2 | OECMAKE_SOURCEPATH ??= "${S}" | 2 | OECMAKE_SOURCEPATH ??= "${S}" |
| 3 | 3 | ||
| 4 | DEPENDS_prepend = "cmake-native " | 4 | DEPENDS:prepend = "cmake-native " |
| 5 | B = "${WORKDIR}/build" | 5 | B = "${WORKDIR}/build" |
| 6 | 6 | ||
| 7 | # What CMake generator to use. | 7 | # What CMake generator to use. |
| @@ -57,13 +57,13 @@ OECMAKE_PERLNATIVE_DIR ??= "" | |||
| 57 | OECMAKE_EXTRA_ROOT_PATH ?= "" | 57 | OECMAKE_EXTRA_ROOT_PATH ?= "" |
| 58 | 58 | ||
| 59 | OECMAKE_FIND_ROOT_PATH_MODE_PROGRAM = "ONLY" | 59 | OECMAKE_FIND_ROOT_PATH_MODE_PROGRAM = "ONLY" |
| 60 | OECMAKE_FIND_ROOT_PATH_MODE_PROGRAM_class-native = "BOTH" | 60 | OECMAKE_FIND_ROOT_PATH_MODE_PROGRAM:class-native = "BOTH" |
| 61 | 61 | ||
| 62 | EXTRA_OECMAKE_append = " ${PACKAGECONFIG_CONFARGS}" | 62 | EXTRA_OECMAKE:append = " ${PACKAGECONFIG_CONFARGS}" |
| 63 | 63 | ||
| 64 | export CMAKE_BUILD_PARALLEL_LEVEL | 64 | export CMAKE_BUILD_PARALLEL_LEVEL |
| 65 | CMAKE_BUILD_PARALLEL_LEVEL_task-compile = "${@oe.utils.parallel_make(d, False)}" | 65 | CMAKE_BUILD_PARALLEL_LEVEL:task-compile = "${@oe.utils.parallel_make(d, False)}" |
| 66 | CMAKE_BUILD_PARALLEL_LEVEL_task-install = "${@oe.utils.parallel_make(d, True)}" | 66 | CMAKE_BUILD_PARALLEL_LEVEL:task-install = "${@oe.utils.parallel_make(d, True)}" |
| 67 | 67 | ||
| 68 | OECMAKE_TARGET_COMPILE ?= "all" | 68 | OECMAKE_TARGET_COMPILE ?= "all" |
| 69 | OECMAKE_TARGET_INSTALL ?= "install" | 69 | OECMAKE_TARGET_INSTALL ?= "install" |
diff --git a/meta/classes/compress_doc.bbclass b/meta/classes/compress_doc.bbclass
index d6d11fad26..379b6c169e 100644
--- a/meta/classes/compress_doc.bbclass
+++ b/meta/classes/compress_doc.bbclass
| @@ -8,7 +8,7 @@ | |||
| 8 | # | 8 | # |
| 9 | # 3. It is easy to add a new type compression by editing | 9 | # 3. It is easy to add a new type compression by editing |
| 10 | # local.conf, such as: | 10 | # local.conf, such as: |
| 11 | # DOC_COMPRESS_LIST_append = ' abc' | 11 | # DOC_COMPRESS_LIST:append = ' abc' |
| 12 | # DOC_COMPRESS = 'abc' | 12 | # DOC_COMPRESS = 'abc' |
| 13 | # DOC_COMPRESS_CMD[abc] = 'abc compress cmd ***' | 13 | # DOC_COMPRESS_CMD[abc] = 'abc compress cmd ***' |
| 14 | # DOC_DECOMPRESS_CMD[abc] = 'abc decompress cmd ***' | 14 | # DOC_DECOMPRESS_CMD[abc] = 'abc decompress cmd ***' |
| @@ -225,7 +225,7 @@ python compress_doc_updatealternatives () { | |||
| 225 | infodir = d.getVar("infodir") | 225 | infodir = d.getVar("infodir") |
| 226 | compress_mode = d.getVar('DOC_COMPRESS') | 226 | compress_mode = d.getVar('DOC_COMPRESS') |
| 227 | for pkg in (d.getVar('PACKAGES') or "").split(): | 227 | for pkg in (d.getVar('PACKAGES') or "").split(): |
| 228 | old_names = (d.getVar('ALTERNATIVE_%s' % pkg) or "").split() | 228 | old_names = (d.getVar('ALTERNATIVE:%s' % pkg) or "").split() |
| 229 | new_names = [] | 229 | new_names = [] |
| 230 | for old_name in old_names: | 230 | for old_name in old_names: |
| 231 | old_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', old_name) | 231 | old_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', old_name) |
| @@ -258,6 +258,6 @@ python compress_doc_updatealternatives () { | |||
| 258 | new_names.append(new_name) | 258 | new_names.append(new_name) |
| 259 | 259 | ||
| 260 | if new_names: | 260 | if new_names: |
| 261 | d.setVar('ALTERNATIVE_%s' % pkg, ' '.join(new_names)) | 261 | d.setVar('ALTERNATIVE:%s' % pkg, ' '.join(new_names)) |
| 262 | } | 262 | } |
| 263 | 263 | ||
diff --git a/meta/classes/cpan-base.bbclass b/meta/classes/cpan-base.bbclass
index 1fc3f0bcb0..9758065bfc 100644
--- a/meta/classes/cpan-base.bbclass
+++ b/meta/classes/cpan-base.bbclass
| @@ -2,10 +2,10 @@ | |||
| 2 | # cpan-base providers various perl related information needed for building | 2 | # cpan-base providers various perl related information needed for building |
| 3 | # cpan modules | 3 | # cpan modules |
| 4 | # | 4 | # |
| 5 | FILES_${PN} += "${libdir}/perl5 ${datadir}/perl5" | 5 | FILES:${PN} += "${libdir}/perl5 ${datadir}/perl5" |
| 6 | 6 | ||
| 7 | DEPENDS += "${@["perl", "perl-native"][(bb.data.inherits_class('native', d))]}" | 7 | DEPENDS += "${@["perl", "perl-native"][(bb.data.inherits_class('native', d))]}" |
| 8 | RDEPENDS_${PN} += "${@["perl", ""][(bb.data.inherits_class('native', d))]}" | 8 | RDEPENDS:${PN} += "${@["perl", ""][(bb.data.inherits_class('native', d))]}" |
| 9 | 9 | ||
| 10 | inherit perl-version | 10 | inherit perl-version |
| 11 | 11 | ||
| @@ -15,7 +15,7 @@ def is_target(d): | |||
| 15 | return "no" | 15 | return "no" |
| 16 | 16 | ||
| 17 | PERLLIBDIRS = "${libdir}/perl5" | 17 | PERLLIBDIRS = "${libdir}/perl5" |
| 18 | PERLLIBDIRS_class-native = "${libdir}/perl5" | 18 | PERLLIBDIRS:class-native = "${libdir}/perl5" |
| 19 | 19 | ||
| 20 | def cpan_upstream_check_pattern(d): | 20 | def cpan_upstream_check_pattern(d): |
| 21 | for x in (d.getVar('SRC_URI') or '').split(' '): | 21 | for x in (d.getVar('SRC_URI') or '').split(' '): |
diff --git a/meta/classes/cpan.bbclass b/meta/classes/cpan.bbclass
index e9908ae4b8..18f1b9d575 100644
--- a/meta/classes/cpan.bbclass
+++ b/meta/classes/cpan.bbclass
| @@ -41,12 +41,12 @@ cpan_do_configure () { | |||
| 41 | fi | 41 | fi |
| 42 | } | 42 | } |
| 43 | 43 | ||
| 44 | do_configure_append_class-target() { | 44 | do_configure:append:class-target() { |
| 45 | find . -name Makefile | xargs sed -E -i \ | 45 | find . -name Makefile | xargs sed -E -i \ |
| 46 | -e 's:LD_RUN_PATH ?= ?"?[^"]*"?::g' | 46 | -e 's:LD_RUN_PATH ?= ?"?[^"]*"?::g' |
| 47 | } | 47 | } |
| 48 | 48 | ||
| 49 | do_configure_append_class-nativesdk() { | 49 | do_configure:append:class-nativesdk() { |
| 50 | find . -name Makefile | xargs sed -E -i \ | 50 | find . -name Makefile | xargs sed -E -i \ |
| 51 | -e 's:LD_RUN_PATH ?= ?"?[^"]*"?::g' | 51 | -e 's:LD_RUN_PATH ?= ?"?[^"]*"?::g' |
| 52 | } | 52 | } |
diff --git a/meta/classes/cross-canadian.bbclass b/meta/classes/cross-canadian.bbclass
index 39b5bb93f4..447a2bb158 100644
--- a/meta/classes/cross-canadian.bbclass
+++ b/meta/classes/cross-canadian.bbclass
| @@ -104,7 +104,7 @@ STAGING_DIR_HOST = "${RECIPE_SYSROOT}" | |||
| 104 | 104 | ||
| 105 | TOOLCHAIN_OPTIONS = " --sysroot=${RECIPE_SYSROOT}" | 105 | TOOLCHAIN_OPTIONS = " --sysroot=${RECIPE_SYSROOT}" |
| 106 | 106 | ||
| 107 | PATH_append = ":${TMPDIR}/sysroots/${HOST_ARCH}/${bindir_cross}" | 107 | PATH:append = ":${TMPDIR}/sysroots/${HOST_ARCH}/${bindir_cross}" |
| 108 | PKGHIST_DIR = "${TMPDIR}/pkghistory/${HOST_ARCH}-${SDKPKGSUFFIX}${HOST_VENDOR}-${HOST_OS}/" | 108 | PKGHIST_DIR = "${TMPDIR}/pkghistory/${HOST_ARCH}-${SDKPKGSUFFIX}${HOST_VENDOR}-${HOST_OS}/" |
| 109 | 109 | ||
| 110 | HOST_ARCH = "${SDK_ARCH}" | 110 | HOST_ARCH = "${SDK_ARCH}" |
| @@ -129,7 +129,7 @@ LDFLAGS = "${BUILDSDK_LDFLAGS} \ | |||
| 129 | # We need chrpath >= 0.14 to ensure we can deal with 32 and 64 bit | 129 | # We need chrpath >= 0.14 to ensure we can deal with 32 and 64 bit |
| 130 | # binaries | 130 | # binaries |
| 131 | # | 131 | # |
| 132 | DEPENDS_append = " chrpath-replacement-native" | 132 | DEPENDS:append = " chrpath-replacement-native" |
| 133 | EXTRANATIVEPATH += "chrpath-native" | 133 | EXTRANATIVEPATH += "chrpath-native" |
| 134 | 134 | ||
| 135 | # Path mangling needed by the cross packaging | 135 | # Path mangling needed by the cross packaging |
| @@ -153,7 +153,7 @@ base_sbindir = "${bindir}" | |||
| 153 | libdir = "${exec_prefix}/lib/${TARGET_ARCH}${TARGET_VENDOR}-${TARGET_OS}" | 153 | libdir = "${exec_prefix}/lib/${TARGET_ARCH}${TARGET_VENDOR}-${TARGET_OS}" |
| 154 | libexecdir = "${exec_prefix}/libexec/${TARGET_ARCH}${TARGET_VENDOR}-${TARGET_OS}" | 154 | libexecdir = "${exec_prefix}/libexec/${TARGET_ARCH}${TARGET_VENDOR}-${TARGET_OS}" |
| 155 | 155 | ||
| 156 | FILES_${PN} = "${prefix}" | 156 | FILES:${PN} = "${prefix}" |
| 157 | 157 | ||
| 158 | export PKG_CONFIG_DIR = "${STAGING_DIR_HOST}${exec_prefix}/lib/pkgconfig" | 158 | export PKG_CONFIG_DIR = "${STAGING_DIR_HOST}${exec_prefix}/lib/pkgconfig" |
| 159 | export PKG_CONFIG_SYSROOT_DIR = "${STAGING_DIR_HOST}" | 159 | export PKG_CONFIG_SYSROOT_DIR = "${STAGING_DIR_HOST}" |
diff --git a/meta/classes/cross.bbclass b/meta/classes/cross.bbclass
index bfec91d043..00e0de84f3 100644
--- a/meta/classes/cross.bbclass
+++ b/meta/classes/cross.bbclass
| @@ -7,7 +7,7 @@ EXCLUDE_FROM_WORLD = "1" | |||
| 7 | CLASSOVERRIDE = "class-cross" | 7 | CLASSOVERRIDE = "class-cross" |
| 8 | PACKAGES = "" | 8 | PACKAGES = "" |
| 9 | PACKAGES_DYNAMIC = "" | 9 | PACKAGES_DYNAMIC = "" |
| 10 | PACKAGES_DYNAMIC_class-native = "" | 10 | PACKAGES_DYNAMIC:class-native = "" |
| 11 | 11 | ||
| 12 | HOST_ARCH = "${BUILD_ARCH}" | 12 | HOST_ARCH = "${BUILD_ARCH}" |
| 13 | HOST_VENDOR = "${BUILD_VENDOR}" | 13 | HOST_VENDOR = "${BUILD_VENDOR}" |
diff --git a/meta/classes/cve-check.bbclass b/meta/classes/cve-check.bbclass
index 112ee3379d..a3fc9c2623 100644
--- a/meta/classes/cve-check.bbclass
+++ b/meta/classes/cve-check.bbclass
| @@ -153,7 +153,7 @@ python cve_check_write_rootfs_manifest () { | |||
| 153 | bb.plain("Image CVE report stored in: %s" % manifest_name) | 153 | bb.plain("Image CVE report stored in: %s" % manifest_name) |
| 154 | } | 154 | } |
| 155 | 155 | ||
| 156 | ROOTFS_POSTPROCESS_COMMAND_prepend = "${@'cve_check_write_rootfs_manifest; ' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}" | 156 | ROOTFS_POSTPROCESS_COMMAND:prepend = "${@'cve_check_write_rootfs_manifest; ' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}" |
| 157 | do_rootfs[recrdeptask] += "${@'do_cve_check' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}" | 157 | do_rootfs[recrdeptask] += "${@'do_cve_check' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}" |
| 158 | 158 | ||
| 159 | def get_patches_cves(d): | 159 | def get_patches_cves(d): |
diff --git a/meta/classes/debian.bbclass b/meta/classes/debian.bbclass
index 6f8a599ccb..39b1a1a08b 100644
--- a/meta/classes/debian.bbclass
+++ b/meta/classes/debian.bbclass
| @@ -4,7 +4,7 @@ | |||
| 4 | # depends are correct | 4 | # depends are correct |
| 5 | # | 5 | # |
| 6 | # Custom library package names can be defined setting | 6 | # Custom library package names can be defined setting |
| 7 | # DEBIANNAME_ + pkgname to the desired name. | 7 | # DEBIANNAME: + pkgname to the desired name. |
| 8 | # | 8 | # |
| 9 | # Better expressed as ensure all RDEPENDS package before we package | 9 | # Better expressed as ensure all RDEPENDS package before we package |
| 10 | # This means we can't have circular RDEPENDS/RRECOMMENDS | 10 | # This means we can't have circular RDEPENDS/RRECOMMENDS |
| @@ -51,11 +51,11 @@ python debian_package_name_hook () { | |||
| 51 | return (s[stat.ST_MODE] & stat.S_IEXEC) | 51 | return (s[stat.ST_MODE] & stat.S_IEXEC) |
| 52 | 52 | ||
| 53 | def add_rprovides(pkg, d): | 53 | def add_rprovides(pkg, d): |
| 54 | newpkg = d.getVar('PKG_' + pkg) | 54 | newpkg = d.getVar('PKG:' + pkg) |
| 55 | if newpkg and newpkg != pkg: | 55 | if newpkg and newpkg != pkg: |
| 56 | provs = (d.getVar('RPROVIDES_' + pkg) or "").split() | 56 | provs = (d.getVar('RPROVIDES:' + pkg) or "").split() |
| 57 | if pkg not in provs: | 57 | if pkg not in provs: |
| 58 | d.appendVar('RPROVIDES_' + pkg, " " + pkg + " (=" + d.getVar("PKGV") + ")") | 58 | d.appendVar('RPROVIDES:' + pkg, " " + pkg + " (=" + d.getVar("PKGV") + ")") |
| 59 | 59 | ||
| 60 | def auto_libname(packages, orig_pkg): | 60 | def auto_libname(packages, orig_pkg): |
| 61 | p = lambda var: pathlib.PurePath(d.getVar(var)) | 61 | p = lambda var: pathlib.PurePath(d.getVar(var)) |
| @@ -110,10 +110,10 @@ python debian_package_name_hook () { | |||
| 110 | if soname_result: | 110 | if soname_result: |
| 111 | (pkgname, devname) = soname_result | 111 | (pkgname, devname) = soname_result |
| 112 | for pkg in packages.split(): | 112 | for pkg in packages.split(): |
| 113 | if (d.getVar('PKG_' + pkg, False) or d.getVar('DEBIAN_NOAUTONAME_' + pkg, False)): | 113 | if (d.getVar('PKG:' + pkg, False) or d.getVar('DEBIAN_NOAUTONAME:' + pkg, False)): |
| 114 | add_rprovides(pkg, d) | 114 | add_rprovides(pkg, d) |
| 115 | continue | 115 | continue |
| 116 | debian_pn = d.getVar('DEBIANNAME_' + pkg, False) | 116 | debian_pn = d.getVar('DEBIANNAME:' + pkg, False) |
| 117 | if debian_pn: | 117 | if debian_pn: |
| 118 | newpkg = debian_pn | 118 | newpkg = debian_pn |
| 119 | elif pkg == orig_pkg: | 119 | elif pkg == orig_pkg: |
| @@ -126,7 +126,7 @@ python debian_package_name_hook () { | |||
| 126 | newpkg = mlpre + newpkg | 126 | newpkg = mlpre + newpkg |
| 127 | if newpkg != pkg: | 127 | if newpkg != pkg: |
| 128 | bb.note("debian: renaming %s to %s" % (pkg, newpkg)) | 128 | bb.note("debian: renaming %s to %s" % (pkg, newpkg)) |
| 129 | d.setVar('PKG_' + pkg, newpkg) | 129 | d.setVar('PKG:' + pkg, newpkg) |
| 130 | add_rprovides(pkg, d) | 130 | add_rprovides(pkg, d) |
| 131 | else: | 131 | else: |
| 132 | add_rprovides(orig_pkg, d) | 132 | add_rprovides(orig_pkg, d) |
diff --git a/meta/classes/devicetree.bbclass b/meta/classes/devicetree.bbclass
index ece883accf..8546c1cf80 100644
--- a/meta/classes/devicetree.bbclass
+++ b/meta/classes/devicetree.bbclass
| @@ -32,7 +32,7 @@ PROVIDES = "virtual/dtb" | |||
| 32 | PACKAGE_ARCH = "${MACHINE_ARCH}" | 32 | PACKAGE_ARCH = "${MACHINE_ARCH}" |
| 33 | 33 | ||
| 34 | SYSROOT_DIRS += "/boot/devicetree" | 34 | SYSROOT_DIRS += "/boot/devicetree" |
| 35 | FILES_${PN} = "/boot/devicetree/*.dtb /boot/devicetree/*.dtbo" | 35 | FILES:${PN} = "/boot/devicetree/*.dtb /boot/devicetree/*.dtbo" |
| 36 | 36 | ||
| 37 | S = "${WORKDIR}" | 37 | S = "${WORKDIR}" |
| 38 | B = "${WORKDIR}/build" | 38 | B = "${WORKDIR}/build" |
diff --git a/meta/classes/devupstream.bbclass b/meta/classes/devupstream.bbclass
index 7780c5482c..1230fa12ea 100644
--- a/meta/classes/devupstream.bbclass
+++ b/meta/classes/devupstream.bbclass
| @@ -4,8 +4,8 @@ | |||
| 4 | # | 4 | # |
| 5 | # Usage: | 5 | # Usage: |
| 6 | # BBCLASSEXTEND = "devupstream:target" | 6 | # BBCLASSEXTEND = "devupstream:target" |
| 7 | # SRC_URI_class-devupstream = "git://git.example.com/example" | 7 | # SRC_URI:class-devupstream = "git://git.example.com/example" |
| 8 | # SRCREV_class-devupstream = "abcdef" | 8 | # SRCREV:class-devupstream = "abcdef" |
| 9 | # | 9 | # |
| 10 | # If the first entry in SRC_URI is a git: URL then S is rewritten to | 10 | # If the first entry in SRC_URI is a git: URL then S is rewritten to |
| 11 | # WORKDIR/git. | 11 | # WORKDIR/git. |
diff --git a/meta/classes/distrooverrides.bbclass b/meta/classes/distrooverrides.bbclass
index 9f4db0d771..c172a348d8 100644
--- a/meta/classes/distrooverrides.bbclass
+++ b/meta/classes/distrooverrides.bbclass
| @@ -6,7 +6,7 @@ | |||
| 6 | # This makes it simpler to write .bbappends that only change the | 6 | # This makes it simpler to write .bbappends that only change the |
| 7 | # task signatures of the recipe if the change is really enabled, | 7 | # task signatures of the recipe if the change is really enabled, |
| 8 | # for example with: | 8 | # for example with: |
| 9 | # do_install_append_df-my-feature () { ... } | 9 | # do_install:append_df-my-feature () { ... } |
| 10 | # where "my-feature" is a DISTRO_FEATURE. | 10 | # where "my-feature" is a DISTRO_FEATURE. |
| 11 | # | 11 | # |
| 12 | # The class is meant to be used in a layer.conf or distro | 12 | # The class is meant to be used in a layer.conf or distro |
| @@ -22,8 +22,8 @@ DISTRO_FEATURES_OVERRIDES ?= "" | |||
| 22 | DISTRO_FEATURES_OVERRIDES[doc] = "A space-separated list of <feature> entries. \ | 22 | DISTRO_FEATURES_OVERRIDES[doc] = "A space-separated list of <feature> entries. \ |
| 23 | Each entry is added to OVERRIDES as df-<feature> if <feature> is in DISTRO_FEATURES." | 23 | Each entry is added to OVERRIDES as df-<feature> if <feature> is in DISTRO_FEATURES." |
| 24 | 24 | ||
| 25 | DISTRO_FEATURES_FILTER_NATIVE_append = " ${DISTRO_FEATURES_OVERRIDES}" | 25 | DISTRO_FEATURES_FILTER_NATIVE:append = " ${DISTRO_FEATURES_OVERRIDES}" |
| 26 | DISTRO_FEATURES_FILTER_NATIVESDK_append = " ${DISTRO_FEATURES_OVERRIDES}" | 26 | DISTRO_FEATURES_FILTER_NATIVESDK:append = " ${DISTRO_FEATURES_OVERRIDES}" |
| 27 | 27 | ||
| 28 | # If DISTRO_FEATURES_OVERRIDES or DISTRO_FEATURES show up in a task | 28 | # If DISTRO_FEATURES_OVERRIDES or DISTRO_FEATURES show up in a task |
| 29 | # signature because of this line, then the task dependency on | 29 | # signature because of this line, then the task dependency on |
diff --git a/meta/classes/distutils-common-base.bbclass b/meta/classes/distutils-common-base.bbclass
index 43a38e5a3a..bc994f78de 100644
--- a/meta/classes/distutils-common-base.bbclass
+++ b/meta/classes/distutils-common-base.bbclass
| @@ -13,12 +13,12 @@ export CCSHARED = "-fPIC -DPIC" | |||
| 13 | # the python executable | 13 | # the python executable |
| 14 | export LINKFORSHARED = "${SECURITY_CFLAGS} -Xlinker -export-dynamic" | 14 | export LINKFORSHARED = "${SECURITY_CFLAGS} -Xlinker -export-dynamic" |
| 15 | 15 | ||
| 16 | FILES_${PN} += "${libdir}/* ${libdir}/${PYTHON_DIR}/*" | 16 | FILES:${PN} += "${libdir}/* ${libdir}/${PYTHON_DIR}/*" |
| 17 | 17 | ||
| 18 | FILES_${PN}-staticdev += "\ | 18 | FILES:${PN}-staticdev += "\ |
| 19 | ${PYTHON_SITEPACKAGES_DIR}/*.a \ | 19 | ${PYTHON_SITEPACKAGES_DIR}/*.a \ |
| 20 | " | 20 | " |
| 21 | FILES_${PN}-dev += "\ | 21 | FILES:${PN}-dev += "\ |
| 22 | ${datadir}/pkgconfig \ | 22 | ${datadir}/pkgconfig \ |
| 23 | ${libdir}/pkgconfig \ | 23 | ${libdir}/pkgconfig \ |
| 24 | ${PYTHON_SITEPACKAGES_DIR}/*.la \ | 24 | ${PYTHON_SITEPACKAGES_DIR}/*.la \ |
diff --git a/meta/classes/distutils3-base.bbclass b/meta/classes/distutils3-base.bbclass
index 302ee8c82c..d41873e486 100644
--- a/meta/classes/distutils3-base.bbclass
+++ b/meta/classes/distutils3-base.bbclass
| @@ -1,6 +1,6 @@ | |||
| 1 | DEPENDS_append_class-target = " ${PYTHON_PN}-native ${PYTHON_PN}" | 1 | DEPENDS:append:class-target = " ${PYTHON_PN}-native ${PYTHON_PN}" |
| 2 | DEPENDS_append_class-nativesdk = " ${PYTHON_PN}-native ${PYTHON_PN}" | 2 | DEPENDS:append:class-nativesdk = " ${PYTHON_PN}-native ${PYTHON_PN}" |
| 3 | RDEPENDS_${PN} += "${@['', '${PYTHON_PN}-core']['${CLASSOVERRIDE}' == 'class-target']}" | 3 | RDEPENDS:${PN} += "${@['', '${PYTHON_PN}-core']['${CLASSOVERRIDE}' == 'class-target']}" |
| 4 | 4 | ||
| 5 | inherit distutils-common-base python3native python3targetconfig | 5 | inherit distutils-common-base python3native python3targetconfig |
| 6 | 6 | ||
diff --git a/meta/classes/distutils3.bbclass b/meta/classes/distutils3.bbclass
index a916a8000c..be645d37bd 100644
--- a/meta/classes/distutils3.bbclass
+++ b/meta/classes/distutils3.bbclass
| @@ -10,7 +10,7 @@ DISTUTILS_INSTALL_ARGS ?= "--root=${D} \ | |||
| 10 | --install-data=${datadir}" | 10 | --install-data=${datadir}" |
| 11 | 11 | ||
| 12 | DISTUTILS_PYTHON = "python3" | 12 | DISTUTILS_PYTHON = "python3" |
| 13 | DISTUTILS_PYTHON_class-native = "nativepython3" | 13 | DISTUTILS_PYTHON:class-native = "nativepython3" |
| 14 | 14 | ||
| 15 | DISTUTILS_SETUP_PATH ?= "${S}" | 15 | DISTUTILS_SETUP_PATH ?= "${S}" |
| 16 | 16 | ||
diff --git a/meta/classes/externalsrc.bbclass b/meta/classes/externalsrc.bbclass
index 3d6b80bee2..54b08adf62 100644
--- a/meta/classes/externalsrc.bbclass
+++ b/meta/classes/externalsrc.bbclass
| @@ -13,7 +13,7 @@ | |||
| 13 | # called "myrecipe" you would do: | 13 | # called "myrecipe" you would do: |
| 14 | # | 14 | # |
| 15 | # INHERIT += "externalsrc" | 15 | # INHERIT += "externalsrc" |
| 16 | # EXTERNALSRC_pn-myrecipe = "/path/to/my/source/tree" | 16 | # EXTERNALSRC:pn-myrecipe = "/path/to/my/source/tree" |
| 17 | # | 17 | # |
| 18 | # In order to make this class work for both target and native versions (or with | 18 | # In order to make this class work for both target and native versions (or with |
| 19 | # multilibs/cross or other BBCLASSEXTEND variants), B is set to point to a separate | 19 | # multilibs/cross or other BBCLASSEXTEND variants), B is set to point to a separate |
| @@ -21,7 +21,7 @@ | |||
| 21 | # the default, but the build directory can be set to the source directory if | 21 | # the default, but the build directory can be set to the source directory if |
| 22 | # circumstances dictate by setting EXTERNALSRC_BUILD to the same value, e.g.: | 22 | # circumstances dictate by setting EXTERNALSRC_BUILD to the same value, e.g.: |
| 23 | # | 23 | # |
| 24 | # EXTERNALSRC_BUILD_pn-myrecipe = "/path/to/my/source/tree" | 24 | # EXTERNALSRC_BUILD:pn-myrecipe = "/path/to/my/source/tree" |
| 25 | # | 25 | # |
| 26 | 26 | ||
| 27 | SRCTREECOVEREDTASKS ?= "do_patch do_unpack do_fetch" | 27 | SRCTREECOVEREDTASKS ?= "do_patch do_unpack do_fetch" |
| @@ -45,11 +45,11 @@ python () { | |||
| 45 | if bpn == d.getVar('PN') or not classextend: | 45 | if bpn == d.getVar('PN') or not classextend: |
| 46 | if (externalsrc or | 46 | if (externalsrc or |
| 47 | ('native' in classextend and | 47 | ('native' in classextend and |
| 48 | d.getVar('EXTERNALSRC_pn-%s-native' % bpn)) or | 48 | d.getVar('EXTERNALSRC:pn-%s-native' % bpn)) or |
| 49 | ('nativesdk' in classextend and | 49 | ('nativesdk' in classextend and |
| 50 | d.getVar('EXTERNALSRC_pn-nativesdk-%s' % bpn)) or | 50 | d.getVar('EXTERNALSRC:pn-nativesdk-%s' % bpn)) or |
| 51 | ('cross' in classextend and | 51 | ('cross' in classextend and |
| 52 | d.getVar('EXTERNALSRC_pn-%s-cross' % bpn))): | 52 | d.getVar('EXTERNALSRC:pn-%s-cross' % bpn))): |
| 53 | d.setVar('BB_DONT_CACHE', '1') | 53 | d.setVar('BB_DONT_CACHE', '1') |
| 54 | 54 | ||
| 55 | if externalsrc: | 55 | if externalsrc: |
diff --git a/meta/classes/extrausers.bbclass b/meta/classes/extrausers.bbclass
index 90811bfe2a..a8ef660b30 100644
--- a/meta/classes/extrausers.bbclass
+++ b/meta/classes/extrausers.bbclass
| @@ -14,10 +14,10 @@ | |||
| 14 | 14 | ||
| 15 | inherit useradd_base | 15 | inherit useradd_base |
| 16 | 16 | ||
| 17 | PACKAGE_INSTALL_append = " ${@['', 'base-passwd shadow'][bool(d.getVar('EXTRA_USERS_PARAMS'))]}" | 17 | PACKAGE_INSTALL:append = " ${@['', 'base-passwd shadow'][bool(d.getVar('EXTRA_USERS_PARAMS'))]}" |
| 18 | 18 | ||
| 19 | # Image level user / group settings | 19 | # Image level user / group settings |
| 20 | ROOTFS_POSTPROCESS_COMMAND_append = " set_user_group;" | 20 | ROOTFS_POSTPROCESS_COMMAND:append = " set_user_group;" |
| 21 | 21 | ||
| 22 | # Image level user / group settings | 22 | # Image level user / group settings |
| 23 | set_user_group () { | 23 | set_user_group () { |
diff --git a/meta/classes/fontcache.bbclass b/meta/classes/fontcache.bbclass
index 624a420a0d..442bfc7392 100644
--- a/meta/classes/fontcache.bbclass
+++ b/meta/classes/fontcache.bbclass
| @@ -35,23 +35,23 @@ python () { | |||
| 35 | deps = d.getVar("FONT_EXTRA_RDEPENDS") | 35 | deps = d.getVar("FONT_EXTRA_RDEPENDS") |
| 36 | 36 | ||
| 37 | for pkg in font_pkgs: | 37 | for pkg in font_pkgs: |
| 38 | if deps: d.appendVar('RDEPENDS_' + pkg, ' '+deps) | 38 | if deps: d.appendVar('RDEPENDS:' + pkg, ' '+deps) |
| 39 | } | 39 | } |
| 40 | 40 | ||
| 41 | python add_fontcache_postinsts() { | 41 | python add_fontcache_postinsts() { |
| 42 | for pkg in d.getVar('FONT_PACKAGES').split(): | 42 | for pkg in d.getVar('FONT_PACKAGES').split(): |
| 43 | bb.note("adding fonts postinst and postrm scripts to %s" % pkg) | 43 | bb.note("adding fonts postinst and postrm scripts to %s" % pkg) |
| 44 | postinst = d.getVar('pkg_postinst_%s' % pkg) or d.getVar('pkg_postinst') | 44 | postinst = d.getVar('pkg_postinst:%s' % pkg) or d.getVar('pkg_postinst') |
| 45 | if not postinst: | 45 | if not postinst: |
| 46 | postinst = '#!/bin/sh\n' | 46 | postinst = '#!/bin/sh\n' |
| 47 | postinst += d.getVar('fontcache_common') | 47 | postinst += d.getVar('fontcache_common') |
| 48 | d.setVar('pkg_postinst_%s' % pkg, postinst) | 48 | d.setVar('pkg_postinst:%s' % pkg, postinst) |
| 49 | 49 | ||
| 50 | postrm = d.getVar('pkg_postrm_%s' % pkg) or d.getVar('pkg_postrm') | 50 | postrm = d.getVar('pkg_postrm:%s' % pkg) or d.getVar('pkg_postrm') |
| 51 | if not postrm: | 51 | if not postrm: |
| 52 | postrm = '#!/bin/sh\n' | 52 | postrm = '#!/bin/sh\n' |
| 53 | postrm += d.getVar('fontcache_common') | 53 | postrm += d.getVar('fontcache_common') |
| 54 | d.setVar('pkg_postrm_%s' % pkg, postrm) | 54 | d.setVar('pkg_postrm:%s' % pkg, postrm) |
| 55 | } | 55 | } |
| 56 | 56 | ||
| 57 | PACKAGEFUNCS =+ "add_fontcache_postinsts" | 57 | PACKAGEFUNCS =+ "add_fontcache_postinsts" |
diff --git a/meta/classes/gconf.bbclass b/meta/classes/gconf.bbclass
index 3e3c509d5f..9d3668edd3 100644
--- a/meta/classes/gconf.bbclass
+++ b/meta/classes/gconf.bbclass
| @@ -41,7 +41,7 @@ for SCHEMA in ${SCHEMA_FILES}; do | |||
| 41 | done | 41 | done |
| 42 | } | 42 | } |
| 43 | 43 | ||
| 44 | python populate_packages_append () { | 44 | python populate_packages:append () { |
| 45 | import re | 45 | import re |
| 46 | packages = d.getVar('PACKAGES').split() | 46 | packages = d.getVar('PACKAGES').split() |
| 47 | pkgdest = d.getVar('PKGDEST') | 47 | pkgdest = d.getVar('PKGDEST') |
| @@ -57,15 +57,15 @@ python populate_packages_append () { | |||
| 57 | if schemas != []: | 57 | if schemas != []: |
| 58 | bb.note("adding gconf postinst and prerm scripts to %s" % pkg) | 58 | bb.note("adding gconf postinst and prerm scripts to %s" % pkg) |
| 59 | d.setVar('SCHEMA_FILES', " ".join(schemas)) | 59 | d.setVar('SCHEMA_FILES', " ".join(schemas)) |
| 60 | postinst = d.getVar('pkg_postinst_%s' % pkg) | 60 | postinst = d.getVar('pkg_postinst:%s' % pkg) |
| 61 | if not postinst: | 61 | if not postinst: |
| 62 | postinst = '#!/bin/sh\n' | 62 | postinst = '#!/bin/sh\n' |
| 63 | postinst += d.getVar('gconf_postinst') | 63 | postinst += d.getVar('gconf_postinst') |
| 64 | d.setVar('pkg_postinst_%s' % pkg, postinst) | 64 | d.setVar('pkg_postinst:%s' % pkg, postinst) |
| 65 | prerm = d.getVar('pkg_prerm_%s' % pkg) | 65 | prerm = d.getVar('pkg_prerm:%s' % pkg) |
| 66 | if not prerm: | 66 | if not prerm: |
| 67 | prerm = '#!/bin/sh\n' | 67 | prerm = '#!/bin/sh\n' |
| 68 | prerm += d.getVar('gconf_prerm') | 68 | prerm += d.getVar('gconf_prerm') |
| 69 | d.setVar('pkg_prerm_%s' % pkg, prerm) | 69 | d.setVar('pkg_prerm:%s' % pkg, prerm) |
| 70 | d.appendVar("RDEPENDS_%s" % pkg, ' ' + d.getVar('MLPREFIX', False) + 'gconf') | 70 | d.appendVar("RDEPENDS:%s" % pkg, ' ' + d.getVar('MLPREFIX', False) + 'gconf') |
| 71 | } | 71 | } |
diff --git a/meta/classes/gettext.bbclass b/meta/classes/gettext.bbclass
index be2ef3b311..f11cb04456 100644
--- a/meta/classes/gettext.bbclass
+++ b/meta/classes/gettext.bbclass
| @@ -13,10 +13,10 @@ def gettext_oeconf(d): | |||
| 13 | return '--disable-nls' | 13 | return '--disable-nls' |
| 14 | return "--enable-nls" | 14 | return "--enable-nls" |
| 15 | 15 | ||
| 16 | BASEDEPENDS_append = " ${@gettext_dependencies(d)}" | 16 | BASEDEPENDS:append = " ${@gettext_dependencies(d)}" |
| 17 | EXTRA_OECONF_append = " ${@gettext_oeconf(d)}" | 17 | EXTRA_OECONF:append = " ${@gettext_oeconf(d)}" |
| 18 | 18 | ||
| 19 | # Without this, msgfmt from gettext-native will not find ITS files | 19 | # Without this, msgfmt from gettext-native will not find ITS files |
| 20 | # provided by target recipes (for example, polkit.its). | 20 | # provided by target recipes (for example, polkit.its). |
| 21 | GETTEXTDATADIRS_append_class-target = ":${STAGING_DATADIR}/gettext" | 21 | GETTEXTDATADIRS:append:class-target = ":${STAGING_DATADIR}/gettext" |
| 22 | export GETTEXTDATADIRS | 22 | export GETTEXTDATADIRS |
diff --git a/meta/classes/gi-docgen.bbclass b/meta/classes/gi-docgen.bbclass
index 5750f7028d..15581ca127 100644
--- a/meta/classes/gi-docgen.bbclass
+++ b/meta/classes/gi-docgen.bbclass
| @@ -7,8 +7,8 @@ | |||
| 7 | GIDOCGEN_ENABLED ?= "${@bb.utils.contains('DISTRO_FEATURES', 'api-documentation', 'True', 'False', d)}" | 7 | GIDOCGEN_ENABLED ?= "${@bb.utils.contains('DISTRO_FEATURES', 'api-documentation', 'True', 'False', d)}" |
| 8 | # When building native recipes, disable gi-docgen, as it is not necessary, | 8 | # When building native recipes, disable gi-docgen, as it is not necessary, |
| 9 | # pulls in additional dependencies, and makes build times longer | 9 | # pulls in additional dependencies, and makes build times longer |
| 10 | GIDOCGEN_ENABLED_class-native = "False" | 10 | GIDOCGEN_ENABLED:class-native = "False" |
| 11 | GIDOCGEN_ENABLED_class-nativesdk = "False" | 11 | GIDOCGEN_ENABLED:class-nativesdk = "False" |
| 12 | 12 | ||
| 13 | # meson: default option name to enable/disable gi-docgen. This matches most | 13 | # meson: default option name to enable/disable gi-docgen. This matches most |
| 14 | # projects' configuration. In doubts - check meson_options.txt in project's | 14 | # projects' configuration. In doubts - check meson_options.txt in project's |
| @@ -18,7 +18,7 @@ GIDOCGEN_MESON_ENABLE_FLAG ?= 'true' | |||
| 18 | GIDOCGEN_MESON_DISABLE_FLAG ?= 'false' | 18 | GIDOCGEN_MESON_DISABLE_FLAG ?= 'false' |
| 19 | 19 | ||
| 20 | # Auto enable/disable based on GIDOCGEN_ENABLED | 20 | # Auto enable/disable based on GIDOCGEN_ENABLED |
| 21 | EXTRA_OEMESON_prepend = "-D${GIDOCGEN_MESON_OPTION}=${@bb.utils.contains('GIDOCGEN_ENABLED', 'True', '${GIDOCGEN_MESON_ENABLE_FLAG}', '${GIDOCGEN_MESON_DISABLE_FLAG}', d)} " | 21 | EXTRA_OEMESON:prepend = "-D${GIDOCGEN_MESON_OPTION}=${@bb.utils.contains('GIDOCGEN_ENABLED', 'True', '${GIDOCGEN_MESON_ENABLE_FLAG}', '${GIDOCGEN_MESON_DISABLE_FLAG}', d)} " |
| 22 | 22 | ||
| 23 | DEPENDS_append = "${@' gi-docgen-native gi-docgen' if d.getVar('GIDOCGEN_ENABLED') == 'True' else ''}" | 23 | DEPENDS:append = "${@' gi-docgen-native gi-docgen' if d.getVar('GIDOCGEN_ENABLED') == 'True' else ''}" |
| 24 | 24 | ||
diff --git a/meta/classes/gio-module-cache.bbclass b/meta/classes/gio-module-cache.bbclass
index e429bd3197..021eeb1cf8 100644
--- a/meta/classes/gio-module-cache.bbclass
+++ b/meta/classes/gio-module-cache.bbclass
| @@ -17,22 +17,22 @@ else | |||
| 17 | fi | 17 | fi |
| 18 | } | 18 | } |
| 19 | 19 | ||
| 20 | python populate_packages_append () { | 20 | python populate_packages:append () { |
| 21 | packages = d.getVar('GIO_MODULE_PACKAGES').split() | 21 | packages = d.getVar('GIO_MODULE_PACKAGES').split() |
| 22 | 22 | ||
| 23 | for pkg in packages: | 23 | for pkg in packages: |
| 24 | bb.note("adding gio-module-cache postinst and postrm scripts to %s" % pkg) | 24 | bb.note("adding gio-module-cache postinst and postrm scripts to %s" % pkg) |
| 25 | 25 | ||
| 26 | postinst = d.getVar('pkg_postinst_%s' % pkg) | 26 | postinst = d.getVar('pkg_postinst:%s' % pkg) |
| 27 | if not postinst: | 27 | if not postinst: |
| 28 | postinst = '#!/bin/sh\n' | 28 | postinst = '#!/bin/sh\n' |
| 29 | postinst += d.getVar('gio_module_cache_common') | 29 | postinst += d.getVar('gio_module_cache_common') |
| 30 | d.setVar('pkg_postinst_%s' % pkg, postinst) | 30 | d.setVar('pkg_postinst:%s' % pkg, postinst) |
| 31 | 31 | ||
| 32 | postrm = d.getVar('pkg_postrm_%s' % pkg) | 32 | postrm = d.getVar('pkg_postrm:%s' % pkg) |
| 33 | if not postrm: | 33 | if not postrm: |
| 34 | postrm = '#!/bin/sh\n' | 34 | postrm = '#!/bin/sh\n' |
| 35 | postrm += d.getVar('gio_module_cache_common') | 35 | postrm += d.getVar('gio_module_cache_common') |
| 36 | d.setVar('pkg_postrm_%s' % pkg, postrm) | 36 | d.setVar('pkg_postrm:%s' % pkg, postrm) |
| 37 | } | 37 | } |
| 38 | 38 | ||
diff --git a/meta/classes/glide.bbclass b/meta/classes/glide.bbclass
index db421745bd..2db4ac6846 100644
--- a/meta/classes/glide.bbclass
+++ b/meta/classes/glide.bbclass
| @@ -2,8 +2,8 @@ | |||
| 2 | # | 2 | # |
| 3 | # Copyright 2018 (C) O.S. Systems Software LTDA. | 3 | # Copyright 2018 (C) O.S. Systems Software LTDA. |
| 4 | 4 | ||
| 5 | DEPENDS_append = " glide-native" | 5 | DEPENDS:append = " glide-native" |
| 6 | 6 | ||
| 7 | do_compile_prepend() { | 7 | do_compile:prepend() { |
| 8 | ( cd ${B}/src/${GO_IMPORT} && glide install ) | 8 | ( cd ${B}/src/${GO_IMPORT} && glide install ) |
| 9 | } | 9 | } |
diff --git a/meta/classes/gnomebase.bbclass b/meta/classes/gnomebase.bbclass
index 884b1a106f..9a5bd9a232 100644
--- a/meta/classes/gnomebase.bbclass
+++ b/meta/classes/gnomebase.bbclass
| @@ -7,7 +7,7 @@ SECTION ?= "x11/gnome" | |||
| 7 | GNOMEBN ?= "${BPN}" | 7 | GNOMEBN ?= "${BPN}" |
| 8 | SRC_URI = "${GNOME_MIRROR}/${GNOMEBN}/${@gnome_verdir("${PV}")}/${GNOMEBN}-${PV}.tar.${GNOME_COMPRESS_TYPE};name=archive" | 8 | SRC_URI = "${GNOME_MIRROR}/${GNOMEBN}/${@gnome_verdir("${PV}")}/${GNOMEBN}-${PV}.tar.${GNOME_COMPRESS_TYPE};name=archive" |
| 9 | 9 | ||
| 10 | FILES_${PN} += "${datadir}/application-registry \ | 10 | FILES:${PN} += "${datadir}/application-registry \ |
| 11 | ${datadir}/mime-info \ | 11 | ${datadir}/mime-info \ |
| 12 | ${datadir}/mime/packages \ | 12 | ${datadir}/mime/packages \ |
| 13 | ${datadir}/mime/application \ | 13 | ${datadir}/mime/application \ |
| @@ -19,12 +19,12 @@ FILES_${PN} += "${datadir}/application-registry \ | |||
| 19 | ${datadir}/icons \ | 19 | ${datadir}/icons \ |
| 20 | " | 20 | " |
| 21 | 21 | ||
| 22 | FILES_${PN}-doc += "${datadir}/devhelp" | 22 | FILES:${PN}-doc += "${datadir}/devhelp" |
| 23 | 23 | ||
| 24 | GNOMEBASEBUILDCLASS ??= "autotools" | 24 | GNOMEBASEBUILDCLASS ??= "autotools" |
| 25 | inherit ${GNOMEBASEBUILDCLASS} pkgconfig | 25 | inherit ${GNOMEBASEBUILDCLASS} pkgconfig |
| 26 | 26 | ||
| 27 | do_install_append() { | 27 | do_install:append() { |
| 28 | rm -rf ${D}${localstatedir}/lib/scrollkeeper/* | 28 | rm -rf ${D}${localstatedir}/lib/scrollkeeper/* |
| 29 | rm -rf ${D}${localstatedir}/scrollkeeper/* | 29 | rm -rf ${D}${localstatedir}/scrollkeeper/* |
| 30 | rm -f ${D}${datadir}/applications/*.cache | 30 | rm -f ${D}${datadir}/applications/*.cache |
diff --git a/meta/classes/go-mod.bbclass b/meta/classes/go-mod.bbclass
index cabb04d0ec..674d2434e0 100644
--- a/meta/classes/go-mod.bbclass
+++ b/meta/classes/go-mod.bbclass
| @@ -12,7 +12,7 @@ | |||
| 12 | 12 | ||
| 13 | # The '-modcacherw' option ensures we have write access to the cached objects so | 13 | # The '-modcacherw' option ensures we have write access to the cached objects so |
| 14 | # we avoid errors during clean task as well as when removing the TMPDIR. | 14 | # we avoid errors during clean task as well as when removing the TMPDIR. |
| 15 | GOBUILDFLAGS_append = " -modcacherw" | 15 | GOBUILDFLAGS:append = " -modcacherw" |
| 16 | 16 | ||
| 17 | inherit go | 17 | inherit go |
| 18 | 18 | ||
diff --git a/meta/classes/go-ptest.bbclass b/meta/classes/go-ptest.bbclass
index e230a80587..b282ff7374 100644
--- a/meta/classes/go-ptest.bbclass
+++ b/meta/classes/go-ptest.bbclass
| @@ -50,5 +50,5 @@ do_install_ptest_base() { | |||
| 50 | chown -R root:root ${D}${PTEST_PATH} | 50 | chown -R root:root ${D}${PTEST_PATH} |
| 51 | } | 51 | } |
| 52 | 52 | ||
| 53 | INSANE_SKIP_${PN}-ptest += "ldflags" | 53 | INSANE_SKIP:${PN}-ptest += "ldflags" |
| 54 | 54 | ||
diff --git a/meta/classes/go.bbclass b/meta/classes/go.bbclass
index 77ec98dd51..0dd0c5f1d2 100644
--- a/meta/classes/go.bbclass
+++ b/meta/classes/go.bbclass
| @@ -2,8 +2,8 @@ inherit goarch | |||
| 2 | 2 | ||
| 3 | GO_PARALLEL_BUILD ?= "${@oe.utils.parallel_make_argument(d, '-p %d')}" | 3 | GO_PARALLEL_BUILD ?= "${@oe.utils.parallel_make_argument(d, '-p %d')}" |
| 4 | 4 | ||
| 5 | GOROOT_class-native = "${STAGING_LIBDIR_NATIVE}/go" | 5 | GOROOT:class-native = "${STAGING_LIBDIR_NATIVE}/go" |
| 6 | GOROOT_class-nativesdk = "${STAGING_DIR_TARGET}${libdir}/go" | 6 | GOROOT:class-nativesdk = "${STAGING_DIR_TARGET}${libdir}/go" |
| 7 | GOROOT = "${STAGING_LIBDIR}/go" | 7 | GOROOT = "${STAGING_LIBDIR}/go" |
| 8 | export GOROOT | 8 | export GOROOT |
| 9 | export GOROOT_FINAL = "${libdir}/go" | 9 | export GOROOT_FINAL = "${libdir}/go" |
| @@ -15,42 +15,42 @@ export GOHOSTARCH="${BUILD_GOARCH}" | |||
| 15 | export GOHOSTOS="${BUILD_GOOS}" | 15 | export GOHOSTOS="${BUILD_GOOS}" |
| 16 | 16 | ||
| 17 | GOARM[export] = "0" | 17 | GOARM[export] = "0" |
| 18 | GOARM_arm_class-target = "${TARGET_GOARM}" | 18 | GOARM:arm:class-target = "${TARGET_GOARM}" |
| 19 | GOARM_arm_class-target[export] = "1" | 19 | GOARM:arm:class-target[export] = "1" |
| 20 | 20 | ||
| 21 | GO386[export] = "0" | 21 | GO386[export] = "0" |
| 22 | GO386_x86_class-target = "${TARGET_GO386}" | 22 | GO386:x86:class-target = "${TARGET_GO386}" |
| 23 | GO386_x86_class-target[export] = "1" | 23 | GO386:x86:class-target[export] = "1" |
| 24 | 24 | ||
| 25 | GOMIPS[export] = "0" | 25 | GOMIPS[export] = "0" |
| 26 | GOMIPS_mips_class-target = "${TARGET_GOMIPS}" | 26 | GOMIPS:mips:class-target = "${TARGET_GOMIPS}" |
| 27 | GOMIPS_mips_class-target[export] = "1" | 27 | GOMIPS:mips:class-target[export] = "1" |
| 28 | 28 | ||
| 29 | DEPENDS_GOLANG_class-target = "virtual/${TUNE_PKGARCH}-go virtual/${TARGET_PREFIX}go-runtime" | 29 | DEPENDS_GOLANG:class-target = "virtual/${TUNE_PKGARCH}-go virtual/${TARGET_PREFIX}go-runtime" |
| 30 | DEPENDS_GOLANG_class-native = "go-native" | 30 | DEPENDS_GOLANG:class-native = "go-native" |
| 31 | DEPENDS_GOLANG_class-nativesdk = "virtual/${TARGET_PREFIX}go-crosssdk virtual/${TARGET_PREFIX}go-runtime" | 31 | DEPENDS_GOLANG:class-nativesdk = "virtual/${TARGET_PREFIX}go-crosssdk virtual/${TARGET_PREFIX}go-runtime" |
| 32 | 32 | ||
| 33 | DEPENDS_append = " ${DEPENDS_GOLANG}" | 33 | DEPENDS:append = " ${DEPENDS_GOLANG}" |
| 34 | 34 | ||
| 35 | GO_LINKSHARED ?= "${@'-linkshared' if d.getVar('GO_DYNLINK') else ''}" | 35 | GO_LINKSHARED ?= "${@'-linkshared' if d.getVar('GO_DYNLINK') else ''}" |
| 36 | GO_RPATH_LINK = "${@'-Wl,-rpath-link=${STAGING_DIR_TARGET}${libdir}/go/pkg/${TARGET_GOTUPLE}_dynlink' if d.getVar('GO_DYNLINK') else ''}" | 36 | GO_RPATH_LINK = "${@'-Wl,-rpath-link=${STAGING_DIR_TARGET}${libdir}/go/pkg/${TARGET_GOTUPLE}_dynlink' if d.getVar('GO_DYNLINK') else ''}" |
| 37 | GO_RPATH = "${@'-r ${libdir}/go/pkg/${TARGET_GOTUPLE}_dynlink' if d.getVar('GO_DYNLINK') else ''}" | 37 | GO_RPATH = "${@'-r ${libdir}/go/pkg/${TARGET_GOTUPLE}_dynlink' if d.getVar('GO_DYNLINK') else ''}" |
| 38 | GO_RPATH_class-native = "${@'-r ${STAGING_LIBDIR_NATIVE}/go/pkg/${TARGET_GOTUPLE}_dynlink' if d.getVar('GO_DYNLINK') else ''}" | 38 | GO_RPATH:class-native = "${@'-r ${STAGING_LIBDIR_NATIVE}/go/pkg/${TARGET_GOTUPLE}_dynlink' if d.getVar('GO_DYNLINK') else ''}" |
| 39 | GO_RPATH_LINK_class-native = "${@'-Wl,-rpath-link=${STAGING_LIBDIR_NATIVE}/go/pkg/${TARGET_GOTUPLE}_dynlink' if d.getVar('GO_DYNLINK') else ''}" | 39 | GO_RPATH_LINK:class-native = "${@'-Wl,-rpath-link=${STAGING_LIBDIR_NATIVE}/go/pkg/${TARGET_GOTUPLE}_dynlink' if d.getVar('GO_DYNLINK') else ''}" |
| 40 | GO_EXTLDFLAGS ?= "${HOST_CC_ARCH}${TOOLCHAIN_OPTIONS} ${GO_RPATH_LINK} ${LDFLAGS}" | 40 | GO_EXTLDFLAGS ?= "${HOST_CC_ARCH}${TOOLCHAIN_OPTIONS} ${GO_RPATH_LINK} ${LDFLAGS}" |
| 41 | GO_LINKMODE ?= "" | 41 | GO_LINKMODE ?= "" |
| 42 | GO_LINKMODE_class-nativesdk = "--linkmode=external" | 42 | GO_LINKMODE:class-nativesdk = "--linkmode=external" |
| 43 | GO_LINKMODE_class-native = "--linkmode=external" | 43 | GO_LINKMODE:class-native = "--linkmode=external" |
| 44 | GO_LDFLAGS ?= '-ldflags="${GO_RPATH} ${GO_LINKMODE} -extldflags '${GO_EXTLDFLAGS}'"' | 44 | GO_LDFLAGS ?= '-ldflags="${GO_RPATH} ${GO_LINKMODE} -extldflags '${GO_EXTLDFLAGS}'"' |
| 45 | export GOBUILDFLAGS ?= "-v ${GO_LDFLAGS} -trimpath" | 45 | export GOBUILDFLAGS ?= "-v ${GO_LDFLAGS} -trimpath" |
| 46 | export GOPATH_OMIT_IN_ACTIONID ?= "1" | 46 | export GOPATH_OMIT_IN_ACTIONID ?= "1" |
| 47 | export GOPTESTBUILDFLAGS ?= "${GOBUILDFLAGS} -c" | 47 | export GOPTESTBUILDFLAGS ?= "${GOBUILDFLAGS} -c" |
| 48 | export GOPTESTFLAGS ?= "" | 48 | export GOPTESTFLAGS ?= "" |
| 49 | GOBUILDFLAGS_prepend_task-compile = "${GO_PARALLEL_BUILD} " | 49 | GOBUILDFLAGS:prepend:task-compile = "${GO_PARALLEL_BUILD} " |
| 50 | 50 | ||
| 51 | export GO = "${HOST_PREFIX}go" | 51 | export GO = "${HOST_PREFIX}go" |
| 52 | GOTOOLDIR = "${STAGING_LIBDIR_NATIVE}/${TARGET_SYS}/go/pkg/tool/${BUILD_GOTUPLE}" | 52 | GOTOOLDIR = "${STAGING_LIBDIR_NATIVE}/${TARGET_SYS}/go/pkg/tool/${BUILD_GOTUPLE}" |
| 53 | GOTOOLDIR_class-native = "${STAGING_LIBDIR_NATIVE}/go/pkg/tool/${BUILD_GOTUPLE}" | 53 | GOTOOLDIR:class-native = "${STAGING_LIBDIR_NATIVE}/go/pkg/tool/${BUILD_GOTUPLE}" |
| 54 | export GOTOOLDIR | 54 | export GOTOOLDIR |
| 55 | 55 | ||
| 56 | export CGO_ENABLED ?= "1" | 56 | export CGO_ENABLED ?= "1" |
| @@ -140,17 +140,17 @@ go_stage_testdata() { | |||
| 140 | 140 | ||
| 141 | EXPORT_FUNCTIONS do_unpack do_configure do_compile do_install | 141 | EXPORT_FUNCTIONS do_unpack do_configure do_compile do_install |
| 142 | 142 | ||
| 143 | FILES_${PN}-dev = "${libdir}/go/src" | 143 | FILES:${PN}-dev = "${libdir}/go/src" |
| 144 | FILES_${PN}-staticdev = "${libdir}/go/pkg" | 144 | FILES:${PN}-staticdev = "${libdir}/go/pkg" |
| 145 | 145 | ||
| 146 | INSANE_SKIP_${PN} += "ldflags" | 146 | INSANE_SKIP:${PN} += "ldflags" |
| 147 | 147 | ||
| 148 | # Add -buildmode=pie to GOBUILDFLAGS to satisfy "textrel" QA checking, but mips | 148 | # Add -buildmode=pie to GOBUILDFLAGS to satisfy "textrel" QA checking, but mips |
| 149 | # doesn't support -buildmode=pie, so skip the QA checking for mips/rv32 and its | 149 | # doesn't support -buildmode=pie, so skip the QA checking for mips/rv32 and its |
| 150 | # variants. | 150 | # variants. |
| 151 | python() { | 151 | python() { |
| 152 | if 'mips' in d.getVar('TARGET_ARCH') or 'riscv32' in d.getVar('TARGET_ARCH'): | 152 | if 'mips' in d.getVar('TARGET_ARCH') or 'riscv32' in d.getVar('TARGET_ARCH'): |
| 153 | d.appendVar('INSANE_SKIP_%s' % d.getVar('PN'), " textrel") | 153 | d.appendVar('INSANE_SKIP:%s' % d.getVar('PN'), " textrel") |
| 154 | else: | 154 | else: |
| 155 | d.appendVar('GOBUILDFLAGS', ' -buildmode=pie') | 155 | d.appendVar('GOBUILDFLAGS', ' -buildmode=pie') |
| 156 | } | 156 | } |
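As a point of reference, here is a minimal sketch of how a recipe consumes these converted go.bbclass variables; the import path and build tag are illustrative and not part of this commit:

    GO_IMPORT = "example.com/hello"
    GO_INSTALL = "${GO_IMPORT}/..."
    inherit go
    # extra build tag only for target builds, using the chained ":" overrides
    GOBUILDFLAGS:append:class-target = " -tags netgo"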
diff --git a/meta/classes/goarch.bbclass b/meta/classes/goarch.bbclass index e4e0ca37be..48dc48dabf 100644 --- a/meta/classes/goarch.bbclass +++ b/meta/classes/goarch.bbclass | |||
| @@ -6,54 +6,54 @@ HOST_GOARCH = "${@go_map_arch(d.getVar('HOST_ARCH'), d)}" | |||
| 6 | HOST_GOARM = "${@go_map_arm(d.getVar('HOST_ARCH'), d)}" | 6 | HOST_GOARM = "${@go_map_arm(d.getVar('HOST_ARCH'), d)}" |
| 7 | HOST_GO386 = "${@go_map_386(d.getVar('HOST_ARCH'), d.getVar('TUNE_FEATURES'), d)}" | 7 | HOST_GO386 = "${@go_map_386(d.getVar('HOST_ARCH'), d.getVar('TUNE_FEATURES'), d)}" |
| 8 | HOST_GOMIPS = "${@go_map_mips(d.getVar('HOST_ARCH'), d.getVar('TUNE_FEATURES'), d)}" | 8 | HOST_GOMIPS = "${@go_map_mips(d.getVar('HOST_ARCH'), d.getVar('TUNE_FEATURES'), d)}" |
| 9 | HOST_GOARM_class-native = "7" | 9 | HOST_GOARM:class-native = "7" |
| 10 | HOST_GO386_class-native = "sse2" | 10 | HOST_GO386:class-native = "sse2" |
| 11 | HOST_GOMIPS_class-native = "hardfloat" | 11 | HOST_GOMIPS:class-native = "hardfloat" |
| 12 | HOST_GOTUPLE = "${HOST_GOOS}_${HOST_GOARCH}" | 12 | HOST_GOTUPLE = "${HOST_GOOS}_${HOST_GOARCH}" |
| 13 | TARGET_GOOS = "${@go_map_os(d.getVar('TARGET_OS'), d)}" | 13 | TARGET_GOOS = "${@go_map_os(d.getVar('TARGET_OS'), d)}" |
| 14 | TARGET_GOARCH = "${@go_map_arch(d.getVar('TARGET_ARCH'), d)}" | 14 | TARGET_GOARCH = "${@go_map_arch(d.getVar('TARGET_ARCH'), d)}" |
| 15 | TARGET_GOARM = "${@go_map_arm(d.getVar('TARGET_ARCH'), d)}" | 15 | TARGET_GOARM = "${@go_map_arm(d.getVar('TARGET_ARCH'), d)}" |
| 16 | TARGET_GO386 = "${@go_map_386(d.getVar('TARGET_ARCH'), d.getVar('TUNE_FEATURES'), d)}" | 16 | TARGET_GO386 = "${@go_map_386(d.getVar('TARGET_ARCH'), d.getVar('TUNE_FEATURES'), d)}" |
| 17 | TARGET_GOMIPS = "${@go_map_mips(d.getVar('TARGET_ARCH'), d.getVar('TUNE_FEATURES'), d)}" | 17 | TARGET_GOMIPS = "${@go_map_mips(d.getVar('TARGET_ARCH'), d.getVar('TUNE_FEATURES'), d)}" |
| 18 | TARGET_GOARM_class-native = "7" | 18 | TARGET_GOARM:class-native = "7" |
| 19 | TARGET_GO386_class-native = "sse2" | 19 | TARGET_GO386:class-native = "sse2" |
| 20 | TARGET_GOMIPS_class-native = "hardfloat" | 20 | TARGET_GOMIPS:class-native = "hardfloat" |
| 21 | TARGET_GOTUPLE = "${TARGET_GOOS}_${TARGET_GOARCH}" | 21 | TARGET_GOTUPLE = "${TARGET_GOOS}_${TARGET_GOARCH}" |
| 22 | GO_BUILD_BINDIR = "${@['bin/${HOST_GOTUPLE}','bin'][d.getVar('BUILD_GOTUPLE') == d.getVar('HOST_GOTUPLE')]}" | 22 | GO_BUILD_BINDIR = "${@['bin/${HOST_GOTUPLE}','bin'][d.getVar('BUILD_GOTUPLE') == d.getVar('HOST_GOTUPLE')]}" |
| 23 | 23 | ||
| 24 | # Use the MACHINEOVERRIDES to map ARM CPU architecture passed to GO via GOARM. | 24 | # Use the MACHINEOVERRIDES to map ARM CPU architecture passed to GO via GOARM. |
| 25 | # This is combined with *_ARCH to set HOST_GOARM and TARGET_GOARM. | 25 | # This is combined with *_ARCH to set HOST_GOARM and TARGET_GOARM. |
| 26 | BASE_GOARM = '' | 26 | BASE_GOARM = '' |
| 27 | BASE_GOARM_armv7ve = '7' | 27 | BASE_GOARM:armv7ve = '7' |
| 28 | BASE_GOARM_armv7a = '7' | 28 | BASE_GOARM:armv7a = '7' |
| 29 | BASE_GOARM_armv6 = '6' | 29 | BASE_GOARM:armv6 = '6' |
| 30 | BASE_GOARM_armv5 = '5' | 30 | BASE_GOARM:armv5 = '5' |
| 31 | 31 | ||
| 32 | # Go supports dynamic linking on a limited set of architectures. | 32 | # Go supports dynamic linking on a limited set of architectures. |
| 33 | # See the supportsDynlink function in go/src/cmd/compile/internal/gc/main.go | 33 | # See the supportsDynlink function in go/src/cmd/compile/internal/gc/main.go |
| 34 | GO_DYNLINK = "" | 34 | GO_DYNLINK = "" |
| 35 | GO_DYNLINK_arm = "1" | 35 | GO_DYNLINK:arm = "1" |
| 36 | GO_DYNLINK_aarch64 = "1" | 36 | GO_DYNLINK:aarch64 = "1" |
| 37 | GO_DYNLINK_x86 = "1" | 37 | GO_DYNLINK:x86 = "1" |
| 38 | GO_DYNLINK_x86-64 = "1" | 38 | GO_DYNLINK:x86-64 = "1" |
| 39 | GO_DYNLINK_powerpc64 = "1" | 39 | GO_DYNLINK:powerpc64 = "1" |
| 40 | GO_DYNLINK_powerpc64le = "1" | 40 | GO_DYNLINK:powerpc64le = "1" |
| 41 | GO_DYNLINK_class-native = "" | 41 | GO_DYNLINK:class-native = "" |
| 42 | GO_DYNLINK_class-nativesdk = "" | 42 | GO_DYNLINK:class-nativesdk = "" |
| 43 | 43 | ||
| 44 | # define here because everybody inherits this class | 44 | # define here because everybody inherits this class |
| 45 | # | 45 | # |
| 46 | COMPATIBLE_HOST_linux-gnux32 = "null" | 46 | COMPATIBLE_HOST:linux-gnux32 = "null" |
| 47 | COMPATIBLE_HOST_linux-muslx32 = "null" | 47 | COMPATIBLE_HOST:linux-muslx32 = "null" |
| 48 | COMPATIBLE_HOST_powerpc = "null" | 48 | COMPATIBLE_HOST:powerpc = "null" |
| 49 | COMPATIBLE_HOST_powerpc64 = "null" | 49 | COMPATIBLE_HOST:powerpc64 = "null" |
| 50 | COMPATIBLE_HOST_mipsarchn32 = "null" | 50 | COMPATIBLE_HOST:mipsarchn32 = "null" |
| 51 | 51 | ||
| 52 | ARM_INSTRUCTION_SET_armv4 = "arm" | 52 | ARM_INSTRUCTION_SET:armv4 = "arm" |
| 53 | ARM_INSTRUCTION_SET_armv5 = "arm" | 53 | ARM_INSTRUCTION_SET:armv5 = "arm" |
| 54 | ARM_INSTRUCTION_SET_armv6 = "arm" | 54 | ARM_INSTRUCTION_SET:armv6 = "arm" |
| 55 | 55 | ||
| 56 | TUNE_CCARGS_remove = "-march=mips32r2" | 56 | TUNE_CCARGS:remove = "-march=mips32r2" |
| 57 | SECURITY_NOPIE_CFLAGS ??= "" | 57 | SECURITY_NOPIE_CFLAGS ??= "" |
| 58 | 58 | ||
| 59 | # go can't be built with ccache: | 59 | # go can't be built with ccache: |
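The same colon chaining now applies to configuration outside the class; a hypothetical machine-specific tweak (the machine name is only an example of an OVERRIDES entry) would read:

    # illustrative local.conf line, not part of this commit:
    # disable Go dynamic linking for one machine only
    GO_DYNLINK:qemuarm64 = ""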
diff --git a/meta/classes/gobject-introspection.bbclass b/meta/classes/gobject-introspection.bbclass index 504f75e28d..822a226d5f 100644 --- a/meta/classes/gobject-introspection.bbclass +++ b/meta/classes/gobject-introspection.bbclass | |||
| @@ -15,29 +15,29 @@ GIR_MESON_ENABLE_FLAG ?= 'true' | |||
| 15 | GIR_MESON_DISABLE_FLAG ?= 'false' | 15 | GIR_MESON_DISABLE_FLAG ?= 'false' |
| 16 | 16 | ||
| 17 | # Auto enable/disable based on GI_DATA_ENABLED | 17 | # Auto enable/disable based on GI_DATA_ENABLED |
| 18 | EXTRA_OECONF_prepend_class-target = "${@bb.utils.contains('GI_DATA_ENABLED', 'True', '--enable-introspection', '--disable-introspection', d)} " | 18 | EXTRA_OECONF:prepend:class-target = "${@bb.utils.contains('GI_DATA_ENABLED', 'True', '--enable-introspection', '--disable-introspection', d)} " |
| 19 | EXTRA_OEMESON_prepend_class-target = "-D${GIR_MESON_OPTION}=${@bb.utils.contains('GI_DATA_ENABLED', 'True', '${GIR_MESON_ENABLE_FLAG}', '${GIR_MESON_DISABLE_FLAG}', d)} " | 19 | EXTRA_OEMESON:prepend:class-target = "-D${GIR_MESON_OPTION}=${@bb.utils.contains('GI_DATA_ENABLED', 'True', '${GIR_MESON_ENABLE_FLAG}', '${GIR_MESON_DISABLE_FLAG}', d)} " |
| 20 | 20 | ||
| 21 | # When building native recipes, disable introspection, as it is not necessary, | 21 | # When building native recipes, disable introspection, as it is not necessary, |
| 22 | # pulls in additional dependencies, and makes build times longer | 22 | # pulls in additional dependencies, and makes build times longer |
| 23 | EXTRA_OECONF_prepend_class-native = "--disable-introspection " | 23 | EXTRA_OECONF:prepend:class-native = "--disable-introspection " |
| 24 | EXTRA_OECONF_prepend_class-nativesdk = "--disable-introspection " | 24 | EXTRA_OECONF:prepend:class-nativesdk = "--disable-introspection " |
| 25 | EXTRA_OEMESON_prepend_class-native = "-D${GIR_MESON_OPTION}=${GIR_MESON_DISABLE_FLAG} " | 25 | EXTRA_OEMESON:prepend:class-native = "-D${GIR_MESON_OPTION}=${GIR_MESON_DISABLE_FLAG} " |
| 26 | EXTRA_OEMESON_prepend_class-nativesdk = "-D${GIR_MESON_OPTION}=${GIR_MESON_DISABLE_FLAG} " | 26 | EXTRA_OEMESON:prepend:class-nativesdk = "-D${GIR_MESON_OPTION}=${GIR_MESON_DISABLE_FLAG} " |
| 27 | 27 | ||
| 28 | # Generating introspection data depends on a combination of native and target | 28 | # Generating introspection data depends on a combination of native and target |
| 29 | # introspection tools, and qemu to run the target tools. | 29 | # introspection tools, and qemu to run the target tools. |
| 30 | DEPENDS_append_class-target = " gobject-introspection gobject-introspection-native qemu-native prelink-native" | 30 | DEPENDS:append:class-target = " gobject-introspection gobject-introspection-native qemu-native prelink-native" |
| 31 | 31 | ||
| 32 | # Even though introspection is disabled on -native, gobject-introspection package is still | 32 | # Even though introspection is disabled on -native, gobject-introspection package is still |
| 33 | # needed for m4 macros. | 33 | # needed for m4 macros. |
| 34 | DEPENDS_append_class-native = " gobject-introspection-native" | 34 | DEPENDS:append:class-native = " gobject-introspection-native" |
| 35 | DEPENDS_append_class-nativesdk = " gobject-introspection-native" | 35 | DEPENDS:append:class-nativesdk = " gobject-introspection-native" |
| 36 | 36 | ||
| 37 | # This is used by introspection tools to find .gir includes | 37 | # This is used by introspection tools to find .gir includes |
| 38 | export XDG_DATA_DIRS = "${STAGING_DATADIR}:${STAGING_LIBDIR}" | 38 | export XDG_DATA_DIRS = "${STAGING_DATADIR}:${STAGING_LIBDIR}" |
| 39 | 39 | ||
| 40 | do_configure_prepend_class-target () { | 40 | do_configure:prepend:class-target () { |
| 41 | # introspection.m4 pre-packaged with upstream tarballs does not yet | 41 | # introspection.m4 pre-packaged with upstream tarballs does not yet |
| 42 | # have our fixes | 42 | # have our fixes |
| 43 | mkdir -p ${S}/m4 | 43 | mkdir -p ${S}/m4 |
| @@ -46,8 +46,8 @@ do_configure_prepend_class-target () { | |||
| 46 | 46 | ||
| 47 | # .typelib files are needed at runtime and so they go to the main package (so | 47 | # .typelib files are needed at runtime and so they go to the main package (so |
| 48 | # they'll be together with libraries they support). | 48 | # they'll be together with libraries they support). |
| 49 | FILES_${PN}_append = " ${libdir}/girepository-*/*.typelib" | 49 | FILES:${PN}:append = " ${libdir}/girepository-*/*.typelib" |
| 50 | 50 | ||
| 51 | # .gir files go to dev package, as they're needed for developing (but not for | 51 | # .gir files go to dev package, as they're needed for developing (but not for |
| 52 | # running) things that depends on introspection. | 52 | # running) things that depends on introspection. |
| 53 | FILES_${PN}-dev_append = " ${datadir}/gir-*/*.gir ${libdir}/gir-*/*.gir" | 53 | FILES:${PN}-dev:append = " ${datadir}/gir-*/*.gir ${libdir}/gir-*/*.gir" |
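A recipe consuming this class usually only needs to name the project's meson option; a hedged sketch, assuming the option happens to be called "introspection" (this depends on the project and is not taken from this commit):

    inherit gobject-introspection
    # only needed when the project's option name differs from the class default
    GIR_MESON_OPTION = "introspection"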
diff --git a/meta/classes/gsettings.bbclass b/meta/classes/gsettings.bbclass index 33afc96a9c..3fa5bd40b3 100644 --- a/meta/classes/gsettings.bbclass +++ b/meta/classes/gsettings.bbclass | |||
| @@ -13,30 +13,30 @@ python __anonymous() { | |||
| 13 | pkg = d.getVar("GSETTINGS_PACKAGE") | 13 | pkg = d.getVar("GSETTINGS_PACKAGE") |
| 14 | if pkg: | 14 | if pkg: |
| 15 | d.appendVar("PACKAGE_WRITE_DEPS", " glib-2.0-native") | 15 | d.appendVar("PACKAGE_WRITE_DEPS", " glib-2.0-native") |
| 16 | d.appendVar("RDEPENDS_" + pkg, " ${MLPREFIX}glib-2.0-utils") | 16 | d.appendVar("RDEPENDS:" + pkg, " ${MLPREFIX}glib-2.0-utils") |
| 17 | d.appendVar("FILES_" + pkg, " ${datadir}/glib-2.0/schemas") | 17 | d.appendVar("FILES:" + pkg, " ${datadir}/glib-2.0/schemas") |
| 18 | } | 18 | } |
| 19 | 19 | ||
| 20 | gsettings_postinstrm () { | 20 | gsettings_postinstrm () { |
| 21 | glib-compile-schemas $D${datadir}/glib-2.0/schemas | 21 | glib-compile-schemas $D${datadir}/glib-2.0/schemas |
| 22 | } | 22 | } |
| 23 | 23 | ||
| 24 | python populate_packages_append () { | 24 | python populate_packages:append () { |
| 25 | pkg = d.getVar('GSETTINGS_PACKAGE') | 25 | pkg = d.getVar('GSETTINGS_PACKAGE') |
| 26 | if pkg: | 26 | if pkg: |
| 27 | bb.note("adding gsettings postinst scripts to %s" % pkg) | 27 | bb.note("adding gsettings postinst scripts to %s" % pkg) |
| 28 | 28 | ||
| 29 | postinst = d.getVar('pkg_postinst_%s' % pkg) or d.getVar('pkg_postinst') | 29 | postinst = d.getVar('pkg_postinst:%s' % pkg) or d.getVar('pkg_postinst') |
| 30 | if not postinst: | 30 | if not postinst: |
| 31 | postinst = '#!/bin/sh\n' | 31 | postinst = '#!/bin/sh\n' |
| 32 | postinst += d.getVar('gsettings_postinstrm') | 32 | postinst += d.getVar('gsettings_postinstrm') |
| 33 | d.setVar('pkg_postinst_%s' % pkg, postinst) | 33 | d.setVar('pkg_postinst:%s' % pkg, postinst) |
| 34 | 34 | ||
| 35 | bb.note("adding gsettings postrm scripts to %s" % pkg) | 35 | bb.note("adding gsettings postrm scripts to %s" % pkg) |
| 36 | 36 | ||
| 37 | postrm = d.getVar('pkg_postrm_%s' % pkg) or d.getVar('pkg_postrm') | 37 | postrm = d.getVar('pkg_postrm:%s' % pkg) or d.getVar('pkg_postrm') |
| 38 | if not postrm: | 38 | if not postrm: |
| 39 | postrm = '#!/bin/sh\n' | 39 | postrm = '#!/bin/sh\n' |
| 40 | postrm += d.getVar('gsettings_postinstrm') | 40 | postrm += d.getVar('gsettings_postinstrm') |
| 41 | d.setVar('pkg_postrm_%s' % pkg, postrm) | 41 | d.setVar('pkg_postrm:%s' % pkg, postrm) |
| 42 | } | 42 | } |
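For comparison, a package-specific maintainer script written directly in a recipe now uses the same colon-separated name that the anonymous Python above reads and writes; a minimal sketch mirroring gsettings_postinstrm:

    # illustrative recipe fragment, not part of this commit
    pkg_postinst:${PN} () {
        glib-compile-schemas $D${datadir}/glib-2.0/schemas
    }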
diff --git a/meta/classes/gtk-doc.bbclass b/meta/classes/gtk-doc.bbclass index ef99e63faf..7149bc06c0 100644 --- a/meta/classes/gtk-doc.bbclass +++ b/meta/classes/gtk-doc.bbclass | |||
| @@ -7,7 +7,7 @@ | |||
| 7 | # | 7 | # |
| 8 | # It should be used in recipes to determine whether gtk-doc based documentation should be built, | 8 | # It should be used in recipes to determine whether gtk-doc based documentation should be built, |
| 9 | # so that qemu use can be avoided when necessary. | 9 | # so that qemu use can be avoided when necessary. |
| 10 | GTKDOC_ENABLED_class-native = "False" | 10 | GTKDOC_ENABLED:class-native = "False" |
| 11 | GTKDOC_ENABLED ?= "${@bb.utils.contains('DISTRO_FEATURES', 'api-documentation', \ | 11 | GTKDOC_ENABLED ?= "${@bb.utils.contains('DISTRO_FEATURES', 'api-documentation', \ |
| 12 | bb.utils.contains('MACHINE_FEATURES', 'qemu-usermode', 'True', 'False', d), 'False', d)}" | 12 | bb.utils.contains('MACHINE_FEATURES', 'qemu-usermode', 'True', 'False', d), 'False', d)}" |
| 13 | 13 | ||
| @@ -19,20 +19,20 @@ GTKDOC_MESON_ENABLE_FLAG ?= 'true' | |||
| 19 | GTKDOC_MESON_DISABLE_FLAG ?= 'false' | 19 | GTKDOC_MESON_DISABLE_FLAG ?= 'false' |
| 20 | 20 | ||
| 21 | # Auto enable/disable based on GTKDOC_ENABLED | 21 | # Auto enable/disable based on GTKDOC_ENABLED |
| 22 | EXTRA_OECONF_prepend_class-target = "${@bb.utils.contains('GTKDOC_ENABLED', 'True', '--enable-gtk-doc --enable-gtk-doc-html --disable-gtk-doc-pdf', \ | 22 | EXTRA_OECONF:prepend:class-target = "${@bb.utils.contains('GTKDOC_ENABLED', 'True', '--enable-gtk-doc --enable-gtk-doc-html --disable-gtk-doc-pdf', \ |
| 23 | '--disable-gtk-doc', d)} " | 23 | '--disable-gtk-doc', d)} " |
| 24 | EXTRA_OEMESON_prepend_class-target = "-D${GTKDOC_MESON_OPTION}=${@bb.utils.contains('GTKDOC_ENABLED', 'True', '${GTKDOC_MESON_ENABLE_FLAG}', '${GTKDOC_MESON_DISABLE_FLAG}', d)} " | 24 | EXTRA_OEMESON:prepend:class-target = "-D${GTKDOC_MESON_OPTION}=${@bb.utils.contains('GTKDOC_ENABLED', 'True', '${GTKDOC_MESON_ENABLE_FLAG}', '${GTKDOC_MESON_DISABLE_FLAG}', d)} " |
| 25 | 25 | ||
| 26 | # When building native recipes, disable gtkdoc, as it is not necessary, | 26 | # When building native recipes, disable gtkdoc, as it is not necessary, |
| 27 | # pulls in additional dependencies, and makes build times longer | 27 | # pulls in additional dependencies, and makes build times longer |
| 28 | EXTRA_OECONF_prepend_class-native = "--disable-gtk-doc " | 28 | EXTRA_OECONF:prepend:class-native = "--disable-gtk-doc " |
| 29 | EXTRA_OECONF_prepend_class-nativesdk = "--disable-gtk-doc " | 29 | EXTRA_OECONF:prepend:class-nativesdk = "--disable-gtk-doc " |
| 30 | EXTRA_OEMESON_prepend_class-native = "-D${GTKDOC_MESON_OPTION}=${GTKDOC_MESON_DISABLE_FLAG} " | 30 | EXTRA_OEMESON:prepend:class-native = "-D${GTKDOC_MESON_OPTION}=${GTKDOC_MESON_DISABLE_FLAG} " |
| 31 | EXTRA_OEMESON_prepend_class-nativesdk = "-D${GTKDOC_MESON_OPTION}=${GTKDOC_MESON_DISABLE_FLAG} " | 31 | EXTRA_OEMESON:prepend:class-nativesdk = "-D${GTKDOC_MESON_OPTION}=${GTKDOC_MESON_DISABLE_FLAG} " |
| 32 | 32 | ||
| 33 | # Even though gtkdoc is disabled on -native, gtk-doc package is still | 33 | # Even though gtkdoc is disabled on -native, gtk-doc package is still |
| 34 | # needed for m4 macros. | 34 | # needed for m4 macros. |
| 35 | DEPENDS_append = " gtk-doc-native" | 35 | DEPENDS:append = " gtk-doc-native" |
| 36 | 36 | ||
| 37 | # The documentation directory, where the infrastructure will be copied. | 37 | # The documentation directory, where the infrastructure will be copied. |
| 38 | # gtkdocize has a default of "." so to handle out-of-tree builds set this to $S. | 38 | # gtkdocize has a default of "." so to handle out-of-tree builds set this to $S. |
| @@ -41,15 +41,15 @@ GTKDOC_DOCDIR ?= "${S}" | |||
| 41 | export STAGING_DIR_HOST | 41 | export STAGING_DIR_HOST |
| 42 | 42 | ||
| 43 | inherit python3native pkgconfig qemu | 43 | inherit python3native pkgconfig qemu |
| 44 | DEPENDS_append = "${@' qemu-native' if d.getVar('GTKDOC_ENABLED') == 'True' else ''}" | 44 | DEPENDS:append = "${@' qemu-native' if d.getVar('GTKDOC_ENABLED') == 'True' else ''}" |
| 45 | 45 | ||
| 46 | do_configure_prepend () { | 46 | do_configure:prepend () { |
| 47 | # Need to use ||true as this is only needed if configure.ac both exists | 47 | # Need to use ||true as this is only needed if configure.ac both exists |
| 48 | # and uses GTK_DOC_CHECK. | 48 | # and uses GTK_DOC_CHECK. |
| 49 | gtkdocize --srcdir ${S} --docdir ${GTKDOC_DOCDIR} || true | 49 | gtkdocize --srcdir ${S} --docdir ${GTKDOC_DOCDIR} || true |
| 50 | } | 50 | } |
| 51 | 51 | ||
| 52 | do_compile_prepend_class-target () { | 52 | do_compile:prepend:class-target () { |
| 53 | if [ ${GTKDOC_ENABLED} = True ]; then | 53 | if [ ${GTKDOC_ENABLED} = True ]; then |
| 54 | # Write out a qemu wrapper that will be given to gtkdoc-scangobj so that it | 54 | # Write out a qemu wrapper that will be given to gtkdoc-scangobj so that it |
| 55 | # can run target helper binaries through that. | 55 | # can run target helper binaries through that. |
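For GTKDOC_ENABLED to evaluate to True, the api-documentation distro feature has to be set (and the machine has to provide qemu-usermode); an illustrative local.conf line in the new syntax:

    DISTRO_FEATURES:append = " api-documentation"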
diff --git a/meta/classes/gtk-icon-cache.bbclass b/meta/classes/gtk-icon-cache.bbclass index 340a283851..0248ba285e 100644 --- a/meta/classes/gtk-icon-cache.bbclass +++ b/meta/classes/gtk-icon-cache.bbclass | |||
| @@ -1,4 +1,4 @@ | |||
| 1 | FILES_${PN} += "${datadir}/icons/hicolor" | 1 | FILES:${PN} += "${datadir}/icons/hicolor" |
| 2 | 2 | ||
| 3 | #gtk+3 reqiure GTK3DISTROFEATURES, DEPENDS on it make all the | 3 | #gtk+3 reqiure GTK3DISTROFEATURES, DEPENDS on it make all the |
| 4 | #recipes inherit this class require GTK3DISTROFEATURES | 4 | #recipes inherit this class require GTK3DISTROFEATURES |
| @@ -45,7 +45,7 @@ else | |||
| 45 | fi | 45 | fi |
| 46 | } | 46 | } |
| 47 | 47 | ||
| 48 | python populate_packages_append () { | 48 | python populate_packages:append () { |
| 49 | packages = d.getVar('PACKAGES').split() | 49 | packages = d.getVar('PACKAGES').split() |
| 50 | pkgdest = d.getVar('PKGDEST') | 50 | pkgdest = d.getVar('PKGDEST') |
| 51 | 51 | ||
| @@ -56,29 +56,29 @@ python populate_packages_append () { | |||
| 56 | 56 | ||
| 57 | bb.note("adding hicolor-icon-theme dependency to %s" % pkg) | 57 | bb.note("adding hicolor-icon-theme dependency to %s" % pkg) |
| 58 | rdepends = ' ' + d.getVar('MLPREFIX', False) + "hicolor-icon-theme" | 58 | rdepends = ' ' + d.getVar('MLPREFIX', False) + "hicolor-icon-theme" |
| 59 | d.appendVar('RDEPENDS_%s' % pkg, rdepends) | 59 | d.appendVar('RDEPENDS:%s' % pkg, rdepends) |
| 60 | 60 | ||
| 61 | #gtk_icon_cache_postinst depend on gdk-pixbuf and gtk+3 | 61 | #gtk_icon_cache_postinst depend on gdk-pixbuf and gtk+3 |
| 62 | bb.note("adding gdk-pixbuf dependency to %s" % pkg) | 62 | bb.note("adding gdk-pixbuf dependency to %s" % pkg) |
| 63 | rdepends = ' ' + d.getVar('MLPREFIX', False) + "gdk-pixbuf" | 63 | rdepends = ' ' + d.getVar('MLPREFIX', False) + "gdk-pixbuf" |
| 64 | d.appendVar('RDEPENDS_%s' % pkg, rdepends) | 64 | d.appendVar('RDEPENDS:%s' % pkg, rdepends) |
| 65 | 65 | ||
| 66 | bb.note("adding gtk+3 dependency to %s" % pkg) | 66 | bb.note("adding gtk+3 dependency to %s" % pkg) |
| 67 | rdepends = ' ' + d.getVar('MLPREFIX', False) + "gtk+3" | 67 | rdepends = ' ' + d.getVar('MLPREFIX', False) + "gtk+3" |
| 68 | d.appendVar('RDEPENDS_%s' % pkg, rdepends) | 68 | d.appendVar('RDEPENDS:%s' % pkg, rdepends) |
| 69 | 69 | ||
| 70 | bb.note("adding gtk-icon-cache postinst and postrm scripts to %s" % pkg) | 70 | bb.note("adding gtk-icon-cache postinst and postrm scripts to %s" % pkg) |
| 71 | 71 | ||
| 72 | postinst = d.getVar('pkg_postinst_%s' % pkg) | 72 | postinst = d.getVar('pkg_postinst:%s' % pkg) |
| 73 | if not postinst: | 73 | if not postinst: |
| 74 | postinst = '#!/bin/sh\n' | 74 | postinst = '#!/bin/sh\n' |
| 75 | postinst += d.getVar('gtk_icon_cache_postinst') | 75 | postinst += d.getVar('gtk_icon_cache_postinst') |
| 76 | d.setVar('pkg_postinst_%s' % pkg, postinst) | 76 | d.setVar('pkg_postinst:%s' % pkg, postinst) |
| 77 | 77 | ||
| 78 | postrm = d.getVar('pkg_postrm_%s' % pkg) | 78 | postrm = d.getVar('pkg_postrm:%s' % pkg) |
| 79 | if not postrm: | 79 | if not postrm: |
| 80 | postrm = '#!/bin/sh\n' | 80 | postrm = '#!/bin/sh\n' |
| 81 | postrm += d.getVar('gtk_icon_cache_postrm') | 81 | postrm += d.getVar('gtk_icon_cache_postrm') |
| 82 | d.setVar('pkg_postrm_%s' % pkg, postrm) | 82 | d.setVar('pkg_postrm:%s' % pkg, postrm) |
| 83 | } | 83 | } |
| 84 | 84 | ||
diff --git a/meta/classes/gtk-immodules-cache.bbclass b/meta/classes/gtk-immodules-cache.bbclass index 8e783fb493..2107517540 100644 --- a/meta/classes/gtk-immodules-cache.bbclass +++ b/meta/classes/gtk-immodules-cache.bbclass | |||
| @@ -47,23 +47,23 @@ else | |||
| 47 | fi | 47 | fi |
| 48 | } | 48 | } |
| 49 | 49 | ||
| 50 | python populate_packages_append () { | 50 | python populate_packages:append () { |
| 51 | gtkimmodules_pkgs = d.getVar('GTKIMMODULES_PACKAGES').split() | 51 | gtkimmodules_pkgs = d.getVar('GTKIMMODULES_PACKAGES').split() |
| 52 | 52 | ||
| 53 | for pkg in gtkimmodules_pkgs: | 53 | for pkg in gtkimmodules_pkgs: |
| 54 | bb.note("adding gtk-immodule-cache postinst and postrm scripts to %s" % pkg) | 54 | bb.note("adding gtk-immodule-cache postinst and postrm scripts to %s" % pkg) |
| 55 | 55 | ||
| 56 | postinst = d.getVar('pkg_postinst_%s' % pkg) | 56 | postinst = d.getVar('pkg_postinst:%s' % pkg) |
| 57 | if not postinst: | 57 | if not postinst: |
| 58 | postinst = '#!/bin/sh\n' | 58 | postinst = '#!/bin/sh\n' |
| 59 | postinst += d.getVar('gtk_immodule_cache_postinst') | 59 | postinst += d.getVar('gtk_immodule_cache_postinst') |
| 60 | d.setVar('pkg_postinst_%s' % pkg, postinst) | 60 | d.setVar('pkg_postinst:%s' % pkg, postinst) |
| 61 | 61 | ||
| 62 | postrm = d.getVar('pkg_postrm_%s' % pkg) | 62 | postrm = d.getVar('pkg_postrm:%s' % pkg) |
| 63 | if not postrm: | 63 | if not postrm: |
| 64 | postrm = '#!/bin/sh\n' | 64 | postrm = '#!/bin/sh\n' |
| 65 | postrm += d.getVar('gtk_immodule_cache_postrm') | 65 | postrm += d.getVar('gtk_immodule_cache_postrm') |
| 66 | d.setVar('pkg_postrm_%s' % pkg, postrm) | 66 | d.setVar('pkg_postrm:%s' % pkg, postrm) |
| 67 | } | 67 | } |
| 68 | 68 | ||
| 69 | python __anonymous() { | 69 | python __anonymous() { |
diff --git a/meta/classes/icecc.bbclass b/meta/classes/icecc.bbclass index 089d52732f..794e9930ad 100644 --- a/meta/classes/icecc.bbclass +++ b/meta/classes/icecc.bbclass | |||
| @@ -47,7 +47,7 @@ HOSTTOOLS_NONFATAL += "icecc patchelf" | |||
| 47 | # | 47 | # |
| 48 | # A useful thing to do for testing Icecream changes locally is to add a | 48 | # A useful thing to do for testing Icecream changes locally is to add a |
| 49 | # subversion in local.conf: | 49 | # subversion in local.conf: |
| 50 | # ICECC_ENV_VERSION_append = "-my-ver-1" | 50 | # ICECC_ENV_VERSION:append = "-my-ver-1" |
| 51 | ICECC_ENV_VERSION = "2" | 51 | ICECC_ENV_VERSION = "2" |
| 52 | 52 | ||
| 53 | # Default to disabling the caret workaround, If set to "1" in local.conf, icecc | 53 | # Default to disabling the caret workaround, If set to "1" in local.conf, icecc |
| @@ -105,7 +105,7 @@ def get_icecc_dep(d): | |||
| 105 | return "icecc-create-env-native" | 105 | return "icecc-create-env-native" |
| 106 | return "" | 106 | return "" |
| 107 | 107 | ||
| 108 | DEPENDS_prepend = "${@get_icecc_dep(d)} " | 108 | DEPENDS:prepend = "${@get_icecc_dep(d)} " |
| 109 | 109 | ||
| 110 | get_cross_kernel_cc[vardepsexclude] += "KERNEL_CC" | 110 | get_cross_kernel_cc[vardepsexclude] += "KERNEL_CC" |
| 111 | def get_cross_kernel_cc(bb,d): | 111 | def get_cross_kernel_cc(bb,d): |
| @@ -428,28 +428,28 @@ set_icecc_env() { | |||
| 428 | bbnote "Using icecc tarball: $ICECC_VERSION" | 428 | bbnote "Using icecc tarball: $ICECC_VERSION" |
| 429 | } | 429 | } |
| 430 | 430 | ||
| 431 | do_configure_prepend() { | 431 | do_configure:prepend() { |
| 432 | set_icecc_env | 432 | set_icecc_env |
| 433 | } | 433 | } |
| 434 | 434 | ||
| 435 | do_compile_prepend() { | 435 | do_compile:prepend() { |
| 436 | set_icecc_env | 436 | set_icecc_env |
| 437 | } | 437 | } |
| 438 | 438 | ||
| 439 | do_compile_kernelmodules_prepend() { | 439 | do_compile_kernelmodules:prepend() { |
| 440 | set_icecc_env | 440 | set_icecc_env |
| 441 | } | 441 | } |
| 442 | 442 | ||
| 443 | do_install_prepend() { | 443 | do_install:prepend() { |
| 444 | set_icecc_env | 444 | set_icecc_env |
| 445 | } | 445 | } |
| 446 | 446 | ||
| 447 | # IceCream is not (currently) supported in the extensible SDK | 447 | # IceCream is not (currently) supported in the extensible SDK |
| 448 | ICECC_SDK_HOST_TASK = "nativesdk-icecc-toolchain" | 448 | ICECC_SDK_HOST_TASK = "nativesdk-icecc-toolchain" |
| 449 | ICECC_SDK_HOST_TASK_task-populate-sdk-ext = "" | 449 | ICECC_SDK_HOST_TASK:task-populate-sdk-ext = "" |
| 450 | 450 | ||
| 451 | # Don't include IceCream in uninative tarball | 451 | # Don't include IceCream in uninative tarball |
| 452 | ICECC_SDK_HOST_TASK_pn-uninative-tarball = "" | 452 | ICECC_SDK_HOST_TASK:pn-uninative-tarball = "" |
| 453 | 453 | ||
| 454 | # Add the toolchain scripts to the SDK | 454 | # Add the toolchain scripts to the SDK |
| 455 | TOOLCHAIN_HOST_TASK_append = " ${ICECC_SDK_HOST_TASK}" | 455 | TOOLCHAIN_HOST_TASK:append = " ${ICECC_SDK_HOST_TASK}" |
diff --git a/meta/classes/image-combined-dbg.bbclass b/meta/classes/image-combined-dbg.bbclass index f4772f7ea1..e5dc61f857 100644 --- a/meta/classes/image-combined-dbg.bbclass +++ b/meta/classes/image-combined-dbg.bbclass | |||
| @@ -1,4 +1,4 @@ | |||
| 1 | IMAGE_PREPROCESS_COMMAND_append = " combine_dbg_image; " | 1 | IMAGE_PREPROCESS_COMMAND:append = " combine_dbg_image; " |
| 2 | 2 | ||
| 3 | combine_dbg_image () { | 3 | combine_dbg_image () { |
| 4 | if [ "${IMAGE_GEN_DEBUGFS}" = "1" -a -e ${IMAGE_ROOTFS}-dbg ]; then | 4 | if [ "${IMAGE_GEN_DEBUGFS}" = "1" -a -e ${IMAGE_ROOTFS}-dbg ]; then |
diff --git a/meta/classes/image-prelink.bbclass b/meta/classes/image-prelink.bbclass index ebf6e6d7ee..0da094a551 100644 --- a/meta/classes/image-prelink.bbclass +++ b/meta/classes/image-prelink.bbclass | |||
| @@ -1,6 +1,6 @@ | |||
| 1 | do_rootfs[depends] += "prelink-native:do_populate_sysroot" | 1 | do_rootfs[depends] += "prelink-native:do_populate_sysroot" |
| 2 | 2 | ||
| 3 | IMAGE_PREPROCESS_COMMAND_append_libc-glibc = " prelink_setup; prelink_image; " | 3 | IMAGE_PREPROCESS_COMMAND:append:libc-glibc = " prelink_setup; prelink_image; " |
| 4 | 4 | ||
| 5 | python prelink_setup () { | 5 | python prelink_setup () { |
| 6 | oe.utils.write_ld_so_conf(d) | 6 | oe.utils.write_ld_so_conf(d) |
diff --git a/meta/classes/image.bbclass b/meta/classes/image.bbclass index 67603d958d..45dd56b544 100644 --- a/meta/classes/image.bbclass +++ b/meta/classes/image.bbclass | |||
| @@ -26,7 +26,7 @@ PACKAGES = "" | |||
| 26 | DEPENDS += "${@' '.join(["%s-qemuwrapper-cross" % m for m in d.getVar("MULTILIB_VARIANTS").split()])} qemuwrapper-cross depmodwrapper-cross cross-localedef-native" | 26 | DEPENDS += "${@' '.join(["%s-qemuwrapper-cross" % m for m in d.getVar("MULTILIB_VARIANTS").split()])} qemuwrapper-cross depmodwrapper-cross cross-localedef-native" |
| 27 | RDEPENDS += "${PACKAGE_INSTALL} ${LINGUAS_INSTALL} ${IMAGE_INSTALL_DEBUGFS}" | 27 | RDEPENDS += "${PACKAGE_INSTALL} ${LINGUAS_INSTALL} ${IMAGE_INSTALL_DEBUGFS}" |
| 28 | RRECOMMENDS += "${PACKAGE_INSTALL_ATTEMPTONLY}" | 28 | RRECOMMENDS += "${PACKAGE_INSTALL_ATTEMPTONLY}" |
| 29 | PATH_prepend = "${@":".join(all_multilib_tune_values(d, 'STAGING_BINDIR_CROSS').split())}:" | 29 | PATH:prepend = "${@":".join(all_multilib_tune_values(d, 'STAGING_BINDIR_CROSS').split())}:" |
| 30 | 30 | ||
| 31 | INHIBIT_DEFAULT_DEPS = "1" | 31 | INHIBIT_DEFAULT_DEPS = "1" |
| 32 | 32 | ||
| @@ -92,7 +92,7 @@ PID = "${@os.getpid()}" | |||
| 92 | PACKAGE_ARCH = "${MACHINE_ARCH}" | 92 | PACKAGE_ARCH = "${MACHINE_ARCH}" |
| 93 | 93 | ||
| 94 | LDCONFIGDEPEND ?= "ldconfig-native:do_populate_sysroot" | 94 | LDCONFIGDEPEND ?= "ldconfig-native:do_populate_sysroot" |
| 95 | LDCONFIGDEPEND_libc-musl = "" | 95 | LDCONFIGDEPEND:libc-musl = "" |
| 96 | 96 | ||
| 97 | # This is needed to have depmod data in PKGDATA_DIR, | 97 | # This is needed to have depmod data in PKGDATA_DIR, |
| 98 | # but if you're building small initramfs image | 98 | # but if you're building small initramfs image |
| @@ -273,7 +273,7 @@ fakeroot python do_image_complete () { | |||
| 273 | } | 273 | } |
| 274 | do_image_complete[dirs] = "${TOPDIR}" | 274 | do_image_complete[dirs] = "${TOPDIR}" |
| 275 | SSTATETASKS += "do_image_complete" | 275 | SSTATETASKS += "do_image_complete" |
| 276 | SSTATE_SKIP_CREATION_task-image-complete = '1' | 276 | SSTATE_SKIP_CREATION:task-image-complete = '1' |
| 277 | do_image_complete[sstate-inputdirs] = "${IMGDEPLOYDIR}" | 277 | do_image_complete[sstate-inputdirs] = "${IMGDEPLOYDIR}" |
| 278 | do_image_complete[sstate-outputdirs] = "${DEPLOY_DIR_IMAGE}" | 278 | do_image_complete[sstate-outputdirs] = "${DEPLOY_DIR_IMAGE}" |
| 279 | do_image_complete[stamp-extra-info] = "${MACHINE_ARCH}" | 279 | do_image_complete[stamp-extra-info] = "${MACHINE_ARCH}" |
| @@ -314,7 +314,7 @@ fakeroot python do_image_qa () { | |||
| 314 | addtask do_image_qa after do_rootfs before do_image | 314 | addtask do_image_qa after do_rootfs before do_image |
| 315 | 315 | ||
| 316 | SSTATETASKS += "do_image_qa" | 316 | SSTATETASKS += "do_image_qa" |
| 317 | SSTATE_SKIP_CREATION_task-image-qa = '1' | 317 | SSTATE_SKIP_CREATION:task-image-qa = '1' |
| 318 | do_image_qa[sstate-inputdirs] = "" | 318 | do_image_qa[sstate-inputdirs] = "" |
| 319 | do_image_qa[sstate-outputdirs] = "" | 319 | do_image_qa[sstate-outputdirs] = "" |
| 320 | python do_image_qa_setscene () { | 320 | python do_image_qa_setscene () { |
| @@ -431,21 +431,21 @@ python () { | |||
| 431 | localdata.delVar('DATETIME') | 431 | localdata.delVar('DATETIME') |
| 432 | localdata.delVar('DATE') | 432 | localdata.delVar('DATE') |
| 433 | localdata.delVar('TMPDIR') | 433 | localdata.delVar('TMPDIR') |
| 434 | vardepsexclude = (d.getVarFlag('IMAGE_CMD_' + realt, 'vardepsexclude', True) or '').split() | 434 | vardepsexclude = (d.getVarFlag('IMAGE_CMD:' + realt, 'vardepsexclude', True) or '').split() |
| 435 | for dep in vardepsexclude: | 435 | for dep in vardepsexclude: |
| 436 | localdata.delVar(dep) | 436 | localdata.delVar(dep) |
| 437 | 437 | ||
| 438 | image_cmd = localdata.getVar("IMAGE_CMD") | 438 | image_cmd = localdata.getVar("IMAGE_CMD") |
| 439 | vardeps.add('IMAGE_CMD_' + realt) | 439 | vardeps.add('IMAGE_CMD:' + realt) |
| 440 | if image_cmd: | 440 | if image_cmd: |
| 441 | cmds.append("\t" + image_cmd) | 441 | cmds.append("\t" + image_cmd) |
| 442 | else: | 442 | else: |
| 443 | bb.fatal("No IMAGE_CMD defined for IMAGE_FSTYPES entry '%s' - possibly invalid type name or missing support class" % t) | 443 | bb.fatal("No IMAGE_CMD defined for IMAGE_FSTYPES entry '%s' - possibly invalid type name or missing support class" % t) |
| 444 | cmds.append(localdata.expand("\tcd ${IMGDEPLOYDIR}")) | 444 | cmds.append(localdata.expand("\tcd ${IMGDEPLOYDIR}")) |
| 445 | 445 | ||
| 446 | # Since a copy of IMAGE_CMD_xxx will be inlined within do_image_xxx, | 446 | # Since a copy of IMAGE_CMD:xxx will be inlined within do_image_xxx, |
| 447 | # prevent a redundant copy of IMAGE_CMD_xxx being emitted as a function. | 447 | # prevent a redundant copy of IMAGE_CMD:xxx being emitted as a function. |
| 448 | d.delVarFlag('IMAGE_CMD_' + realt, 'func') | 448 | d.delVarFlag('IMAGE_CMD:' + realt, 'func') |
| 449 | 449 | ||
| 450 | rm_tmp_images = set() | 450 | rm_tmp_images = set() |
| 451 | def gen_conversion_cmds(bt): | 451 | def gen_conversion_cmds(bt): |
| @@ -667,6 +667,6 @@ systemd_preset_all () { | |||
| 667 | fi | 667 | fi |
| 668 | } | 668 | } |
| 669 | 669 | ||
| 670 | IMAGE_PREPROCESS_COMMAND_append = " ${@ 'systemd_preset_all;' if bb.utils.contains('DISTRO_FEATURES', 'systemd', True, False, d) and not bb.utils.contains('IMAGE_FEATURES', 'stateless-rootfs', True, False, d) else ''} reproducible_final_image_task; " | 670 | IMAGE_PREPROCESS_COMMAND:append = " ${@ 'systemd_preset_all;' if bb.utils.contains('DISTRO_FEATURES', 'systemd', True, False, d) and not bb.utils.contains('IMAGE_FEATURES', 'stateless-rootfs', True, False, d) else ''} reproducible_final_image_task; " |
| 671 | 671 | ||
| 672 | CVE_PRODUCT = "" | 672 | CVE_PRODUCT = "" |
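An image recipe hooking into the converted preprocess list follows the same pattern; a hypothetical example with an invented function name:

    IMAGE_PREPROCESS_COMMAND:append = " add_build_note; "
    add_build_note () {
        # purely illustrative: drop a marker file into the generated rootfs
        echo "built with colon overrides" > ${IMAGE_ROOTFS}${sysconfdir}/build-note
    }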
diff --git a/meta/classes/image_types.bbclass b/meta/classes/image_types.bbclass index 6b28cdbb3c..3620931224 100644 --- a/meta/classes/image_types.bbclass +++ b/meta/classes/image_types.bbclass | |||
| @@ -56,9 +56,9 @@ ZIP_COMPRESSION_LEVEL ?= "-9" | |||
| 56 | ZSTD_COMPRESSION_LEVEL ?= "-3" | 56 | ZSTD_COMPRESSION_LEVEL ?= "-3" |
| 57 | 57 | ||
| 58 | JFFS2_SUM_EXTRA_ARGS ?= "" | 58 | JFFS2_SUM_EXTRA_ARGS ?= "" |
| 59 | IMAGE_CMD_jffs2 = "mkfs.jffs2 --root=${IMAGE_ROOTFS} --faketime --output=${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.jffs2 ${EXTRA_IMAGECMD}" | 59 | IMAGE_CMD:jffs2 = "mkfs.jffs2 --root=${IMAGE_ROOTFS} --faketime --output=${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.jffs2 ${EXTRA_IMAGECMD}" |
| 60 | 60 | ||
| 61 | IMAGE_CMD_cramfs = "mkfs.cramfs ${IMAGE_ROOTFS} ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.cramfs ${EXTRA_IMAGECMD}" | 61 | IMAGE_CMD:cramfs = "mkfs.cramfs ${IMAGE_ROOTFS} ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.cramfs ${EXTRA_IMAGECMD}" |
| 62 | 62 | ||
| 63 | oe_mkext234fs () { | 63 | oe_mkext234fs () { |
| 64 | fstype=$1 | 64 | fstype=$1 |
| @@ -88,12 +88,12 @@ oe_mkext234fs () { | |||
| 88 | fsck.$fstype -pvfD ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.$fstype || [ $? -le 3 ] | 88 | fsck.$fstype -pvfD ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.$fstype || [ $? -le 3 ] |
| 89 | } | 89 | } |
| 90 | 90 | ||
| 91 | IMAGE_CMD_ext2 = "oe_mkext234fs ext2 ${EXTRA_IMAGECMD}" | 91 | IMAGE_CMD:ext2 = "oe_mkext234fs ext2 ${EXTRA_IMAGECMD}" |
| 92 | IMAGE_CMD_ext3 = "oe_mkext234fs ext3 ${EXTRA_IMAGECMD}" | 92 | IMAGE_CMD:ext3 = "oe_mkext234fs ext3 ${EXTRA_IMAGECMD}" |
| 93 | IMAGE_CMD_ext4 = "oe_mkext234fs ext4 ${EXTRA_IMAGECMD}" | 93 | IMAGE_CMD:ext4 = "oe_mkext234fs ext4 ${EXTRA_IMAGECMD}" |
| 94 | 94 | ||
| 95 | MIN_BTRFS_SIZE ?= "16384" | 95 | MIN_BTRFS_SIZE ?= "16384" |
| 96 | IMAGE_CMD_btrfs () { | 96 | IMAGE_CMD:btrfs () { |
| 97 | size=${ROOTFS_SIZE} | 97 | size=${ROOTFS_SIZE} |
| 98 | if [ ${size} -lt ${MIN_BTRFS_SIZE} ] ; then | 98 | if [ ${size} -lt ${MIN_BTRFS_SIZE} ] ; then |
| 99 | size=${MIN_BTRFS_SIZE} | 99 | size=${MIN_BTRFS_SIZE} |
| @@ -103,22 +103,22 @@ IMAGE_CMD_btrfs () { | |||
| 103 | mkfs.btrfs ${EXTRA_IMAGECMD} -r ${IMAGE_ROOTFS} ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.btrfs | 103 | mkfs.btrfs ${EXTRA_IMAGECMD} -r ${IMAGE_ROOTFS} ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.btrfs |
| 104 | } | 104 | } |
| 105 | 105 | ||
| 106 | IMAGE_CMD_squashfs = "mksquashfs ${IMAGE_ROOTFS} ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.squashfs ${EXTRA_IMAGECMD} -noappend" | 106 | IMAGE_CMD:squashfs = "mksquashfs ${IMAGE_ROOTFS} ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.squashfs ${EXTRA_IMAGECMD} -noappend" |
| 107 | IMAGE_CMD_squashfs-xz = "mksquashfs ${IMAGE_ROOTFS} ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.squashfs-xz ${EXTRA_IMAGECMD} -noappend -comp xz" | 107 | IMAGE_CMD:squashfs-xz = "mksquashfs ${IMAGE_ROOTFS} ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.squashfs-xz ${EXTRA_IMAGECMD} -noappend -comp xz" |
| 108 | IMAGE_CMD_squashfs-lzo = "mksquashfs ${IMAGE_ROOTFS} ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.squashfs-lzo ${EXTRA_IMAGECMD} -noappend -comp lzo" | 108 | IMAGE_CMD:squashfs-lzo = "mksquashfs ${IMAGE_ROOTFS} ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.squashfs-lzo ${EXTRA_IMAGECMD} -noappend -comp lzo" |
| 109 | IMAGE_CMD_squashfs-lz4 = "mksquashfs ${IMAGE_ROOTFS} ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.squashfs-lz4 ${EXTRA_IMAGECMD} -noappend -comp lz4" | 109 | IMAGE_CMD:squashfs-lz4 = "mksquashfs ${IMAGE_ROOTFS} ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.squashfs-lz4 ${EXTRA_IMAGECMD} -noappend -comp lz4" |
| 110 | 110 | ||
| 111 | IMAGE_CMD_erofs = "mkfs.erofs ${EXTRA_IMAGECMD} ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.erofs ${IMAGE_ROOTFS}" | 111 | IMAGE_CMD:erofs = "mkfs.erofs ${EXTRA_IMAGECMD} ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.erofs ${IMAGE_ROOTFS}" |
| 112 | IMAGE_CMD_erofs-lz4 = "mkfs.erofs -zlz4 ${EXTRA_IMAGECMD} ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.erofs-lz4 ${IMAGE_ROOTFS}" | 112 | IMAGE_CMD:erofs-lz4 = "mkfs.erofs -zlz4 ${EXTRA_IMAGECMD} ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.erofs-lz4 ${IMAGE_ROOTFS}" |
| 113 | IMAGE_CMD_erofs-lz4hc = "mkfs.erofs -zlz4hc ${EXTRA_IMAGECMD} ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.erofs-lz4hc ${IMAGE_ROOTFS}" | 113 | IMAGE_CMD:erofs-lz4hc = "mkfs.erofs -zlz4hc ${EXTRA_IMAGECMD} ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.erofs-lz4hc ${IMAGE_ROOTFS}" |
| 114 | 114 | ||
| 115 | 115 | ||
| 116 | IMAGE_CMD_TAR ?= "tar" | 116 | IMAGE_CMD_TAR ?= "tar" |
| 117 | # ignore return code 1 "file changed as we read it" as other tasks(e.g. do_image_wic) may be hardlinking rootfs | 117 | # ignore return code 1 "file changed as we read it" as other tasks(e.g. do_image_wic) may be hardlinking rootfs |
| 118 | IMAGE_CMD_tar = "${IMAGE_CMD_TAR} --sort=name --format=posix --numeric-owner -cf ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.tar -C ${IMAGE_ROOTFS} . || [ $? -eq 1 ]" | 118 | IMAGE_CMD:tar = "${IMAGE_CMD_TAR} --sort=name --format=posix --numeric-owner -cf ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.tar -C ${IMAGE_ROOTFS} . || [ $? -eq 1 ]" |
| 119 | 119 | ||
| 120 | do_image_cpio[cleandirs] += "${WORKDIR}/cpio_append" | 120 | do_image_cpio[cleandirs] += "${WORKDIR}/cpio_append" |
| 121 | IMAGE_CMD_cpio () { | 121 | IMAGE_CMD:cpio () { |
| 122 | (cd ${IMAGE_ROOTFS} && find . | sort | cpio --reproducible -o -H newc >${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.cpio) | 122 | (cd ${IMAGE_ROOTFS} && find . | sort | cpio --reproducible -o -H newc >${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.cpio) |
| 123 | # We only need the /init symlink if we're building the real | 123 | # We only need the /init symlink if we're building the real |
| 124 | # image. The -dbg image doesn't need it! By being clever | 124 | # image. The -dbg image doesn't need it! By being clever |
| @@ -184,7 +184,7 @@ multiubi_mkfs() { | |||
| 184 | fi | 184 | fi |
| 185 | } | 185 | } |
| 186 | 186 | ||
| 187 | IMAGE_CMD_multiubi () { | 187 | IMAGE_CMD:multiubi () { |
| 188 | # Split MKUBIFS_ARGS_<name> and UBINIZE_ARGS_<name> | 188 | # Split MKUBIFS_ARGS_<name> and UBINIZE_ARGS_<name> |
| 189 | for name in ${MULTIUBI_BUILD}; do | 189 | for name in ${MULTIUBI_BUILD}; do |
| 190 | eval local mkubifs_args=\"\$MKUBIFS_ARGS_${name}\" | 190 | eval local mkubifs_args=\"\$MKUBIFS_ARGS_${name}\" |
| @@ -194,15 +194,15 @@ IMAGE_CMD_multiubi () { | |||
| 194 | done | 194 | done |
| 195 | } | 195 | } |
| 196 | 196 | ||
| 197 | IMAGE_CMD_ubi () { | 197 | IMAGE_CMD:ubi () { |
| 198 | multiubi_mkfs "${MKUBIFS_ARGS}" "${UBINIZE_ARGS}" | 198 | multiubi_mkfs "${MKUBIFS_ARGS}" "${UBINIZE_ARGS}" |
| 199 | } | 199 | } |
| 200 | IMAGE_TYPEDEP_ubi = "ubifs" | 200 | IMAGE_TYPEDEP_ubi = "ubifs" |
| 201 | 201 | ||
| 202 | IMAGE_CMD_ubifs = "mkfs.ubifs -r ${IMAGE_ROOTFS} -o ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.ubifs ${MKUBIFS_ARGS}" | 202 | IMAGE_CMD:ubifs = "mkfs.ubifs -r ${IMAGE_ROOTFS} -o ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.ubifs ${MKUBIFS_ARGS}" |
| 203 | 203 | ||
| 204 | MIN_F2FS_SIZE ?= "524288" | 204 | MIN_F2FS_SIZE ?= "524288" |
| 205 | IMAGE_CMD_f2fs () { | 205 | IMAGE_CMD:f2fs () { |
| 206 | # We need to add additional smarts here form devices smaller than 1.5G | 206 | # We need to add additional smarts here form devices smaller than 1.5G |
| 207 | # Need to scale appropriately between 40M -> 1.5G as the "overprovision | 207 | # Need to scale appropriately between 40M -> 1.5G as the "overprovision |
| 208 | # ratio" goes down as the device gets bigger (70% -> 4.5%), below about | 208 | # ratio" goes down as the device gets bigger (70% -> 4.5%), below about |
| @@ -224,14 +224,14 @@ inherit siteinfo kernel-arch image-artifact-names | |||
| 224 | 224 | ||
| 225 | JFFS2_ENDIANNESS ?= "${@oe.utils.conditional('SITEINFO_ENDIANNESS', 'le', '-l', '-b', d)}" | 225 | JFFS2_ENDIANNESS ?= "${@oe.utils.conditional('SITEINFO_ENDIANNESS', 'le', '-l', '-b', d)}" |
| 226 | JFFS2_ERASEBLOCK ?= "0x40000" | 226 | JFFS2_ERASEBLOCK ?= "0x40000" |
| 227 | EXTRA_IMAGECMD_jffs2 ?= "--pad ${JFFS2_ENDIANNESS} --eraseblock=${JFFS2_ERASEBLOCK} --no-cleanmarkers" | 227 | EXTRA_IMAGECMD:jffs2 ?= "--pad ${JFFS2_ENDIANNESS} --eraseblock=${JFFS2_ERASEBLOCK} --no-cleanmarkers" |
| 228 | 228 | ||
| 229 | # Change these if you want default mkfs behavior (i.e. create minimal inode number) | 229 | # Change these if you want default mkfs behavior (i.e. create minimal inode number) |
| 230 | EXTRA_IMAGECMD_ext2 ?= "-i 4096" | 230 | EXTRA_IMAGECMD:ext2 ?= "-i 4096" |
| 231 | EXTRA_IMAGECMD_ext3 ?= "-i 4096" | 231 | EXTRA_IMAGECMD:ext3 ?= "-i 4096" |
| 232 | EXTRA_IMAGECMD_ext4 ?= "-i 4096" | 232 | EXTRA_IMAGECMD:ext4 ?= "-i 4096" |
| 233 | EXTRA_IMAGECMD_btrfs ?= "-n 4096 --shrink" | 233 | EXTRA_IMAGECMD:btrfs ?= "-n 4096 --shrink" |
| 234 | EXTRA_IMAGECMD_f2fs ?= "" | 234 | EXTRA_IMAGECMD:f2fs ?= "" |
| 235 | 235 | ||
| 236 | do_image_cpio[depends] += "cpio-native:do_populate_sysroot" | 236 | do_image_cpio[depends] += "cpio-native:do_populate_sysroot" |
| 237 | do_image_jffs2[depends] += "mtd-utils-native:do_populate_sysroot" | 237 | do_image_jffs2[depends] += "mtd-utils-native:do_populate_sysroot" |
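Tuning one of these per-type variables from local.conf or an image recipe uses the same form; for instance (the mke2fs options shown are illustrative):

    # larger bytes-per-inode ratio and a volume label for ext4 images
    EXTRA_IMAGECMD:ext4 = "-i 8192 -L rootfs"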
diff --git a/meta/classes/image_types_wic.bbclass b/meta/classes/image_types_wic.bbclass index 49be1da77a..2f1a0b709c 100644 --- a/meta/classes/image_types_wic.bbclass +++ b/meta/classes/image_types_wic.bbclass | |||
| @@ -26,7 +26,7 @@ def wks_search(files, search_path): | |||
| 26 | 26 | ||
| 27 | WIC_CREATE_EXTRA_ARGS ?= "" | 27 | WIC_CREATE_EXTRA_ARGS ?= "" |
| 28 | 28 | ||
| 29 | IMAGE_CMD_wic () { | 29 | IMAGE_CMD:wic () { |
| 30 | out="${IMGDEPLOYDIR}/${IMAGE_NAME}" | 30 | out="${IMGDEPLOYDIR}/${IMAGE_NAME}" |
| 31 | build_wic="${WORKDIR}/build-wic" | 31 | build_wic="${WORKDIR}/build-wic" |
| 32 | tmp_wic="${WORKDIR}/tmp-wic" | 32 | tmp_wic="${WORKDIR}/tmp-wic" |
| @@ -42,7 +42,7 @@ IMAGE_CMD_wic () { | |||
| 42 | BUILDDIR="${TOPDIR}" PSEUDO_UNLOAD=1 wic create "$wks" --vars "${STAGING_DIR}/${MACHINE}/imgdata/" -e "${IMAGE_BASENAME}" -o "$build_wic/" -w "$tmp_wic" ${WIC_CREATE_EXTRA_ARGS} | 42 | BUILDDIR="${TOPDIR}" PSEUDO_UNLOAD=1 wic create "$wks" --vars "${STAGING_DIR}/${MACHINE}/imgdata/" -e "${IMAGE_BASENAME}" -o "$build_wic/" -w "$tmp_wic" ${WIC_CREATE_EXTRA_ARGS} |
| 43 | mv "$build_wic/$(basename "${wks%.wks}")"*.direct "$out${IMAGE_NAME_SUFFIX}.wic" | 43 | mv "$build_wic/$(basename "${wks%.wks}")"*.direct "$out${IMAGE_NAME_SUFFIX}.wic" |
| 44 | } | 44 | } |
| 45 | IMAGE_CMD_wic[vardepsexclude] = "WKS_FULL_PATH WKS_FILES TOPDIR" | 45 | IMAGE_CMD:wic[vardepsexclude] = "WKS_FULL_PATH WKS_FILES TOPDIR" |
| 46 | do_image_wic[cleandirs] = "${WORKDIR}/build-wic" | 46 | do_image_wic[cleandirs] = "${WORKDIR}/build-wic" |
| 47 | 47 | ||
| 48 | PSEUDO_IGNORE_PATHS .= ",${WORKDIR}/build-wic" | 48 | PSEUDO_IGNORE_PATHS .= ",${WORKDIR}/build-wic" |
| @@ -60,9 +60,9 @@ do_image_wic[deptask] += "do_image_complete" | |||
| 60 | WKS_FILE_DEPENDS_DEFAULT = '${@bb.utils.contains_any("BUILD_ARCH", [ 'x86_64', 'i686' ], "syslinux-native", "",d)}' | 60 | WKS_FILE_DEPENDS_DEFAULT = '${@bb.utils.contains_any("BUILD_ARCH", [ 'x86_64', 'i686' ], "syslinux-native", "",d)}' |
| 61 | WKS_FILE_DEPENDS_DEFAULT += "bmap-tools-native cdrtools-native btrfs-tools-native squashfs-tools-native e2fsprogs-native" | 61 | WKS_FILE_DEPENDS_DEFAULT += "bmap-tools-native cdrtools-native btrfs-tools-native squashfs-tools-native e2fsprogs-native" |
| 62 | WKS_FILE_DEPENDS_BOOTLOADERS = "" | 62 | WKS_FILE_DEPENDS_BOOTLOADERS = "" |
| 63 | WKS_FILE_DEPENDS_BOOTLOADERS_x86 = "syslinux grub-efi systemd-boot" | 63 | WKS_FILE_DEPENDS_BOOTLOADERS:x86 = "syslinux grub-efi systemd-boot" |
| 64 | WKS_FILE_DEPENDS_BOOTLOADERS_x86-64 = "syslinux grub-efi systemd-boot" | 64 | WKS_FILE_DEPENDS_BOOTLOADERS:x86-64 = "syslinux grub-efi systemd-boot" |
| 65 | WKS_FILE_DEPENDS_BOOTLOADERS_x86-x32 = "syslinux grub-efi" | 65 | WKS_FILE_DEPENDS_BOOTLOADERS:x86-x32 = "syslinux grub-efi" |
| 66 | 66 | ||
| 67 | WKS_FILE_DEPENDS ??= "${WKS_FILE_DEPENDS_DEFAULT} ${WKS_FILE_DEPENDS_BOOTLOADERS}" | 67 | WKS_FILE_DEPENDS ??= "${WKS_FILE_DEPENDS_DEFAULT} ${WKS_FILE_DEPENDS_BOOTLOADERS}" |
| 68 | 68 | ||
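The dependency lists above feed WKS_FILE_DEPENDS; a hypothetical image that needs extra tools for its .wks file would extend it the same way (the wks name and native tools are examples, not from this commit):

    WKS_FILE = "my-custom-image.wks"
    WKS_FILE_DEPENDS:append = " dosfstools-native mtools-native"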
diff --git a/meta/classes/insane.bbclass b/meta/classes/insane.bbclass index 763d5f1da2..f9a35f2daa 100644 --- a/meta/classes/insane.bbclass +++ b/meta/classes/insane.bbclass | |||
| @@ -40,7 +40,7 @@ ERROR_QA ?= "dev-so debug-deps dev-deps debug-files arch pkgconfig la \ | |||
| 40 | useless-rpaths rpaths staticdev \ | 40 | useless-rpaths rpaths staticdev \ |
| 41 | " | 41 | " |
| 42 | # Add usrmerge QA check based on distro feature | 42 | # Add usrmerge QA check based on distro feature |
| 43 | ERROR_QA_append = "${@bb.utils.contains('DISTRO_FEATURES', 'usrmerge', ' usrmerge', '', d)}" | 43 | ERROR_QA:append = "${@bb.utils.contains('DISTRO_FEATURES', 'usrmerge', ' usrmerge', '', d)}" |
| 44 | 44 | ||
| 45 | FAKEROOT_QA = "host-user-contaminated" | 45 | FAKEROOT_QA = "host-user-contaminated" |
| 46 | FAKEROOT_QA[doc] = "QA tests which need to run under fakeroot. If any \ | 46 | FAKEROOT_QA[doc] = "QA tests which need to run under fakeroot. If any \ |
| @@ -246,7 +246,7 @@ def package_qa_check_mime_xdg(path, name, d, elf, messages): | |||
| 246 | pkgname = name | 246 | pkgname = name |
| 247 | if name == d.getVar('PN'): | 247 | if name == d.getVar('PN'): |
| 248 | pkgname = '${PN}' | 248 | pkgname = '${PN}' |
| 249 | wstr += "If yes: add \'inhert mime-xdg\' and \'MIME_XDG_PACKAGES += \"%s\"\' / if no add \'INSANE_SKIP_%s += \"mime-xdg\"\' to recipe." % (pkgname, pkgname) | 249 | wstr += "If yes: add \'inhert mime-xdg\' and \'MIME_XDG_PACKAGES += \"%s\"\' / if no add \'INSANE_SKIP:%s += \"mime-xdg\"\' to recipe." % (pkgname, pkgname) |
| 250 | package_qa_add_message(messages, "mime-xdg", wstr) | 250 | package_qa_add_message(messages, "mime-xdg", wstr) |
| 251 | if mime_type_found: | 251 | if mime_type_found: |
| 252 | package_qa_add_message(messages, "mime-xdg", "package contains desktop file with key 'MimeType' but does not inhert mime-xdg: %s path '%s'" % \ | 252 | package_qa_add_message(messages, "mime-xdg", "package contains desktop file with key 'MimeType' but does not inhert mime-xdg: %s path '%s'" % \ |
| @@ -279,7 +279,7 @@ def package_qa_check_libdir(d): | |||
| 279 | # Skip subdirectories for any packages with libdir in INSANE_SKIP | 279 | # Skip subdirectories for any packages with libdir in INSANE_SKIP |
| 280 | skippackages = [] | 280 | skippackages = [] |
| 281 | for package in dirs: | 281 | for package in dirs: |
| 282 | if 'libdir' in (d.getVar('INSANE_SKIP_' + package) or "").split(): | 282 | if 'libdir' in (d.getVar('INSANE_SKIP:' + package) or "").split(): |
| 283 | bb.note("Package %s skipping libdir QA test" % (package)) | 283 | bb.note("Package %s skipping libdir QA test" % (package)) |
| 284 | skippackages.append(package) | 284 | skippackages.append(package) |
| 285 | elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-file-directory' and package.endswith("-dbg"): | 285 | elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-file-directory' and package.endswith("-dbg"): |
| @@ -483,7 +483,7 @@ def package_qa_check_xorg_driver_abi(path, name, d, elf, messages): | |||
| 483 | driverdir = d.expand("${libdir}/xorg/modules/drivers/") | 483 | driverdir = d.expand("${libdir}/xorg/modules/drivers/") |
| 484 | if driverdir in path and path.endswith(".so"): | 484 | if driverdir in path and path.endswith(".so"): |
| 485 | mlprefix = d.getVar('MLPREFIX') or '' | 485 | mlprefix = d.getVar('MLPREFIX') or '' |
| 486 | for rdep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + name) or ""): | 486 | for rdep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + name) or ""): |
| 487 | if rdep.startswith("%sxorg-abi-" % mlprefix): | 487 | if rdep.startswith("%sxorg-abi-" % mlprefix): |
| 488 | return | 488 | return |
| 489 | package_qa_add_message(messages, "xorg-driver-abi", "Package %s contains Xorg driver (%s) but no xorg-abi- dependencies" % (name, os.path.basename(path))) | 489 | package_qa_add_message(messages, "xorg-driver-abi", "Package %s contains Xorg driver (%s) but no xorg-abi- dependencies" % (name, os.path.basename(path))) |
| @@ -795,7 +795,7 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d): | |||
| 795 | 795 | ||
| 796 | # The python is not a package, but python-core provides it, so | 796 | # The python is not a package, but python-core provides it, so |
| 797 | # skip checking /usr/bin/python if python is in the rdeps, in | 797 | # skip checking /usr/bin/python if python is in the rdeps, in |
| 798 | # case there is a RDEPENDS_pkg = "python" in the recipe. | 798 | # case there is a RDEPENDS:pkg = "python" in the recipe. |
| 799 | for py in [ d.getVar('MLPREFIX') + "python", "python" ]: | 799 | for py in [ d.getVar('MLPREFIX') + "python", "python" ]: |
| 800 | if py in done: | 800 | if py in done: |
| 801 | filerdepends.pop("/usr/bin/python",None) | 801 | filerdepends.pop("/usr/bin/python",None) |
| @@ -808,7 +808,7 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d): | |||
| 808 | # For Saving the FILERPROVIDES, RPROVIDES and FILES_INFO | 808 | # For Saving the FILERPROVIDES, RPROVIDES and FILES_INFO |
| 809 | rdep_data = oe.packagedata.read_subpkgdata(rdep, d) | 809 | rdep_data = oe.packagedata.read_subpkgdata(rdep, d) |
| 810 | for key in rdep_data: | 810 | for key in rdep_data: |
| 811 | if key.startswith("FILERPROVIDES_") or key.startswith("RPROVIDES_"): | 811 | if key.startswith("FILERPROVIDES_") or key.startswith("RPROVIDES:"): |
| 812 | for subkey in bb.utils.explode_deps(rdep_data[key]): | 812 | for subkey in bb.utils.explode_deps(rdep_data[key]): |
| 813 | filerdepends.pop(subkey,None) | 813 | filerdepends.pop(subkey,None) |
| 814 | # Add the files list to the rprovides | 814 | # Add the files list to the rprovides |
| @@ -821,7 +821,7 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d): | |||
| 821 | break | 821 | break |
| 822 | if filerdepends: | 822 | if filerdepends: |
| 823 | for key in filerdepends: | 823 | for key in filerdepends: |
| 824 | error_msg = "%s contained in package %s requires %s, but no providers found in RDEPENDS_%s?" % \ | 824 | error_msg = "%s contained in package %s requires %s, but no providers found in RDEPENDS:%s?" % \ |
| 825 | (filerdepends[key].replace("_%s" % pkg, "").replace("@underscore@", "_"), pkg, key, pkg) | 825 | (filerdepends[key].replace("_%s" % pkg, "").replace("@underscore@", "_"), pkg, key, pkg) |
| 826 | package_qa_handle_error("file-rdeps", error_msg, d) | 826 | package_qa_handle_error("file-rdeps", error_msg, d) |
| 827 | package_qa_check_rdepends[vardepsexclude] = "OVERRIDES" | 827 | package_qa_check_rdepends[vardepsexclude] = "OVERRIDES" |
| @@ -903,7 +903,7 @@ def package_qa_check_unlisted_pkg_lics(package, d, messages): | |||
| 903 | """ | 903 | """ |
| 904 | Check that all licenses for a package are among the licenses for the recipe. | 904 | Check that all licenses for a package are among the licenses for the recipe. |
| 905 | """ | 905 | """ |
| 906 | pkg_lics = d.getVar('LICENSE_' + package) | 906 | pkg_lics = d.getVar('LICENSE:' + package) |
| 907 | if not pkg_lics: | 907 | if not pkg_lics: |
| 908 | return True | 908 | return True |
| 909 | 909 | ||
| @@ -913,7 +913,7 @@ def package_qa_check_unlisted_pkg_lics(package, d, messages): | |||
| 913 | return True | 913 | return True |
| 914 | 914 | ||
| 915 | package_qa_add_message(messages, "unlisted-pkg-lics", | 915 | package_qa_add_message(messages, "unlisted-pkg-lics", |
| 916 | "LICENSE_%s includes licenses (%s) that are not " | 916 | "LICENSE:%s includes licenses (%s) that are not " |
| 917 | "listed in LICENSE" % (package, ' '.join(unlisted))) | 917 | "listed in LICENSE" % (package, ' '.join(unlisted))) |
| 918 | return False | 918 | return False |
| 919 | 919 | ||
| @@ -996,8 +996,8 @@ def package_qa_check_missing_update_alternatives(pn, d, messages): | |||
| 996 | # Look at all packages and find out if any of those sets ALTERNATIVE variable | 996 | # Look at all packages and find out if any of those sets ALTERNATIVE variable |
| 997 | # without inheriting update-alternatives class | 997 | # without inheriting update-alternatives class |
| 998 | for pkg in (d.getVar('PACKAGES') or '').split(): | 998 | for pkg in (d.getVar('PACKAGES') or '').split(): |
| 999 | if d.getVar('ALTERNATIVE_%s' % pkg) and not bb.data.inherits_class('update-alternatives', d): | 999 | if d.getVar('ALTERNATIVE:%s' % pkg) and not bb.data.inherits_class('update-alternatives', d): |
| 1000 | package_qa_handle_error("missing-update-alternatives", "%s: recipe defines ALTERNATIVE_%s but doesn't inherit update-alternatives. This might fail during do_rootfs later!" % (pn, pkg), d) | 1000 | package_qa_handle_error("missing-update-alternatives", "%s: recipe defines ALTERNATIVE:%s but doesn't inherit update-alternatives. This might fail during do_rootfs later!" % (pn, pkg), d) |
| 1001 | 1001 | ||
| 1002 | # The PACKAGE FUNC to scan each package | 1002 | # The PACKAGE FUNC to scan each package |
| 1003 | python do_package_qa () { | 1003 | python do_package_qa () { |
| @@ -1066,7 +1066,7 @@ python do_package_qa () { | |||
| 1066 | 1066 | ||
| 1067 | for package in packages: | 1067 | for package in packages: |
| 1068 | skip = set((d.getVar('INSANE_SKIP') or "").split() + | 1068 | skip = set((d.getVar('INSANE_SKIP') or "").split() + |
| 1069 | (d.getVar('INSANE_SKIP_' + package) or "").split()) | 1069 | (d.getVar('INSANE_SKIP:' + package) or "").split()) |
| 1070 | if skip: | 1070 | if skip: |
| 1071 | bb.note("Package %s skipping QA tests: %s" % (package, str(skip))) | 1071 | bb.note("Package %s skipping QA tests: %s" % (package, str(skip))) |
| 1072 | 1072 | ||
| @@ -1108,7 +1108,7 @@ addtask do_package_qa after do_packagedata do_package before do_build | |||
| 1108 | python() { | 1108 | python() { |
| 1109 | pkgs = (d.getVar('PACKAGES') or '').split() | 1109 | pkgs = (d.getVar('PACKAGES') or '').split() |
| 1110 | for pkg in pkgs: | 1110 | for pkg in pkgs: |
| 1111 | d.appendVarFlag("do_package_qa", "vardeps", " INSANE_SKIP_{}".format(pkg)) | 1111 | d.appendVarFlag("do_package_qa", "vardeps", " INSANE_SKIP:{}".format(pkg)) |
| 1112 | } | 1112 | } |
| 1113 | 1113 | ||
| 1114 | SSTATETASKS += "do_package_qa" | 1114 | SSTATETASKS += "do_package_qa" |
| @@ -1309,8 +1309,8 @@ python () { | |||
| 1309 | msg = "FILESEXTRAPATHS-variable, must always use _prepend (or _append)\n" | 1309 | msg = "FILESEXTRAPATHS-variable, must always use _prepend (or _append)\n" |
| 1310 | msg += "type of assignment, and don't forget the colon.\n" | 1310 | msg += "type of assignment, and don't forget the colon.\n" |
| 1311 | msg += "Please assign it with the format of:\n" | 1311 | msg += "Please assign it with the format of:\n" |
| 1312 | msg += " FILESEXTRAPATHS_append := \":${THISDIR}/Your_Files_Path\" or\n" | 1312 | msg += " FILESEXTRAPATHS:append := \":${THISDIR}/Your_Files_Path\" or\n" |
| 1313 | msg += " FILESEXTRAPATHS_prepend := \"${THISDIR}/Your_Files_Path:\"\n" | 1313 | msg += " FILESEXTRAPATHS:prepend := \"${THISDIR}/Your_Files_Path:\"\n" |
| 1314 | msg += "in your bbappend file\n\n" | 1314 | msg += "in your bbappend file\n\n" |
| 1315 | msg += "Your incorrect assignment is:\n" | 1315 | msg += "Your incorrect assignment is:\n" |
| 1316 | msg += "%s\n" % extrapaths | 1316 | msg += "%s\n" % extrapaths |
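Note: the bbappend assignment that this check asks for, written out with the new override separator; the path is the placeholder used in the message itself:

    FILESEXTRAPATHS:prepend := "${THISDIR}/Your_Files_Path:"
    FILESEXTRAPATHS:append := ":${THISDIR}/Your_Files_Path"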
diff --git a/meta/classes/kernel-devicetree.bbclass b/meta/classes/kernel-devicetree.bbclass index 27a4905ac6..a50ea4fb67 100644 --- a/meta/classes/kernel-devicetree.bbclass +++ b/meta/classes/kernel-devicetree.bbclass | |||
| @@ -6,8 +6,8 @@ python () { | |||
| 6 | d.appendVar("PACKAGES", " ${KERNEL_PACKAGE_NAME}-image-zimage-bundle") | 6 | d.appendVar("PACKAGES", " ${KERNEL_PACKAGE_NAME}-image-zimage-bundle") |
| 7 | } | 7 | } |
| 8 | 8 | ||
| 9 | FILES_${KERNEL_PACKAGE_NAME}-devicetree = "/${KERNEL_IMAGEDEST}/*.dtb /${KERNEL_IMAGEDEST}/*.dtbo" | 9 | FILES:${KERNEL_PACKAGE_NAME}-devicetree = "/${KERNEL_IMAGEDEST}/*.dtb /${KERNEL_IMAGEDEST}/*.dtbo" |
| 10 | FILES_${KERNEL_PACKAGE_NAME}-image-zimage-bundle = "/${KERNEL_IMAGEDEST}/zImage-*.dtb.bin" | 10 | FILES:${KERNEL_PACKAGE_NAME}-image-zimage-bundle = "/${KERNEL_IMAGEDEST}/zImage-*.dtb.bin" |
| 11 | 11 | ||
| 12 | # Generate kernel+devicetree bundle | 12 | # Generate kernel+devicetree bundle |
| 13 | KERNEL_DEVICETREE_BUNDLE ?= "0" | 13 | KERNEL_DEVICETREE_BUNDLE ?= "0" |
| @@ -33,7 +33,7 @@ get_real_dtb_path_in_kernel () { | |||
| 33 | echo "$dtb_path" | 33 | echo "$dtb_path" |
| 34 | } | 34 | } |
| 35 | 35 | ||
| 36 | do_configure_append() { | 36 | do_configure:append() { |
| 37 | if [ "${KERNEL_DEVICETREE_BUNDLE}" = "1" ]; then | 37 | if [ "${KERNEL_DEVICETREE_BUNDLE}" = "1" ]; then |
| 38 | if echo ${KERNEL_IMAGETYPE_FOR_MAKE} | grep -q 'zImage'; then | 38 | if echo ${KERNEL_IMAGETYPE_FOR_MAKE} | grep -q 'zImage'; then |
| 39 | case "${ARCH}" in | 39 | case "${ARCH}" in |
| @@ -55,7 +55,7 @@ do_configure_append() { | |||
| 55 | fi | 55 | fi |
| 56 | } | 56 | } |
| 57 | 57 | ||
| 58 | do_compile_append() { | 58 | do_compile:append() { |
| 59 | if [ -n "${KERNEL_DTC_FLAGS}" ]; then | 59 | if [ -n "${KERNEL_DTC_FLAGS}" ]; then |
| 60 | export DTC_FLAGS="${KERNEL_DTC_FLAGS}" | 60 | export DTC_FLAGS="${KERNEL_DTC_FLAGS}" |
| 61 | fi | 61 | fi |
| @@ -66,7 +66,7 @@ do_compile_append() { | |||
| 66 | done | 66 | done |
| 67 | } | 67 | } |
| 68 | 68 | ||
| 69 | do_install_append() { | 69 | do_install:append() { |
| 70 | for dtbf in ${KERNEL_DEVICETREE}; do | 70 | for dtbf in ${KERNEL_DEVICETREE}; do |
| 71 | dtb=`normalize_dtb "$dtbf"` | 71 | dtb=`normalize_dtb "$dtbf"` |
| 72 | dtb_ext=${dtb##*.} | 72 | dtb_ext=${dtb##*.} |
| @@ -76,7 +76,7 @@ do_install_append() { | |||
| 76 | done | 76 | done |
| 77 | } | 77 | } |
| 78 | 78 | ||
| 79 | do_deploy_append() { | 79 | do_deploy:append() { |
| 80 | for dtbf in ${KERNEL_DEVICETREE}; do | 80 | for dtbf in ${KERNEL_DEVICETREE}; do |
| 81 | dtb=`normalize_dtb "$dtbf"` | 81 | dtb=`normalize_dtb "$dtbf"` |
| 82 | dtb_ext=${dtb##*.} | 82 | dtb_ext=${dtb##*.} |
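For comparison, a task append in the new syntax as it would appear in a recipe; the directory created here is only a placeholder:

    do_install:append() {
        # placeholder step; any extra install logic goes here
        install -d ${D}${sysconfdir}
    }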
diff --git a/meta/classes/kernel-fitimage.bbclass b/meta/classes/kernel-fitimage.bbclass index e363eeb64c..a9d1002200 100644 --- a/meta/classes/kernel-fitimage.bbclass +++ b/meta/classes/kernel-fitimage.bbclass | |||
| @@ -697,7 +697,7 @@ do_kernel_generate_rsa_keys() { | |||
| 697 | addtask kernel_generate_rsa_keys before do_assemble_fitimage after do_compile | 697 | addtask kernel_generate_rsa_keys before do_assemble_fitimage after do_compile |
| 698 | 698 | ||
| 699 | kernel_do_deploy[vardepsexclude] = "DATETIME" | 699 | kernel_do_deploy[vardepsexclude] = "DATETIME" |
| 700 | kernel_do_deploy_append() { | 700 | kernel_do_deploy:append() { |
| 701 | # Update deploy directory | 701 | # Update deploy directory |
| 702 | if echo ${KERNEL_IMAGETYPES} | grep -wq "fitImage"; then | 702 | if echo ${KERNEL_IMAGETYPES} | grep -wq "fitImage"; then |
| 703 | 703 | ||
diff --git a/meta/classes/kernel-grub.bbclass b/meta/classes/kernel-grub.bbclass index 5d92f3b636..0bf8fda3e1 100644 --- a/meta/classes/kernel-grub.bbclass +++ b/meta/classes/kernel-grub.bbclass | |||
| @@ -97,9 +97,9 @@ python __anonymous () { | |||
| 97 | 97 | ||
| 98 | for type in imagetypes.split(): | 98 | for type in imagetypes.split(): |
| 99 | typelower = type.lower() | 99 | typelower = type.lower() |
| 100 | preinst_append = preinst.replace('KERNEL_IMAGETYPE', type) | 100 | preinst_append = preinst.replace('KERNEL_IMAGETYPE', type) |
| 101 | postinst_prepend = postinst.replace('KERNEL_IMAGETYPE', type) | 101 | postinst_prepend = postinst.replace('KERNEL_IMAGETYPE', type) |
| 102 | d.setVar('pkg_preinst_kernel-image-' + typelower + '_append', preinst_append) | 102 | d.setVar('pkg_preinst:kernel-image-' + typelower + ':append', preinst_append) |
| 103 | d.setVar('pkg_postinst_kernel-image-' + typelower + '_prepend', postinst_prepend) | 103 | d.setVar('pkg_postinst:kernel-image-' + typelower + ':prepend', postinst_prepend) |
| 104 | } | 104 | } |
| 105 | 105 | ||
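A package scriptlet written directly in a recipe uses the same colon form; a minimal sketch with a placeholder body:

    pkg_postinst:${PN} () {
        # placeholder; real scriptlets test "$D" to tell image-construction time
        # from on-target execution, as the hunks in this class do
        [ -n "$D" ] || echo "${PN} configured"
    }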
diff --git a/meta/classes/kernel-module-split.bbclass b/meta/classes/kernel-module-split.bbclass index baa32e0a90..b56dd4a9c7 100644 --- a/meta/classes/kernel-module-split.bbclass +++ b/meta/classes/kernel-module-split.bbclass | |||
| @@ -1,4 +1,4 @@ | |||
| 1 | pkg_postinst_modules () { | 1 | pkg_postinst:modules () { |
| 2 | if [ -z "$D" ]; then | 2 | if [ -z "$D" ]; then |
| 3 | depmod -a ${KERNEL_VERSION} | 3 | depmod -a ${KERNEL_VERSION} |
| 4 | else | 4 | else |
| @@ -8,7 +8,7 @@ else | |||
| 8 | fi | 8 | fi |
| 9 | } | 9 | } |
| 10 | 10 | ||
| 11 | pkg_postrm_modules () { | 11 | pkg_postrm:modules () { |
| 12 | if [ -z "$D" ]; then | 12 | if [ -z "$D" ]; then |
| 13 | depmod -a ${KERNEL_VERSION} | 13 | depmod -a ${KERNEL_VERSION} |
| 14 | else | 14 | else |
| @@ -24,11 +24,11 @@ fi | |||
| 24 | 24 | ||
| 25 | PACKAGE_WRITE_DEPS += "kmod-native depmodwrapper-cross" | 25 | PACKAGE_WRITE_DEPS += "kmod-native depmodwrapper-cross" |
| 26 | 26 | ||
| 27 | do_install_append() { | 27 | do_install:append() { |
| 28 | install -d ${D}${sysconfdir}/modules-load.d/ ${D}${sysconfdir}/modprobe.d/ | 28 | install -d ${D}${sysconfdir}/modules-load.d/ ${D}${sysconfdir}/modprobe.d/ |
| 29 | } | 29 | } |
| 30 | 30 | ||
| 31 | PACKAGESPLITFUNCS_prepend = "split_kernel_module_packages " | 31 | PACKAGESPLITFUNCS:prepend = "split_kernel_module_packages " |
| 32 | 32 | ||
| 33 | KERNEL_MODULES_META_PACKAGE ?= "${@ d.getVar("KERNEL_PACKAGE_NAME") or "kernel" }-modules" | 33 | KERNEL_MODULES_META_PACKAGE ?= "${@ d.getVar("KERNEL_PACKAGE_NAME") or "kernel" }-modules" |
| 34 | 34 | ||
| @@ -100,11 +100,11 @@ python split_kernel_module_packages () { | |||
| 100 | else: | 100 | else: |
| 101 | f.write('%s\n' % basename) | 101 | f.write('%s\n' % basename) |
| 102 | f.close() | 102 | f.close() |
| 103 | postinst = d.getVar('pkg_postinst_%s' % pkg) | 103 | postinst = d.getVar('pkg_postinst:%s' % pkg) |
| 104 | if not postinst: | 104 | if not postinst: |
| 105 | bb.fatal("pkg_postinst_%s not defined" % pkg) | 105 | bb.fatal("pkg_postinst:%s not defined" % pkg) |
| 106 | postinst += d.getVar('autoload_postinst_fragment') % (autoload or basename) | 106 | postinst += d.getVar('autoload_postinst_fragment') % (autoload or basename) |
| 107 | d.setVar('pkg_postinst_%s' % pkg, postinst) | 107 | d.setVar('pkg_postinst:%s' % pkg, postinst) |
| 108 | 108 | ||
| 109 | # Write out any modconf fragment | 109 | # Write out any modconf fragment |
| 110 | modconflist = (d.getVar("KERNEL_MODULE_PROBECONF") or "").split() | 110 | modconflist = (d.getVar("KERNEL_MODULE_PROBECONF") or "").split() |
| @@ -117,19 +117,19 @@ python split_kernel_module_packages () { | |||
| 117 | elif modconf: | 117 | elif modconf: |
| 118 | bb.error("Please ensure module %s is listed in KERNEL_MODULE_PROBECONF since module_conf_%s is set" % (basename, basename)) | 118 | bb.error("Please ensure module %s is listed in KERNEL_MODULE_PROBECONF since module_conf_%s is set" % (basename, basename)) |
| 119 | 119 | ||
| 120 | files = d.getVar('FILES_%s' % pkg) | 120 | files = d.getVar('FILES:%s' % pkg) |
| 121 | files = "%s /etc/modules-load.d/%s.conf /etc/modprobe.d/%s.conf" % (files, basename, basename) | 121 | files = "%s /etc/modules-load.d/%s.conf /etc/modprobe.d/%s.conf" % (files, basename, basename) |
| 122 | d.setVar('FILES_%s' % pkg, files) | 122 | d.setVar('FILES:%s' % pkg, files) |
| 123 | 123 | ||
| 124 | conffiles = d.getVar('CONFFILES_%s' % pkg) | 124 | conffiles = d.getVar('CONFFILES:%s' % pkg) |
| 125 | conffiles = "%s /etc/modules-load.d/%s.conf /etc/modprobe.d/%s.conf" % (conffiles, basename, basename) | 125 | conffiles = "%s /etc/modules-load.d/%s.conf /etc/modprobe.d/%s.conf" % (conffiles, basename, basename) |
| 126 | d.setVar('CONFFILES_%s' % pkg, conffiles) | 126 | d.setVar('CONFFILES:%s' % pkg, conffiles) |
| 127 | 127 | ||
| 128 | if "description" in vals: | 128 | if "description" in vals: |
| 129 | old_desc = d.getVar('DESCRIPTION_' + pkg) or "" | 129 | old_desc = d.getVar('DESCRIPTION:' + pkg) or "" |
| 130 | d.setVar('DESCRIPTION_' + pkg, old_desc + "; " + vals["description"]) | 130 | d.setVar('DESCRIPTION:' + pkg, old_desc + "; " + vals["description"]) |
| 131 | 131 | ||
| 132 | rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg) or "") | 132 | rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS:' + pkg) or "") |
| 133 | modinfo_deps = [] | 133 | modinfo_deps = [] |
| 134 | if "depends" in vals and vals["depends"] != "": | 134 | if "depends" in vals and vals["depends"] != "": |
| 135 | for dep in vals["depends"].split(","): | 135 | for dep in vals["depends"].split(","): |
| @@ -139,16 +139,16 @@ python split_kernel_module_packages () { | |||
| 139 | for dep in modinfo_deps: | 139 | for dep in modinfo_deps: |
| 140 | if not dep in rdepends: | 140 | if not dep in rdepends: |
| 141 | rdepends[dep] = [] | 141 | rdepends[dep] = [] |
| 142 | d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False)) | 142 | d.setVar('RDEPENDS:' + pkg, bb.utils.join_deps(rdepends, commasep=False)) |
| 143 | 143 | ||
| 144 | # Avoid automatic -dev recommendations for modules ending with -dev. | 144 | # Avoid automatic -dev recommendations for modules ending with -dev. |
| 145 | d.setVarFlag('RRECOMMENDS_' + pkg, 'nodeprrecs', 1) | 145 | d.setVarFlag('RRECOMMENDS:' + pkg, 'nodeprrecs', 1) |
| 146 | 146 | ||
| 147 | # Provide virtual package without postfix | 147 | # Provide virtual package without postfix |
| 148 | providevirt = d.getVar('KERNEL_MODULE_PROVIDE_VIRTUAL') | 148 | providevirt = d.getVar('KERNEL_MODULE_PROVIDE_VIRTUAL') |
| 149 | if providevirt == "1": | 149 | if providevirt == "1": |
| 150 | postfix = format.split('%s')[1] | 150 | postfix = format.split('%s')[1] |
| 151 | d.setVar('RPROVIDES_' + pkg, pkg.replace(postfix, '')) | 151 | d.setVar('RPROVIDES:' + pkg, pkg.replace(postfix, '')) |
| 152 | 152 | ||
| 153 | kernel_package_name = d.getVar("KERNEL_PACKAGE_NAME") or "kernel" | 153 | kernel_package_name = d.getVar("KERNEL_PACKAGE_NAME") or "kernel" |
| 154 | kernel_version = d.getVar("KERNEL_VERSION") | 154 | kernel_version = d.getVar("KERNEL_VERSION") |
| @@ -159,13 +159,13 @@ python split_kernel_module_packages () { | |||
| 159 | module_pattern_suffix = d.getVar('KERNEL_MODULE_PACKAGE_SUFFIX') | 159 | module_pattern_suffix = d.getVar('KERNEL_MODULE_PACKAGE_SUFFIX') |
| 160 | module_pattern = module_pattern_prefix + kernel_package_name + '-module-%s' + module_pattern_suffix | 160 | module_pattern = module_pattern_prefix + kernel_package_name + '-module-%s' + module_pattern_suffix |
| 161 | 161 | ||
| 162 | postinst = d.getVar('pkg_postinst_modules') | 162 | postinst = d.getVar('pkg_postinst:modules') |
| 163 | postrm = d.getVar('pkg_postrm_modules') | 163 | postrm = d.getVar('pkg_postrm:modules') |
| 164 | 164 | ||
| 165 | modules = do_split_packages(d, root='${nonarch_base_libdir}/modules', file_regex=module_regex, output_pattern=module_pattern, description='%s kernel module', postinst=postinst, postrm=postrm, recursive=True, hook=frob_metadata, extra_depends='%s-%s' % (kernel_package_name, kernel_version)) | 165 | modules = do_split_packages(d, root='${nonarch_base_libdir}/modules', file_regex=module_regex, output_pattern=module_pattern, description='%s kernel module', postinst=postinst, postrm=postrm, recursive=True, hook=frob_metadata, extra_depends='%s-%s' % (kernel_package_name, kernel_version)) |
| 166 | if modules: | 166 | if modules: |
| 167 | metapkg = d.getVar('KERNEL_MODULES_META_PACKAGE') | 167 | metapkg = d.getVar('KERNEL_MODULES_META_PACKAGE') |
| 168 | d.appendVar('RDEPENDS_' + metapkg, ' '+' '.join(modules)) | 168 | d.appendVar('RDEPENDS:' + metapkg, ' '+' '.join(modules)) |
| 169 | 169 | ||
| 170 | # If modules-load.d and modprobe.d are empty at this point, remove them to | 170 | # If modules-load.d and modprobe.d are empty at this point, remove them to |
| 171 | # avoid warnings. removedirs only raises an OSError if an empty | 171 | # avoid warnings. removedirs only raises an OSError if an empty |
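On the Python side the converted names are ordinary datastore keys, so the colon simply appears inside the strings passed to getVar/setVar/appendVar; a sketch using a hypothetical sub-package and placeholder path:

    python __anonymous () {
        pkg = d.getVar('PN') + '-extra'                  # hypothetical package name
        d.appendVar('PACKAGES', ' ' + pkg)
        d.setVar('FILES:' + pkg, '${datadir}/extra')     # placeholder path
        d.appendVar('RDEPENDS:' + pkg, ' ' + d.getVar('PN'))
    }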
diff --git a/meta/classes/kernel-yocto.bbclass b/meta/classes/kernel-yocto.bbclass index 0df61cdef0..7deadcaedb 100644 --- a/meta/classes/kernel-yocto.bbclass +++ b/meta/classes/kernel-yocto.bbclass | |||
| @@ -474,7 +474,7 @@ python do_config_analysis() { | |||
| 474 | env['srctree'] = s | 474 | env['srctree'] = s |
| 475 | 475 | ||
| 476 | # read specific symbols from the kernel recipe or from local.conf | 476 | # read specific symbols from the kernel recipe or from local.conf |
| 477 | # i.e.: CONFIG_ANALYSIS_pn-linux-yocto-dev = 'NF_CONNTRACK LOCALVERSION' | 477 | # i.e.: CONFIG_ANALYSIS:pn-linux-yocto-dev = 'NF_CONNTRACK LOCALVERSION' |
| 478 | config = d.getVar( 'CONFIG_ANALYSIS' ) | 478 | config = d.getVar( 'CONFIG_ANALYSIS' ) |
| 479 | if not config: | 479 | if not config: |
| 480 | config = [ "" ] | 480 | config = [ "" ] |
diff --git a/meta/classes/kernel.bbclass b/meta/classes/kernel.bbclass index 846b19663b..027e66eec7 100644 --- a/meta/classes/kernel.bbclass +++ b/meta/classes/kernel.bbclass | |||
| @@ -46,7 +46,7 @@ python __anonymous () { | |||
| 46 | kpn = d.getVar("KERNEL_PACKAGE_NAME") | 46 | kpn = d.getVar("KERNEL_PACKAGE_NAME") |
| 47 | 47 | ||
| 48 | # XXX Remove this after bug 11905 is resolved | 48 | # XXX Remove this after bug 11905 is resolved |
| 49 | # FILES_${KERNEL_PACKAGE_NAME}-dev doesn't expand correctly | 49 | # FILES:${KERNEL_PACKAGE_NAME}-dev doesn't expand correctly |
| 50 | if kpn == pn: | 50 | if kpn == pn: |
| 51 | bb.warn("Some packages (E.g. *-dev) might be missing due to " | 51 | bb.warn("Some packages (E.g. *-dev) might be missing due to " |
| 52 | "bug 11905 (variable KERNEL_PACKAGE_NAME == PN)") | 52 | "bug 11905 (variable KERNEL_PACKAGE_NAME == PN)") |
| @@ -96,11 +96,11 @@ python __anonymous () { | |||
| 96 | continue | 96 | continue |
| 97 | typelower = type.lower() | 97 | typelower = type.lower() |
| 98 | d.appendVar('PACKAGES', ' %s-image-%s' % (kname, typelower)) | 98 | d.appendVar('PACKAGES', ' %s-image-%s' % (kname, typelower)) |
| 99 | d.setVar('FILES_' + kname + '-image-' + typelower, '/' + imagedest + '/' + type + '-${KERNEL_VERSION_NAME}' + ' /' + imagedest + '/' + type) | 99 | d.setVar('FILES:' + kname + '-image-' + typelower, '/' + imagedest + '/' + type + '-${KERNEL_VERSION_NAME}' + ' /' + imagedest + '/' + type) |
| 100 | d.appendVar('RDEPENDS_%s-image' % kname, ' %s-image-%s' % (kname, typelower)) | 100 | d.appendVar('RDEPENDS:%s-image' % kname, ' %s-image-%s' % (kname, typelower)) |
| 101 | d.setVar('PKG_%s-image-%s' % (kname,typelower), '%s-image-%s-${KERNEL_VERSION_PKG_NAME}' % (kname, typelower)) | 101 | d.setVar('PKG:%s-image-%s' % (kname,typelower), '%s-image-%s-${KERNEL_VERSION_PKG_NAME}' % (kname, typelower)) |
| 102 | d.setVar('ALLOW_EMPTY_%s-image-%s' % (kname, typelower), '1') | 102 | d.setVar('ALLOW_EMPTY:%s-image-%s' % (kname, typelower), '1') |
| 103 | d.setVar('pkg_postinst_%s-image-%s' % (kname,typelower), """set +e | 103 | d.setVar('pkg_postinst:%s-image-%s' % (kname,typelower), """set +e |
| 104 | if [ -n "$D" ]; then | 104 | if [ -n "$D" ]; then |
| 105 | ln -sf %s-${KERNEL_VERSION} $D/${KERNEL_IMAGEDEST}/%s > /dev/null 2>&1 | 105 | ln -sf %s-${KERNEL_VERSION} $D/${KERNEL_IMAGEDEST}/%s > /dev/null 2>&1 |
| 106 | else | 106 | else |
| @@ -112,7 +112,7 @@ else | |||
| 112 | fi | 112 | fi |
| 113 | set -e | 113 | set -e |
| 114 | """ % (type, type, type, type, type, type, type)) | 114 | """ % (type, type, type, type, type, type, type)) |
| 115 | d.setVar('pkg_postrm_%s-image-%s' % (kname,typelower), """set +e | 115 | d.setVar('pkg_postrm:%s-image-%s' % (kname,typelower), """set +e |
| 116 | if [ -f "${KERNEL_IMAGEDEST}/%s" -o -L "${KERNEL_IMAGEDEST}/%s" ]; then | 116 | if [ -f "${KERNEL_IMAGEDEST}/%s" -o -L "${KERNEL_IMAGEDEST}/%s" ]; then |
| 117 | rm -f ${KERNEL_IMAGEDEST}/%s > /dev/null 2>&1 | 117 | rm -f ${KERNEL_IMAGEDEST}/%s > /dev/null 2>&1 |
| 118 | fi | 118 | fi |
| @@ -303,7 +303,7 @@ do_bundle_initramfs () { | |||
| 303 | } | 303 | } |
| 304 | do_bundle_initramfs[dirs] = "${B}" | 304 | do_bundle_initramfs[dirs] = "${B}" |
| 305 | 305 | ||
| 306 | python do_devshell_prepend () { | 306 | python do_devshell:prepend () { |
| 307 | os.environ["LDFLAGS"] = '' | 307 | os.environ["LDFLAGS"] = '' |
| 308 | } | 308 | } |
| 309 | 309 | ||
| @@ -591,7 +591,7 @@ kernel_do_configure() { | |||
| 591 | fi | 591 | fi |
| 592 | 592 | ||
| 593 | # Copy defconfig to .config if .config does not exist. This allows | 593 | # Copy defconfig to .config if .config does not exist. This allows |
| 594 | # recipes to manage the .config themselves in do_configure_prepend(). | 594 | # recipes to manage the .config themselves in do_configure:prepend(). |
| 595 | if [ -f "${WORKDIR}/defconfig" ] && [ ! -f "${B}/.config" ]; then | 595 | if [ -f "${WORKDIR}/defconfig" ] && [ ! -f "${B}/.config" ]; then |
| 596 | cp "${WORKDIR}/defconfig" "${B}/.config" | 596 | cp "${WORKDIR}/defconfig" "${B}/.config" |
| 597 | fi | 597 | fi |
| @@ -608,34 +608,34 @@ addtask savedefconfig after do_configure | |||
| 608 | 608 | ||
| 609 | inherit cml1 | 609 | inherit cml1 |
| 610 | 610 | ||
| 611 | KCONFIG_CONFIG_COMMAND_append = " LD='${KERNEL_LD}' HOSTLDFLAGS='${BUILD_LDFLAGS}'" | 611 | KCONFIG_CONFIG_COMMAND:append = " LD='${KERNEL_LD}' HOSTLDFLAGS='${BUILD_LDFLAGS}'" |
| 612 | 612 | ||
| 613 | EXPORT_FUNCTIONS do_compile do_install do_configure | 613 | EXPORT_FUNCTIONS do_compile do_install do_configure |
| 614 | 614 | ||
| 615 | # kernel-base becomes kernel-${KERNEL_VERSION} | 615 | # kernel-base becomes kernel-${KERNEL_VERSION} |
| 616 | # kernel-image becomes kernel-image-${KERNEL_VERSION} | 616 | # kernel-image becomes kernel-image-${KERNEL_VERSION} |
| 617 | PACKAGES = "${KERNEL_PACKAGE_NAME} ${KERNEL_PACKAGE_NAME}-base ${KERNEL_PACKAGE_NAME}-vmlinux ${KERNEL_PACKAGE_NAME}-image ${KERNEL_PACKAGE_NAME}-dev ${KERNEL_PACKAGE_NAME}-modules" | 617 | PACKAGES = "${KERNEL_PACKAGE_NAME} ${KERNEL_PACKAGE_NAME}-base ${KERNEL_PACKAGE_NAME}-vmlinux ${KERNEL_PACKAGE_NAME}-image ${KERNEL_PACKAGE_NAME}-dev ${KERNEL_PACKAGE_NAME}-modules" |
| 618 | FILES_${PN} = "" | 618 | FILES:${PN} = "" |
| 619 | FILES_${KERNEL_PACKAGE_NAME}-base = "${nonarch_base_libdir}/modules/${KERNEL_VERSION}/modules.order ${nonarch_base_libdir}/modules/${KERNEL_VERSION}/modules.builtin ${nonarch_base_libdir}/modules/${KERNEL_VERSION}/modules.builtin.modinfo" | 619 | FILES:${KERNEL_PACKAGE_NAME}-base = "${nonarch_base_libdir}/modules/${KERNEL_VERSION}/modules.order ${nonarch_base_libdir}/modules/${KERNEL_VERSION}/modules.builtin ${nonarch_base_libdir}/modules/${KERNEL_VERSION}/modules.builtin.modinfo" |
| 620 | FILES_${KERNEL_PACKAGE_NAME}-image = "" | 620 | FILES:${KERNEL_PACKAGE_NAME}-image = "" |
| 621 | FILES_${KERNEL_PACKAGE_NAME}-dev = "/boot/System.map* /boot/Module.symvers* /boot/config* ${KERNEL_SRC_PATH} ${nonarch_base_libdir}/modules/${KERNEL_VERSION}/build" | 621 | FILES:${KERNEL_PACKAGE_NAME}-dev = "/boot/System.map* /boot/Module.symvers* /boot/config* ${KERNEL_SRC_PATH} ${nonarch_base_libdir}/modules/${KERNEL_VERSION}/build" |
| 622 | FILES_${KERNEL_PACKAGE_NAME}-vmlinux = "/boot/vmlinux-${KERNEL_VERSION_NAME}" | 622 | FILES:${KERNEL_PACKAGE_NAME}-vmlinux = "/boot/vmlinux-${KERNEL_VERSION_NAME}" |
| 623 | FILES_${KERNEL_PACKAGE_NAME}-modules = "" | 623 | FILES:${KERNEL_PACKAGE_NAME}-modules = "" |
| 624 | RDEPENDS_${KERNEL_PACKAGE_NAME} = "${KERNEL_PACKAGE_NAME}-base" | 624 | RDEPENDS:${KERNEL_PACKAGE_NAME} = "${KERNEL_PACKAGE_NAME}-base" |
| 625 | # Allow machines to override this dependency if kernel image files are | 625 | # Allow machines to override this dependency if kernel image files are |
| 626 | # not wanted in images as standard | 626 | # not wanted in images as standard |
| 627 | RDEPENDS_${KERNEL_PACKAGE_NAME}-base ?= "${KERNEL_PACKAGE_NAME}-image" | 627 | RDEPENDS:${KERNEL_PACKAGE_NAME}-base ?= "${KERNEL_PACKAGE_NAME}-image" |
| 628 | PKG_${KERNEL_PACKAGE_NAME}-image = "${KERNEL_PACKAGE_NAME}-image-${@legitimize_package_name(d.getVar('KERNEL_VERSION'))}" | 628 | PKG:${KERNEL_PACKAGE_NAME}-image = "${KERNEL_PACKAGE_NAME}-image-${@legitimize_package_name(d.getVar('KERNEL_VERSION'))}" |
| 629 | RDEPENDS_${KERNEL_PACKAGE_NAME}-image += "${@oe.utils.conditional('KERNEL_IMAGETYPE', 'vmlinux', '${KERNEL_PACKAGE_NAME}-vmlinux', '', d)}" | 629 | RDEPENDS:${KERNEL_PACKAGE_NAME}-image += "${@oe.utils.conditional('KERNEL_IMAGETYPE', 'vmlinux', '${KERNEL_PACKAGE_NAME}-vmlinux', '', d)}" |
| 630 | PKG_${KERNEL_PACKAGE_NAME}-base = "${KERNEL_PACKAGE_NAME}-${@legitimize_package_name(d.getVar('KERNEL_VERSION'))}" | 630 | PKG:${KERNEL_PACKAGE_NAME}-base = "${KERNEL_PACKAGE_NAME}-${@legitimize_package_name(d.getVar('KERNEL_VERSION'))}" |
| 631 | RPROVIDES_${KERNEL_PACKAGE_NAME}-base += "${KERNEL_PACKAGE_NAME}-${KERNEL_VERSION}" | 631 | RPROVIDES:${KERNEL_PACKAGE_NAME}-base += "${KERNEL_PACKAGE_NAME}-${KERNEL_VERSION}" |
| 632 | ALLOW_EMPTY_${KERNEL_PACKAGE_NAME} = "1" | 632 | ALLOW_EMPTY:${KERNEL_PACKAGE_NAME} = "1" |
| 633 | ALLOW_EMPTY_${KERNEL_PACKAGE_NAME}-base = "1" | 633 | ALLOW_EMPTY:${KERNEL_PACKAGE_NAME}-base = "1" |
| 634 | ALLOW_EMPTY_${KERNEL_PACKAGE_NAME}-image = "1" | 634 | ALLOW_EMPTY:${KERNEL_PACKAGE_NAME}-image = "1" |
| 635 | ALLOW_EMPTY_${KERNEL_PACKAGE_NAME}-modules = "1" | 635 | ALLOW_EMPTY:${KERNEL_PACKAGE_NAME}-modules = "1" |
| 636 | DESCRIPTION_${KERNEL_PACKAGE_NAME}-modules = "Kernel modules meta package" | 636 | DESCRIPTION:${KERNEL_PACKAGE_NAME}-modules = "Kernel modules meta package" |
| 637 | 637 | ||
| 638 | pkg_postinst_${KERNEL_PACKAGE_NAME}-base () { | 638 | pkg_postinst:${KERNEL_PACKAGE_NAME}-base () { |
| 639 | if [ ! -e "$D/lib/modules/${KERNEL_VERSION}" ]; then | 639 | if [ ! -e "$D/lib/modules/${KERNEL_VERSION}" ]; then |
| 640 | mkdir -p $D/lib/modules/${KERNEL_VERSION} | 640 | mkdir -p $D/lib/modules/${KERNEL_VERSION} |
| 641 | fi | 641 | fi |
| @@ -646,7 +646,7 @@ pkg_postinst_${KERNEL_PACKAGE_NAME}-base () { | |||
| 646 | fi | 646 | fi |
| 647 | } | 647 | } |
| 648 | 648 | ||
| 649 | PACKAGESPLITFUNCS_prepend = "split_kernel_packages " | 649 | PACKAGESPLITFUNCS:prepend = "split_kernel_packages " |
| 650 | 650 | ||
| 651 | python split_kernel_packages () { | 651 | python split_kernel_packages () { |
| 652 | do_split_packages(d, root='${nonarch_base_libdir}/firmware', file_regex=r'^(.*)\.(bin|fw|cis|csp|dsp)$', output_pattern='${KERNEL_PACKAGE_NAME}-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='') | 652 | do_split_packages(d, root='${nonarch_base_libdir}/firmware', file_regex=r'^(.*)\.(bin|fw|cis|csp|dsp)$', output_pattern='${KERNEL_PACKAGE_NAME}-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='') |
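The declarative equivalents in recipe metadata read the same way; the extra package below is hypothetical:

    PACKAGES =+ "${PN}-tools"
    FILES:${PN}-tools = "${bindir}"
    ALLOW_EMPTY:${PN}-tools = "1"
    RDEPENDS:${PN}-tools = "${PN}"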
diff --git a/meta/classes/libc-package.bbclass b/meta/classes/libc-package.bbclass index de3b4250c7..7a661d44bd 100644 --- a/meta/classes/libc-package.bbclass +++ b/meta/classes/libc-package.bbclass | |||
| @@ -42,7 +42,7 @@ python __anonymous () { | |||
| 42 | # try to fix disable charsets/locales/locale-code compile fail | 42 | # try to fix disable charsets/locales/locale-code compile fail |
| 43 | PACKAGE_NO_GCONV ?= "0" | 43 | PACKAGE_NO_GCONV ?= "0" |
| 44 | 44 | ||
| 45 | OVERRIDES_append = ":${TARGET_ARCH}-${TARGET_OS}" | 45 | OVERRIDES:append = ":${TARGET_ARCH}-${TARGET_OS}" |
| 46 | 46 | ||
| 47 | locale_base_postinst_ontarget() { | 47 | locale_base_postinst_ontarget() { |
| 48 | localedef --inputfile=${datadir}/i18n/locales/%s --charmap=%s %s | 48 | localedef --inputfile=${datadir}/i18n/locales/%s --charmap=%s %s |
| @@ -129,9 +129,9 @@ python package_do_split_gconvs () { | |||
| 129 | deps.append(dp) | 129 | deps.append(dp) |
| 130 | f.close() | 130 | f.close() |
| 131 | if deps != []: | 131 | if deps != []: |
| 132 | d.setVar('RDEPENDS_%s' % pkg, " ".join(deps)) | 132 | d.setVar('RDEPENDS:%s' % pkg, " ".join(deps)) |
| 133 | if bpn != 'glibc': | 133 | if bpn != 'glibc': |
| 134 | d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc')) | 134 | d.setVar('RPROVIDES:%s' % pkg, pkg.replace(bpn, 'glibc')) |
| 135 | 135 | ||
| 136 | do_split_packages(d, gconv_libdir, file_regex=r'^(.*)\.so$', output_pattern=bpn+'-gconv-%s', \ | 136 | do_split_packages(d, gconv_libdir, file_regex=r'^(.*)\.so$', output_pattern=bpn+'-gconv-%s', \ |
| 137 | description='gconv module for character set %s', hook=calc_gconv_deps, \ | 137 | description='gconv module for character set %s', hook=calc_gconv_deps, \ |
| @@ -151,9 +151,9 @@ python package_do_split_gconvs () { | |||
| 151 | deps.append(dp) | 151 | deps.append(dp) |
| 152 | f.close() | 152 | f.close() |
| 153 | if deps != []: | 153 | if deps != []: |
| 154 | d.setVar('RDEPENDS_%s' % pkg, " ".join(deps)) | 154 | d.setVar('RDEPENDS:%s' % pkg, " ".join(deps)) |
| 155 | if bpn != 'glibc': | 155 | if bpn != 'glibc': |
| 156 | d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc')) | 156 | d.setVar('RPROVIDES:%s' % pkg, pkg.replace(bpn, 'glibc')) |
| 157 | 157 | ||
| 158 | do_split_packages(d, charmap_dir, file_regex=r'^(.*)\.gz$', output_pattern=bpn+'-charmap-%s', \ | 158 | do_split_packages(d, charmap_dir, file_regex=r'^(.*)\.gz$', output_pattern=bpn+'-charmap-%s', \ |
| 159 | description='character map for %s encoding', hook=calc_charmap_deps, extra_depends='') | 159 | description='character map for %s encoding', hook=calc_charmap_deps, extra_depends='') |
| @@ -172,9 +172,9 @@ python package_do_split_gconvs () { | |||
| 172 | deps.append(dp) | 172 | deps.append(dp) |
| 173 | f.close() | 173 | f.close() |
| 174 | if deps != []: | 174 | if deps != []: |
| 175 | d.setVar('RDEPENDS_%s' % pkg, " ".join(deps)) | 175 | d.setVar('RDEPENDS:%s' % pkg, " ".join(deps)) |
| 176 | if bpn != 'glibc': | 176 | if bpn != 'glibc': |
| 177 | d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc')) | 177 | d.setVar('RPROVIDES:%s' % pkg, pkg.replace(bpn, 'glibc')) |
| 178 | 178 | ||
| 179 | do_split_packages(d, locales_dir, file_regex=r'(.*)', output_pattern=bpn+'-localedata-%s', \ | 179 | do_split_packages(d, locales_dir, file_regex=r'(.*)', output_pattern=bpn+'-localedata-%s', \ |
| 180 | description='locale definition for %s', hook=calc_locale_deps, extra_depends='') | 180 | description='locale definition for %s', hook=calc_locale_deps, extra_depends='') |
| @@ -210,11 +210,11 @@ python package_do_split_gconvs () { | |||
| 210 | supported[locale] = charset | 210 | supported[locale] = charset |
| 211 | 211 | ||
| 212 | def output_locale_source(name, pkgname, locale, encoding): | 212 | def output_locale_source(name, pkgname, locale, encoding): |
| 213 | d.setVar('RDEPENDS_%s' % pkgname, '%slocaledef %s-localedata-%s %s-charmap-%s' % \ | 213 | d.setVar('RDEPENDS:%s' % pkgname, '%slocaledef %s-localedata-%s %s-charmap-%s' % \ |
| 214 | (mlprefix, mlprefix+bpn, legitimize_package_name(locale), mlprefix+bpn, legitimize_package_name(encoding))) | 214 | (mlprefix, mlprefix+bpn, legitimize_package_name(locale), mlprefix+bpn, legitimize_package_name(encoding))) |
| 215 | d.setVar('pkg_postinst_ontarget_%s' % pkgname, d.getVar('locale_base_postinst_ontarget') \ | 215 | d.setVar('pkg_postinst_ontarget:%s' % pkgname, d.getVar('locale_base_postinst_ontarget') \ |
| 216 | % (locale, encoding, locale)) | 216 | % (locale, encoding, locale)) |
| 217 | d.setVar('pkg_postrm_%s' % pkgname, d.getVar('locale_base_postrm') % \ | 217 | d.setVar('pkg_postrm:%s' % pkgname, d.getVar('locale_base_postrm') % \ |
| 218 | (locale, encoding, locale)) | 218 | (locale, encoding, locale)) |
| 219 | 219 | ||
| 220 | def output_locale_binary_rdepends(name, pkgname, locale, encoding): | 220 | def output_locale_binary_rdepends(name, pkgname, locale, encoding): |
| @@ -222,8 +222,8 @@ python package_do_split_gconvs () { | |||
| 222 | lcsplit = d.getVar('GLIBC_SPLIT_LC_PACKAGES') | 222 | lcsplit = d.getVar('GLIBC_SPLIT_LC_PACKAGES') |
| 223 | if lcsplit and int(lcsplit): | 223 | if lcsplit and int(lcsplit): |
| 224 | d.appendVar('PACKAGES', ' ' + dep) | 224 | d.appendVar('PACKAGES', ' ' + dep) |
| 225 | d.setVar('ALLOW_EMPTY_%s' % dep, '1') | 225 | d.setVar('ALLOW_EMPTY:%s' % dep, '1') |
| 226 | d.setVar('RDEPENDS_%s' % pkgname, mlprefix + dep) | 226 | d.setVar('RDEPENDS:%s' % pkgname, mlprefix + dep) |
| 227 | 227 | ||
| 228 | commands = {} | 228 | commands = {} |
| 229 | 229 | ||
| @@ -293,13 +293,13 @@ python package_do_split_gconvs () { | |||
| 293 | 293 | ||
| 294 | def output_locale(name, locale, encoding): | 294 | def output_locale(name, locale, encoding): |
| 295 | pkgname = d.getVar('MLPREFIX', False) + 'locale-base-' + legitimize_package_name(name) | 295 | pkgname = d.getVar('MLPREFIX', False) + 'locale-base-' + legitimize_package_name(name) |
| 296 | d.setVar('ALLOW_EMPTY_%s' % pkgname, '1') | 296 | d.setVar('ALLOW_EMPTY:%s' % pkgname, '1') |
| 297 | d.setVar('PACKAGES', '%s %s' % (pkgname, d.getVar('PACKAGES'))) | 297 | d.setVar('PACKAGES', '%s %s' % (pkgname, d.getVar('PACKAGES'))) |
| 298 | rprovides = ' %svirtual-locale-%s' % (mlprefix, legitimize_package_name(name)) | 298 | rprovides = ' %svirtual-locale-%s' % (mlprefix, legitimize_package_name(name)) |
| 299 | m = re.match(r"(.*)_(.*)", name) | 299 | m = re.match(r"(.*)_(.*)", name) |
| 300 | if m: | 300 | if m: |
| 301 | rprovides += ' %svirtual-locale-%s' % (mlprefix, m.group(1)) | 301 | rprovides += ' %svirtual-locale-%s' % (mlprefix, m.group(1)) |
| 302 | d.setVar('RPROVIDES_%s' % pkgname, rprovides) | 302 | d.setVar('RPROVIDES:%s' % pkgname, rprovides) |
| 303 | 303 | ||
| 304 | if use_bin == "compile": | 304 | if use_bin == "compile": |
| 305 | output_locale_binary_rdepends(name, pkgname, locale, encoding) | 305 | output_locale_binary_rdepends(name, pkgname, locale, encoding) |
| @@ -343,7 +343,7 @@ python package_do_split_gconvs () { | |||
| 343 | def metapkg_hook(file, pkg, pattern, format, basename): | 343 | def metapkg_hook(file, pkg, pattern, format, basename): |
| 344 | name = basename.split('/', 1)[0] | 344 | name = basename.split('/', 1)[0] |
| 345 | metapkg = legitimize_package_name('%s-binary-localedata-%s' % (mlprefix+bpn, name)) | 345 | metapkg = legitimize_package_name('%s-binary-localedata-%s' % (mlprefix+bpn, name)) |
| 346 | d.appendVar('RDEPENDS_%s' % metapkg, ' ' + pkg) | 346 | d.appendVar('RDEPENDS:%s' % metapkg, ' ' + pkg) |
| 347 | 347 | ||
| 348 | if use_bin == "compile": | 348 | if use_bin == "compile": |
| 349 | makefile = oe.path.join(d.getVar("WORKDIR"), "locale-tree", "Makefile") | 349 | makefile = oe.path.join(d.getVar("WORKDIR"), "locale-tree", "Makefile") |
| @@ -379,6 +379,6 @@ python package_do_split_gconvs () { | |||
| 379 | 379 | ||
| 380 | # We want to do this indirection so that we can safely 'return' | 380 | # We want to do this indirection so that we can safely 'return' |
| 381 | # from the called function even though we're prepending | 381 | # from the called function even though we're prepending |
| 382 | python populate_packages_prepend () { | 382 | python populate_packages:prepend () { |
| 383 | bb.build.exec_func('package_do_split_gconvs', d) | 383 | bb.build.exec_func('package_do_split_gconvs', d) |
| 384 | } | 384 | } |
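Prepending to a Python function follows the same pattern as the shell case; a minimal sketch:

    python populate_packages:prepend () {
        bb.note("extra package splitting for %s" % d.getVar('PN'))
    }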
diff --git a/meta/classes/license.bbclass b/meta/classes/license.bbclass index c87473cbb8..fecbc49497 100644 --- a/meta/classes/license.bbclass +++ b/meta/classes/license.bbclass | |||
| @@ -6,7 +6,7 @@ | |||
| 6 | LICENSE_DIRECTORY ??= "${DEPLOY_DIR}/licenses" | 6 | LICENSE_DIRECTORY ??= "${DEPLOY_DIR}/licenses" |
| 7 | LICSSTATEDIR = "${WORKDIR}/license-destdir/" | 7 | LICSSTATEDIR = "${WORKDIR}/license-destdir/" |
| 8 | 8 | ||
| 9 | # Create extra package with license texts and add it to RRECOMMENDS_${PN} | 9 | # Create extra package with license texts and add it to RRECOMMENDS:${PN} |
| 10 | LICENSE_CREATE_PACKAGE[type] = "boolean" | 10 | LICENSE_CREATE_PACKAGE[type] = "boolean" |
| 11 | LICENSE_CREATE_PACKAGE ??= "0" | 11 | LICENSE_CREATE_PACKAGE ??= "0" |
| 12 | LICENSE_PACKAGE_SUFFIX ??= "-lic" | 12 | LICENSE_PACKAGE_SUFFIX ??= "-lic" |
| @@ -32,8 +32,8 @@ python do_populate_lic() { | |||
| 32 | } | 32 | } |
| 33 | 33 | ||
| 34 | PSEUDO_IGNORE_PATHS .= ",${@','.join(((d.getVar('COMMON_LICENSE_DIR') or '') + ' ' + (d.getVar('LICENSE_PATH') or '')).split())}" | 34 | PSEUDO_IGNORE_PATHS .= ",${@','.join(((d.getVar('COMMON_LICENSE_DIR') or '') + ' ' + (d.getVar('LICENSE_PATH') or '')).split())}" |
| 35 | # it would be better to copy them in do_install_append, but find_license_filesa is python | 35 | # it would be better to copy them in do_install:append, but find_license_filesa is python |
| 36 | python perform_packagecopy_prepend () { | 36 | python perform_packagecopy:prepend () { |
| 37 | enabled = oe.data.typed_value('LICENSE_CREATE_PACKAGE', d) | 37 | enabled = oe.data.typed_value('LICENSE_CREATE_PACKAGE', d) |
| 38 | if d.getVar('CLASSOVERRIDE') == 'class-target' and enabled: | 38 | if d.getVar('CLASSOVERRIDE') == 'class-target' and enabled: |
| 39 | lic_files_paths = find_license_files(d) | 39 | lic_files_paths = find_license_files(d) |
| @@ -62,7 +62,7 @@ def add_package_and_files(d): | |||
| 62 | else: | 62 | else: |
| 63 | # first in PACKAGES to be sure that nothing else gets LICENSE_FILES_DIRECTORY | 63 | # first in PACKAGES to be sure that nothing else gets LICENSE_FILES_DIRECTORY |
| 64 | d.setVar('PACKAGES', "%s %s" % (pn_lic, packages)) | 64 | d.setVar('PACKAGES', "%s %s" % (pn_lic, packages)) |
| 65 | d.setVar('FILES_' + pn_lic, files) | 65 | d.setVar('FILES:' + pn_lic, files) |
| 66 | 66 | ||
| 67 | def copy_license_files(lic_files_paths, destdir): | 67 | def copy_license_files(lic_files_paths, destdir): |
| 68 | import shutil | 68 | import shutil |
| @@ -324,7 +324,7 @@ def incompatible_license(d, dont_want_licenses, package=None): | |||
| 324 | as canonical (SPDX) names. | 324 | as canonical (SPDX) names. |
| 325 | """ | 325 | """ |
| 326 | import oe.license | 326 | import oe.license |
| 327 | license = d.getVar("LICENSE_%s" % package) if package else None | 327 | license = d.getVar("LICENSE:%s" % package) if package else None |
| 328 | if not license: | 328 | if not license: |
| 329 | license = d.getVar('LICENSE') | 329 | license = d.getVar('LICENSE') |
| 330 | 330 | ||
| @@ -419,7 +419,7 @@ SSTATETASKS += "do_populate_lic" | |||
| 419 | do_populate_lic[sstate-inputdirs] = "${LICSSTATEDIR}" | 419 | do_populate_lic[sstate-inputdirs] = "${LICSSTATEDIR}" |
| 420 | do_populate_lic[sstate-outputdirs] = "${LICENSE_DIRECTORY}/" | 420 | do_populate_lic[sstate-outputdirs] = "${LICENSE_DIRECTORY}/" |
| 421 | 421 | ||
| 422 | IMAGE_CLASSES_append = " license_image" | 422 | IMAGE_CLASSES:append = " license_image" |
| 423 | 423 | ||
| 424 | python do_populate_lic_setscene () { | 424 | python do_populate_lic_setscene () { |
| 425 | sstate_setscene(d) | 425 | sstate_setscene(d) |
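Per-package licensing uses the same separator, and each LICENSE:<pkg> value must stay within the recipe-level LICENSE or the unlisted-pkg-lics check earlier in this diff will trigger; the license names below are illustrative:

    LICENSE = "GPLv2 & MIT"
    LICENSE:${PN} = "GPLv2"
    LICENSE:${PN}-doc = "MIT"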
diff --git a/meta/classes/license_image.bbclass b/meta/classes/license_image.bbclass index 5dbec288a4..5cec7a07fa 100644 --- a/meta/classes/license_image.bbclass +++ b/meta/classes/license_image.bbclass | |||
| @@ -39,7 +39,7 @@ python license_create_manifest() { | |||
| 39 | 39 | ||
| 40 | pkg_dic[pkg_name] = oe.packagedata.read_pkgdatafile(pkg_info) | 40 | pkg_dic[pkg_name] = oe.packagedata.read_pkgdatafile(pkg_info) |
| 41 | if not "LICENSE" in pkg_dic[pkg_name].keys(): | 41 | if not "LICENSE" in pkg_dic[pkg_name].keys(): |
| 42 | pkg_lic_name = "LICENSE_" + pkg_name | 42 | pkg_lic_name = "LICENSE:" + pkg_name |
| 43 | pkg_dic[pkg_name]["LICENSE"] = pkg_dic[pkg_name][pkg_lic_name] | 43 | pkg_dic[pkg_name]["LICENSE"] = pkg_dic[pkg_name][pkg_lic_name] |
| 44 | 44 | ||
| 45 | rootfs_license_manifest = os.path.join(d.getVar('LICENSE_DIRECTORY'), | 45 | rootfs_license_manifest = os.path.join(d.getVar('LICENSE_DIRECTORY'), |
| @@ -269,7 +269,7 @@ def get_deployed_files(man_file): | |||
| 269 | dep_files.append(os.path.basename(f)) | 269 | dep_files.append(os.path.basename(f)) |
| 270 | return dep_files | 270 | return dep_files |
| 271 | 271 | ||
| 272 | ROOTFS_POSTPROCESS_COMMAND_prepend = "write_package_manifest; license_create_manifest; " | 272 | ROOTFS_POSTPROCESS_COMMAND:prepend = "write_package_manifest; license_create_manifest; " |
| 273 | do_rootfs[recrdeptask] += "do_populate_lic" | 273 | do_rootfs[recrdeptask] += "do_populate_lic" |
| 274 | 274 | ||
| 275 | python do_populate_lic_deploy() { | 275 | python do_populate_lic_deploy() { |
diff --git a/meta/classes/linux-dummy.bbclass b/meta/classes/linux-dummy.bbclass index cd8791557d..9a06a509dd 100644 --- a/meta/classes/linux-dummy.bbclass +++ b/meta/classes/linux-dummy.bbclass | |||
| @@ -6,7 +6,7 @@ python __anonymous () { | |||
| 6 | 6 | ||
| 7 | # set an empty package of kernel-devicetree | 7 | # set an empty package of kernel-devicetree |
| 8 | d.appendVar('PACKAGES', ' %s-devicetree' % kname) | 8 | d.appendVar('PACKAGES', ' %s-devicetree' % kname) |
| 9 | d.setVar('ALLOW_EMPTY_%s-devicetree' % kname, '1') | 9 | d.setVar('ALLOW_EMPTY:%s-devicetree' % kname, '1') |
| 10 | 10 | ||
| 11 | # Merge KERNEL_IMAGETYPE and KERNEL_ALT_IMAGETYPE into KERNEL_IMAGETYPES | 11 | # Merge KERNEL_IMAGETYPE and KERNEL_ALT_IMAGETYPE into KERNEL_IMAGETYPES |
| 12 | type = d.getVar('KERNEL_IMAGETYPE') or "" | 12 | type = d.getVar('KERNEL_IMAGETYPE') or "" |
| @@ -21,6 +21,6 @@ python __anonymous () { | |||
| 21 | for type in types.split(): | 21 | for type in types.split(): |
| 22 | typelower = type.lower() | 22 | typelower = type.lower() |
| 23 | d.appendVar('PACKAGES', ' %s-image-%s' % (kname, typelower)) | 23 | d.appendVar('PACKAGES', ' %s-image-%s' % (kname, typelower)) |
| 24 | d.setVar('ALLOW_EMPTY_%s-image-%s' % (kname, typelower), '1') | 24 | d.setVar('ALLOW_EMPTY:%s-image-%s' % (kname, typelower), '1') |
| 25 | } | 25 | } |
| 26 | 26 | ||
diff --git a/meta/classes/manpages.bbclass b/meta/classes/manpages.bbclass index 1e66780646..3a96659460 100644 --- a/meta/classes/manpages.bbclass +++ b/meta/classes/manpages.bbclass | |||
| @@ -2,7 +2,7 @@ | |||
| 2 | # depending on whether 'api-documentation' is in DISTRO_FEATURES. Such building | 2 | # depending on whether 'api-documentation' is in DISTRO_FEATURES. Such building |
| 3 | # tends to pull in the entire XML stack and other tools, so it's not enabled | 3 | # tends to pull in the entire XML stack and other tools, so it's not enabled |
| 4 | # by default. | 4 | # by default. |
| 5 | PACKAGECONFIG_append_class-target = " ${@bb.utils.contains('DISTRO_FEATURES', 'api-documentation', 'manpages', '', d)}" | 5 | PACKAGECONFIG:append:class-target = " ${@bb.utils.contains('DISTRO_FEATURES', 'api-documentation', 'manpages', '', d)}" |
| 6 | 6 | ||
| 7 | inherit qemu | 7 | inherit qemu |
| 8 | 8 | ||
| @@ -10,9 +10,9 @@ inherit qemu | |||
| 10 | MAN_PKG ?= "${PN}-doc" | 10 | MAN_PKG ?= "${PN}-doc" |
| 11 | 11 | ||
| 12 | # only add man-db to RDEPENDS when manual files are built and installed | 12 | # only add man-db to RDEPENDS when manual files are built and installed |
| 13 | RDEPENDS_${MAN_PKG} += "${@bb.utils.contains('PACKAGECONFIG', 'manpages', 'man-db', '', d)}" | 13 | RDEPENDS:${MAN_PKG} += "${@bb.utils.contains('PACKAGECONFIG', 'manpages', 'man-db', '', d)}" |
| 14 | 14 | ||
| 15 | pkg_postinst_append_${MAN_PKG} () { | 15 | pkg_postinst:append:${MAN_PKG} () { |
| 16 | # only update manual page index caches when manual files are built and installed | 16 | # only update manual page index caches when manual files are built and installed |
| 17 | if ${@bb.utils.contains('PACKAGECONFIG', 'manpages', 'true', 'false', d)}; then | 17 | if ${@bb.utils.contains('PACKAGECONFIG', 'manpages', 'true', 'false', d)}; then |
| 18 | if test -n "$D"; then | 18 | if test -n "$D"; then |
| @@ -36,7 +36,7 @@ pkg_postinst_append_${MAN_PKG} () { | |||
| 36 | fi | 36 | fi |
| 37 | } | 37 | } |
| 38 | 38 | ||
| 39 | pkg_postrm_append_${MAN_PKG} () { | 39 | pkg_postrm:append:${MAN_PKG} () { |
| 40 | # only update manual page index caches when manual files are built and installed | 40 | # only update manual page index caches when manual files are built and installed |
| 41 | if ${@bb.utils.contains('PACKAGECONFIG', 'manpages', 'true', 'false', d)}; then | 41 | if ${@bb.utils.contains('PACKAGECONFIG', 'manpages', 'true', 'false', d)}; then |
| 42 | mandb -q | 42 | mandb -q |
diff --git a/meta/classes/meson.bbclass b/meta/classes/meson.bbclass index 8ae0285f72..2beed89d11 100644 --- a/meta/classes/meson.bbclass +++ b/meta/classes/meson.bbclass | |||
| @@ -1,6 +1,6 @@ | |||
| 1 | inherit python3native meson-routines | 1 | inherit python3native meson-routines |
| 2 | 2 | ||
| 3 | DEPENDS_append = " meson-native ninja-native" | 3 | DEPENDS:append = " meson-native ninja-native" |
| 4 | 4 | ||
| 5 | # As Meson enforces out-of-tree builds we can just use cleandirs | 5 | # As Meson enforces out-of-tree builds we can just use cleandirs |
| 6 | B = "${WORKDIR}/build" | 6 | B = "${WORKDIR}/build" |
| @@ -29,11 +29,11 @@ MESONOPTS = " --prefix ${prefix} \ | |||
| 29 | --wrap-mode nodownload \ | 29 | --wrap-mode nodownload \ |
| 30 | --native-file ${WORKDIR}/meson.native" | 30 | --native-file ${WORKDIR}/meson.native" |
| 31 | 31 | ||
| 32 | EXTRA_OEMESON_append = " ${PACKAGECONFIG_CONFARGS}" | 32 | EXTRA_OEMESON:append = " ${PACKAGECONFIG_CONFARGS}" |
| 33 | 33 | ||
| 34 | MESON_CROSS_FILE = "" | 34 | MESON_CROSS_FILE = "" |
| 35 | MESON_CROSS_FILE_class-target = "--cross-file ${WORKDIR}/meson.cross" | 35 | MESON_CROSS_FILE:class-target = "--cross-file ${WORKDIR}/meson.cross" |
| 36 | MESON_CROSS_FILE_class-nativesdk = "--cross-file ${WORKDIR}/meson.cross" | 36 | MESON_CROSS_FILE:class-nativesdk = "--cross-file ${WORKDIR}/meson.cross" |
| 37 | 37 | ||
| 38 | addtask write_config before do_configure | 38 | addtask write_config before do_configure |
| 39 | do_write_config[vardeps] += "CC CXX LD AR NM STRIP READELF CFLAGS CXXFLAGS LDFLAGS" | 39 | do_write_config[vardeps] += "CC CXX LD AR NM STRIP READELF CFLAGS CXXFLAGS LDFLAGS" |
diff --git a/meta/classes/mime-xdg.bbclass b/meta/classes/mime-xdg.bbclass index 642a5b7595..271f48dd72 100644 --- a/meta/classes/mime-xdg.bbclass +++ b/meta/classes/mime-xdg.bbclass | |||
| @@ -34,7 +34,7 @@ else | |||
| 34 | fi | 34 | fi |
| 35 | } | 35 | } |
| 36 | 36 | ||
| 37 | python populate_packages_append () { | 37 | python populate_packages:append () { |
| 38 | packages = d.getVar('PACKAGES').split() | 38 | packages = d.getVar('PACKAGES').split() |
| 39 | pkgdest = d.getVar('PKGDEST') | 39 | pkgdest = d.getVar('PKGDEST') |
| 40 | desktop_base = d.getVar('DESKTOPDIR') | 40 | desktop_base = d.getVar('DESKTOPDIR') |
| @@ -59,16 +59,16 @@ python populate_packages_append () { | |||
| 59 | break | 59 | break |
| 60 | if desktops_with_mime_found: | 60 | if desktops_with_mime_found: |
| 61 | bb.note("adding mime-xdg postinst and postrm scripts to %s" % pkg) | 61 | bb.note("adding mime-xdg postinst and postrm scripts to %s" % pkg) |
| 62 | postinst = d.getVar('pkg_postinst_%s' % pkg) | 62 | postinst = d.getVar('pkg_postinst:%s' % pkg) |
| 63 | if not postinst: | 63 | if not postinst: |
| 64 | postinst = '#!/bin/sh\n' | 64 | postinst = '#!/bin/sh\n' |
| 65 | postinst += d.getVar('mime_xdg_postinst') | 65 | postinst += d.getVar('mime_xdg_postinst') |
| 66 | d.setVar('pkg_postinst_%s' % pkg, postinst) | 66 | d.setVar('pkg_postinst:%s' % pkg, postinst) |
| 67 | postrm = d.getVar('pkg_postrm_%s' % pkg) | 67 | postrm = d.getVar('pkg_postrm:%s' % pkg) |
| 68 | if not postrm: | 68 | if not postrm: |
| 69 | postrm = '#!/bin/sh\n' | 69 | postrm = '#!/bin/sh\n' |
| 70 | postrm += d.getVar('mime_xdg_postrm') | 70 | postrm += d.getVar('mime_xdg_postrm') |
| 71 | d.setVar('pkg_postrm_%s' % pkg, postrm) | 71 | d.setVar('pkg_postrm:%s' % pkg, postrm) |
| 72 | bb.note("adding desktop-file-utils dependency to %s" % pkg) | 72 | bb.note("adding desktop-file-utils dependency to %s" % pkg) |
| 73 | d.appendVar('RDEPENDS_' + pkg, " " + d.getVar('MLPREFIX')+"desktop-file-utils") | 73 | d.appendVar('RDEPENDS:' + pkg, " " + d.getVar('MLPREFIX')+"desktop-file-utils") |
| 74 | } | 74 | } |
diff --git a/meta/classes/mime.bbclass b/meta/classes/mime.bbclass index bb99bc35cb..8d176a884e 100644 --- a/meta/classes/mime.bbclass +++ b/meta/classes/mime.bbclass | |||
| @@ -39,7 +39,7 @@ fi | |||
| 39 | fi | 39 | fi |
| 40 | } | 40 | } |
| 41 | 41 | ||
| 42 | python populate_packages_append () { | 42 | python populate_packages:append () { |
| 43 | packages = d.getVar('PACKAGES').split() | 43 | packages = d.getVar('PACKAGES').split() |
| 44 | pkgdest = d.getVar('PKGDEST') | 44 | pkgdest = d.getVar('PKGDEST') |
| 45 | mimedir = d.getVar('MIMEDIR') | 45 | mimedir = d.getVar('MIMEDIR') |
| @@ -54,17 +54,17 @@ python populate_packages_append () { | |||
| 54 | break | 54 | break |
| 55 | if mimes_types_found: | 55 | if mimes_types_found: |
| 56 | bb.note("adding mime postinst and postrm scripts to %s" % pkg) | 56 | bb.note("adding mime postinst and postrm scripts to %s" % pkg) |
| 57 | postinst = d.getVar('pkg_postinst_%s' % pkg) | 57 | postinst = d.getVar('pkg_postinst:%s' % pkg) |
| 58 | if not postinst: | 58 | if not postinst: |
| 59 | postinst = '#!/bin/sh\n' | 59 | postinst = '#!/bin/sh\n' |
| 60 | postinst += d.getVar('mime_postinst') | 60 | postinst += d.getVar('mime_postinst') |
| 61 | d.setVar('pkg_postinst_%s' % pkg, postinst) | 61 | d.setVar('pkg_postinst:%s' % pkg, postinst) |
| 62 | postrm = d.getVar('pkg_postrm_%s' % pkg) | 62 | postrm = d.getVar('pkg_postrm:%s' % pkg) |
| 63 | if not postrm: | 63 | if not postrm: |
| 64 | postrm = '#!/bin/sh\n' | 64 | postrm = '#!/bin/sh\n' |
| 65 | postrm += d.getVar('mime_postrm') | 65 | postrm += d.getVar('mime_postrm') |
| 66 | d.setVar('pkg_postrm_%s' % pkg, postrm) | 66 | d.setVar('pkg_postrm:%s' % pkg, postrm) |
| 67 | if pkg != 'shared-mime-info-data': | 67 | if pkg != 'shared-mime-info-data': |
| 68 | bb.note("adding shared-mime-info-data dependency to %s" % pkg) | 68 | bb.note("adding shared-mime-info-data dependency to %s" % pkg) |
| 69 | d.appendVar('RDEPENDS_' + pkg, " " + d.getVar('MLPREFIX')+"shared-mime-info-data") | 69 | d.appendVar('RDEPENDS:' + pkg, " " + d.getVar('MLPREFIX')+"shared-mime-info-data") |
| 70 | } | 70 | } |
diff --git a/meta/classes/module.bbclass b/meta/classes/module.bbclass index c0dfa35061..a09ec3ed1e 100644 --- a/meta/classes/module.bbclass +++ b/meta/classes/module.bbclass | |||
| @@ -14,7 +14,7 @@ python __anonymous () { | |||
| 14 | d.setVar('KBUILD_EXTRA_SYMBOLS', " ".join(extra_symbols)) | 14 | d.setVar('KBUILD_EXTRA_SYMBOLS', " ".join(extra_symbols)) |
| 15 | } | 15 | } |
| 16 | 16 | ||
| 17 | python do_devshell_prepend () { | 17 | python do_devshell:prepend () { |
| 18 | os.environ['CFLAGS'] = '' | 18 | os.environ['CFLAGS'] = '' |
| 19 | os.environ['CPPFLAGS'] = '' | 19 | os.environ['CPPFLAGS'] = '' |
| 20 | os.environ['CXXFLAGS'] = '' | 20 | os.environ['CXXFLAGS'] = '' |
| @@ -70,5 +70,5 @@ EXPORT_FUNCTIONS do_compile do_install | |||
| 70 | 70 | ||
| 71 | # add all splitted modules to PN RDEPENDS, PN can be empty now | 71 | # add all splitted modules to PN RDEPENDS, PN can be empty now |
| 72 | KERNEL_MODULES_META_PACKAGE = "${PN}" | 72 | KERNEL_MODULES_META_PACKAGE = "${PN}" |
| 73 | FILES_${PN} = "" | 73 | FILES:${PN} = "" |
| 74 | ALLOW_EMPTY_${PN} = "1" | 74 | ALLOW_EMPTY:${PN} = "1" |
diff --git a/meta/classes/multilib.bbclass b/meta/classes/multilib.bbclass index 2ef75c0d16..c3be89767a 100644 --- a/meta/classes/multilib.bbclass +++ b/meta/classes/multilib.bbclass | |||
| @@ -176,7 +176,7 @@ def reset_alternative_priority(d): | |||
| 176 | bb.debug(1, '%s: Setting ALTERNATIVE_PRIORITY_%s to %s' % (pkg, pkg, reset_priority)) | 176 | bb.debug(1, '%s: Setting ALTERNATIVE_PRIORITY_%s to %s' % (pkg, pkg, reset_priority)) |
| 177 | d.setVar('ALTERNATIVE_PRIORITY_%s' % pkg, reset_priority) | 177 | d.setVar('ALTERNATIVE_PRIORITY_%s' % pkg, reset_priority) |
| 178 | 178 | ||
| 179 | for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg) or "").split(): | 179 | for alt_name in (d.getVar('ALTERNATIVE:%s' % pkg) or "").split(): |
| 180 | # ALTERNATIVE_PRIORITY_pkg[tool] = priority | 180 | # ALTERNATIVE_PRIORITY_pkg[tool] = priority |
| 181 | alt_priority_pkg_name = d.getVarFlag('ALTERNATIVE_PRIORITY_%s' % pkg, alt_name) | 181 | alt_priority_pkg_name = d.getVarFlag('ALTERNATIVE_PRIORITY_%s' % pkg, alt_name) |
| 182 | # ALTERNATIVE_PRIORITY[tool] = priority | 182 | # ALTERNATIVE_PRIORITY[tool] = priority |
| @@ -191,7 +191,7 @@ def reset_alternative_priority(d): | |||
| 191 | bb.debug(1, '%s: Setting ALTERNATIVE_PRIORITY[%s] to %s' % (pkg, alt_name, reset_priority)) | 191 | bb.debug(1, '%s: Setting ALTERNATIVE_PRIORITY[%s] to %s' % (pkg, alt_name, reset_priority)) |
| 192 | d.setVarFlag('ALTERNATIVE_PRIORITY', alt_name, reset_priority) | 192 | d.setVarFlag('ALTERNATIVE_PRIORITY', alt_name, reset_priority) |
| 193 | 193 | ||
| 194 | PACKAGEFUNCS_append = " do_package_qa_multilib" | 194 | PACKAGEFUNCS:append = " do_package_qa_multilib" |
| 195 | 195 | ||
| 196 | python do_package_qa_multilib() { | 196 | python do_package_qa_multilib() { |
| 197 | 197 | ||
diff --git a/meta/classes/multilib_global.bbclass b/meta/classes/multilib_global.bbclass index 96257de5ca..dae015cdaf 100644 --- a/meta/classes/multilib_global.bbclass +++ b/meta/classes/multilib_global.bbclass | |||
| @@ -164,8 +164,8 @@ def preferred_ml_updates(d): | |||
| 164 | python multilib_virtclass_handler_vendor () { | 164 | python multilib_virtclass_handler_vendor () { |
| 165 | if isinstance(e, bb.event.ConfigParsed): | 165 | if isinstance(e, bb.event.ConfigParsed): |
| 166 | for v in e.data.getVar("MULTILIB_VARIANTS").split(): | 166 | for v in e.data.getVar("MULTILIB_VARIANTS").split(): |
| 167 | if e.data.getVar("TARGET_VENDOR_virtclass-multilib-" + v, False) is None: | 167 | if e.data.getVar("TARGET_VENDOR:virtclass-multilib-" + v, False) is None: |
| 168 | e.data.setVar("TARGET_VENDOR_virtclass-multilib-" + v, e.data.getVar("TARGET_VENDOR", False) + "ml" + v) | 168 | e.data.setVar("TARGET_VENDOR:virtclass-multilib-" + v, e.data.getVar("TARGET_VENDOR", False) + "ml" + v) |
| 169 | preferred_ml_updates(e.data) | 169 | preferred_ml_updates(e.data) |
| 170 | } | 170 | } |
| 171 | addhandler multilib_virtclass_handler_vendor | 171 | addhandler multilib_virtclass_handler_vendor |
| @@ -207,13 +207,13 @@ python multilib_virtclass_handler_global () { | |||
| 207 | if rprovs.strip(): | 207 | if rprovs.strip(): |
| 208 | e.data.setVar("RPROVIDES", rprovs) | 208 | e.data.setVar("RPROVIDES", rprovs) |
| 209 | 209 | ||
| 210 | # Process RPROVIDES_${PN}... | 210 | # Process RPROVIDES:${PN}... |
| 211 | for pkg in (e.data.getVar("PACKAGES") or "").split(): | 211 | for pkg in (e.data.getVar("PACKAGES") or "").split(): |
| 212 | origrprovs = rprovs = localdata.getVar("RPROVIDES_%s" % pkg) or "" | 212 | origrprovs = rprovs = localdata.getVar("RPROVIDES:%s" % pkg) or "" |
| 213 | for clsextend in clsextends: | 213 | for clsextend in clsextends: |
| 214 | rprovs = rprovs + " " + clsextend.map_variable("RPROVIDES_%s" % pkg, setvar=False) | 214 | rprovs = rprovs + " " + clsextend.map_variable("RPROVIDES:%s" % pkg, setvar=False) |
| 215 | rprovs = rprovs + " " + clsextend.extname + "-" + pkg | 215 | rprovs = rprovs + " " + clsextend.extname + "-" + pkg |
| 216 | e.data.setVar("RPROVIDES_%s" % pkg, rprovs) | 216 | e.data.setVar("RPROVIDES:%s" % pkg, rprovs) |
| 217 | } | 217 | } |
| 218 | 218 | ||
| 219 | addhandler multilib_virtclass_handler_global | 219 | addhandler multilib_virtclass_handler_global |
diff --git a/meta/classes/multilib_header.bbclass b/meta/classes/multilib_header.bbclass index e03f5b13b2..efbc24f59b 100644 --- a/meta/classes/multilib_header.bbclass +++ b/meta/classes/multilib_header.bbclass | |||
| @@ -42,11 +42,11 @@ oe_multilib_header() { | |||
| 42 | 42 | ||
| 43 | # Dependencies on arch variables like MIPSPKGSFX_ABI can be problematic. | 43 | # Dependencies on arch variables like MIPSPKGSFX_ABI can be problematic. |
| 44 | # We don't need multilib headers for native builds so brute force things. | 44 | # We don't need multilib headers for native builds so brute force things. |
| 45 | oe_multilib_header_class-native () { | 45 | oe_multilib_header:class-native () { |
| 46 | return | 46 | return |
| 47 | } | 47 | } |
| 48 | 48 | ||
| 49 | # Nor do we need multilib headers for nativesdk builds. | 49 | # Nor do we need multilib headers for nativesdk builds. |
| 50 | oe_multilib_header_class-nativesdk () { | 50 | oe_multilib_header:class-nativesdk () { |
| 51 | return | 51 | return |
| 52 | } | 52 | } |
diff --git a/meta/classes/multilib_script.bbclass b/meta/classes/multilib_script.bbclass index b11efc1ec5..41597341cd 100644 --- a/meta/classes/multilib_script.bbclass +++ b/meta/classes/multilib_script.bbclass | |||
| @@ -26,9 +26,9 @@ python () { | |||
| 26 | pkg, script = entry.split(":") | 26 | pkg, script = entry.split(":") |
| 27 | epkg = d.expand(pkg) | 27 | epkg = d.expand(pkg) |
| 28 | scriptname = os.path.basename(script) | 28 | scriptname = os.path.basename(script) |
| 29 | d.appendVar("ALTERNATIVE_" + epkg, " " + scriptname + " ") | 29 | d.appendVar("ALTERNATIVE:" + epkg, " " + scriptname + " ") |
| 30 | d.setVarFlag("ALTERNATIVE_LINK_NAME", scriptname, script) | 30 | d.setVarFlag("ALTERNATIVE_LINK_NAME", scriptname, script) |
| 31 | d.setVarFlag("ALTERNATIVE_TARGET", scriptname, script + "-${MULTILIB_SUFFIX}") | 31 | d.setVarFlag("ALTERNATIVE_TARGET", scriptname, script + "-${MULTILIB_SUFFIX}") |
| 32 | d.appendVar("multilibscript_rename", "\n mv ${PKGD}" + script + " ${PKGD}" + script + "-${MULTILIB_SUFFIX}") | 32 | d.appendVar("multilibscript_rename", "\n mv ${PKGD}" + script + " ${PKGD}" + script + "-${MULTILIB_SUFFIX}") |
| 33 | d.appendVar("FILES_" + epkg, " " + script + "-${MULTILIB_SUFFIX}") | 33 | d.appendVar("FILES:" + epkg, " " + script + "-${MULTILIB_SUFFIX}") |
| 34 | } | 34 | } |
diff --git a/meta/classes/native.bbclass b/meta/classes/native.bbclass index 561cc23f68..61becfb6d5 100644 --- a/meta/classes/native.bbclass +++ b/meta/classes/native.bbclass | |||
| @@ -9,7 +9,7 @@ PACKAGE_ARCH = "${BUILD_ARCH}" | |||
| 9 | 9 | ||
| 10 | # used by cmake class | 10 | # used by cmake class |
| 11 | OECMAKE_RPATH = "${libdir}" | 11 | OECMAKE_RPATH = "${libdir}" |
| 12 | OECMAKE_RPATH_class-native = "${libdir}" | 12 | OECMAKE_RPATH:class-native = "${libdir}" |
| 13 | 13 | ||
| 14 | TARGET_ARCH = "${BUILD_ARCH}" | 14 | TARGET_ARCH = "${BUILD_ARCH}" |
| 15 | TARGET_OS = "${BUILD_OS}" | 15 | TARGET_OS = "${BUILD_OS}" |
| @@ -106,7 +106,7 @@ CLASSOVERRIDE = "class-native" | |||
| 106 | MACHINEOVERRIDES = "" | 106 | MACHINEOVERRIDES = "" |
| 107 | MACHINE_FEATURES = "" | 107 | MACHINE_FEATURES = "" |
| 108 | 108 | ||
| 109 | PATH_prepend = "${COREBASE}/scripts/native-intercept:" | 109 | PATH:prepend = "${COREBASE}/scripts/native-intercept:" |
| 110 | 110 | ||
| 111 | # This class encodes staging paths into its scripts data so can only be | 111 | # This class encodes staging paths into its scripts data so can only be |
| 112 | # reused if we manipulate the paths. | 112 | # reused if we manipulate the paths. |
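Class overrides such as class-native and class-nativesdk attach with the same separator, whether replacing a value or appending to it; the variable and values here are hypothetical:

    MYTOOL_ARGS = "--target-mode"
    MYTOOL_ARGS:class-native = "--host-mode"
    MYTOOL_ARGS:append:class-nativesdk = " --sdk"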
diff --git a/meta/classes/nativesdk.bbclass b/meta/classes/nativesdk.bbclass index 7f2692c51a..c66de8c787 100644 --- a/meta/classes/nativesdk.bbclass +++ b/meta/classes/nativesdk.bbclass | |||
| @@ -28,7 +28,7 @@ PACKAGE_ARCHS = "${SDK_PACKAGE_ARCHS}" | |||
| 28 | # We need chrpath >= 0.14 to ensure we can deal with 32 and 64 bit | 28 | # We need chrpath >= 0.14 to ensure we can deal with 32 and 64 bit |
| 29 | # binaries | 29 | # binaries |
| 30 | # | 30 | # |
| 31 | DEPENDS_append = " chrpath-replacement-native" | 31 | DEPENDS:append = " chrpath-replacement-native" |
| 32 | EXTRANATIVEPATH += "chrpath-native" | 32 | EXTRANATIVEPATH += "chrpath-native" |
| 33 | 33 | ||
| 34 | PKGDATA_DIR = "${TMPDIR}/pkgdata/${SDK_SYS}" | 34 | PKGDATA_DIR = "${TMPDIR}/pkgdata/${SDK_SYS}" |
diff --git a/meta/classes/npm.bbclass b/meta/classes/npm.bbclass index 8f8712a024..91a2321116 100644 --- a/meta/classes/npm.bbclass +++ b/meta/classes/npm.bbclass | |||
| @@ -19,8 +19,8 @@ | |||
| 19 | 19 | ||
| 20 | inherit python3native | 20 | inherit python3native |
| 21 | 21 | ||
| 22 | DEPENDS_prepend = "nodejs-native " | 22 | DEPENDS:prepend = "nodejs-native " |
| 23 | RDEPENDS_${PN}_append_class-target = " nodejs" | 23 | RDEPENDS:${PN}:append:class-target = " nodejs" |
| 24 | 24 | ||
| 25 | NPM_INSTALL_DEV ?= "0" | 25 | NPM_INSTALL_DEV ?= "0" |
| 26 | 26 | ||
| @@ -312,7 +312,7 @@ npm_do_install() { | |||
| 312 | ln -fs node_modules ${D}/${nonarch_libdir}/node | 312 | ln -fs node_modules ${D}/${nonarch_libdir}/node |
| 313 | } | 313 | } |
| 314 | 314 | ||
| 315 | FILES_${PN} += " \ | 315 | FILES:${PN} += " \ |
| 316 | ${bindir} \ | 316 | ${bindir} \ |
| 317 | ${nonarch_libdir} \ | 317 | ${nonarch_libdir} \ |
| 318 | " | 318 | " |
diff --git a/meta/classes/own-mirrors.bbclass b/meta/classes/own-mirrors.bbclass index d58a61fe0a..e244de592d 100644 --- a/meta/classes/own-mirrors.bbclass +++ b/meta/classes/own-mirrors.bbclass | |||
| @@ -1,4 +1,4 @@ | |||
| 1 | PREMIRRORS_prepend = " \ | 1 | PREMIRRORS:prepend = " \ |
| 2 | cvs://.*/.* ${SOURCE_MIRROR_URL} \n \ | 2 | cvs://.*/.* ${SOURCE_MIRROR_URL} \n \ |
| 3 | svn://.*/.* ${SOURCE_MIRROR_URL} \n \ | 3 | svn://.*/.* ${SOURCE_MIRROR_URL} \n \ |
| 4 | git://.*/.* ${SOURCE_MIRROR_URL} \n \ | 4 | git://.*/.* ${SOURCE_MIRROR_URL} \n \ |
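For users of this class only the override character changes; a configuration pointing at a local mirror still looks roughly like this (the URL is a placeholder):

    INHERIT += "own-mirrors"
    SOURCE_MIRROR_URL = "http://example.com/source-mirror"

The class's PREMIRRORS:prepend then expands SOURCE_MIRROR_URL for each fetcher scheme as before.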
diff --git a/meta/classes/package.bbclass b/meta/classes/package.bbclass index cf30f33f3d..fb8e9514be 100644 --- a/meta/classes/package.bbclass +++ b/meta/classes/package.bbclass | |||
| @@ -199,7 +199,7 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst | |||
| 199 | packages = [pkg] + packages | 199 | packages = [pkg] + packages |
| 200 | else: | 200 | else: |
| 201 | packages.append(pkg) | 201 | packages.append(pkg) |
| 202 | oldfiles = d.getVar('FILES_' + pkg) | 202 | oldfiles = d.getVar('FILES:' + pkg) |
| 203 | newfile = os.path.join(root, o) | 203 | newfile = os.path.join(root, o) |
| 204 | # These names will be passed through glob() so if the filename actually | 204 | # These names will be passed through glob() so if the filename actually |
| 205 | # contains * or ? (rare, but possible) we need to handle that specially | 205 | # contains * or ? (rare, but possible) we need to handle that specially |
| @@ -219,19 +219,19 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst | |||
| 219 | the_files.append(fp % m.group(1)) | 219 | the_files.append(fp % m.group(1)) |
| 220 | else: | 220 | else: |
| 221 | the_files.append(aux_files_pattern_verbatim % m.group(1)) | 221 | the_files.append(aux_files_pattern_verbatim % m.group(1)) |
| 222 | d.setVar('FILES_' + pkg, " ".join(the_files)) | 222 | d.setVar('FILES:' + pkg, " ".join(the_files)) |
| 223 | else: | 223 | else: |
| 224 | d.setVar('FILES_' + pkg, oldfiles + " " + newfile) | 224 | d.setVar('FILES:' + pkg, oldfiles + " " + newfile) |
| 225 | if extra_depends != '': | 225 | if extra_depends != '': |
| 226 | d.appendVar('RDEPENDS_' + pkg, ' ' + extra_depends) | 226 | d.appendVar('RDEPENDS:' + pkg, ' ' + extra_depends) |
| 227 | if not d.getVar('DESCRIPTION_' + pkg): | 227 | if not d.getVar('DESCRIPTION:' + pkg): |
| 228 | d.setVar('DESCRIPTION_' + pkg, description % on) | 228 | d.setVar('DESCRIPTION:' + pkg, description % on) |
| 229 | if not d.getVar('SUMMARY_' + pkg): | 229 | if not d.getVar('SUMMARY:' + pkg): |
| 230 | d.setVar('SUMMARY_' + pkg, summary % on) | 230 | d.setVar('SUMMARY:' + pkg, summary % on) |
| 231 | if postinst: | 231 | if postinst: |
| 232 | d.setVar('pkg_postinst_' + pkg, postinst) | 232 | d.setVar('pkg_postinst:' + pkg, postinst) |
| 233 | if postrm: | 233 | if postrm: |
| 234 | d.setVar('pkg_postrm_' + pkg, postrm) | 234 | d.setVar('pkg_postrm:' + pkg, postrm) |
| 235 | if callable(hook): | 235 | if callable(hook): |
| 236 | hook(f, pkg, file_regex, output_pattern, m.group(1)) | 236 | hook(f, pkg, file_regex, output_pattern, m.group(1)) |
| 237 | 237 | ||
| @@ -303,7 +303,7 @@ def get_conffiles(pkg, d): | |||
| 303 | cwd = os.getcwd() | 303 | cwd = os.getcwd() |
| 304 | os.chdir(root) | 304 | os.chdir(root) |
| 305 | 305 | ||
| 306 | conffiles = d.getVar('CONFFILES_%s' % pkg); | 306 | conffiles = d.getVar('CONFFILES:%s' % pkg); |
| 307 | if conffiles == None: | 307 | if conffiles == None: |
| 308 | conffiles = d.getVar('CONFFILES') | 308 | conffiles = d.getVar('CONFFILES') |
| 309 | if conffiles == None: | 309 | if conffiles == None: |
| @@ -618,7 +618,7 @@ def get_package_mapping (pkg, basepkg, d, depversions=None): | |||
| 618 | import oe.packagedata | 618 | import oe.packagedata |
| 619 | 619 | ||
| 620 | data = oe.packagedata.read_subpkgdata(pkg, d) | 620 | data = oe.packagedata.read_subpkgdata(pkg, d) |
| 621 | key = "PKG_%s" % pkg | 621 | key = "PKG:%s" % pkg |
| 622 | 622 | ||
| 623 | if key in data: | 623 | if key in data: |
| 624 | # Have to avoid undoing the write_extra_pkgs(global_variants...) | 624 | # Have to avoid undoing the write_extra_pkgs(global_variants...) |
| @@ -627,7 +627,7 @@ def get_package_mapping (pkg, basepkg, d, depversions=None): | |||
| 627 | return pkg | 627 | return pkg |
| 628 | if depversions == []: | 628 | if depversions == []: |
| 629 | # Avoid returning a mapping if the renamed package rprovides its original name | 629 | # Avoid returning a mapping if the renamed package rprovides its original name |
| 630 | rprovkey = "RPROVIDES_%s" % pkg | 630 | rprovkey = "RPROVIDES:%s" % pkg |
| 631 | if rprovkey in data: | 631 | if rprovkey in data: |
| 632 | if pkg in bb.utils.explode_dep_versions2(data[rprovkey]): | 632 | if pkg in bb.utils.explode_dep_versions2(data[rprovkey]): |
| 633 | bb.note("%s rprovides %s, not replacing the latter" % (data[key], pkg)) | 633 | bb.note("%s rprovides %s, not replacing the latter" % (data[key], pkg)) |
| @@ -784,13 +784,13 @@ python package_do_split_locales() { | |||
| 784 | ln = legitimize_package_name(l) | 784 | ln = legitimize_package_name(l) |
| 785 | pkg = pn + '-locale-' + ln | 785 | pkg = pn + '-locale-' + ln |
| 786 | packages.append(pkg) | 786 | packages.append(pkg) |
| 787 | d.setVar('FILES_' + pkg, os.path.join(datadir, 'locale', l)) | 787 | d.setVar('FILES:' + pkg, os.path.join(datadir, 'locale', l)) |
| 788 | d.setVar('RRECOMMENDS_' + pkg, '%svirtual-locale-%s' % (mlprefix, ln)) | 788 | d.setVar('RRECOMMENDS:' + pkg, '%svirtual-locale-%s' % (mlprefix, ln)) |
| 789 | d.setVar('RPROVIDES_' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln)) | 789 | d.setVar('RPROVIDES:' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln)) |
| 790 | d.setVar('SUMMARY_' + pkg, '%s - %s translations' % (summary, l)) | 790 | d.setVar('SUMMARY:' + pkg, '%s - %s translations' % (summary, l)) |
| 791 | d.setVar('DESCRIPTION_' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l)) | 791 | d.setVar('DESCRIPTION:' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l)) |
| 792 | if locale_section: | 792 | if locale_section: |
| 793 | d.setVar('SECTION_' + pkg, locale_section) | 793 | d.setVar('SECTION:' + pkg, locale_section) |
| 794 | 794 | ||
| 795 | d.setVar('PACKAGES', ' '.join(packages)) | 795 | d.setVar('PACKAGES', ' '.join(packages)) |
| 796 | 796 | ||
| @@ -800,9 +800,9 @@ python package_do_split_locales() { | |||
| 800 | # glibc-localedata-translit* won't install as a dependency | 800 | # glibc-localedata-translit* won't install as a dependency |
| 801 | # for some other package which breaks meta-toolchain | 801 | # for some other package which breaks meta-toolchain |
| 802 | # Probably breaks since virtual-locale- isn't provided anywhere | 802 | # Probably breaks since virtual-locale- isn't provided anywhere |
| 803 | #rdep = (d.getVar('RDEPENDS_%s' % pn) or "").split() | 803 | #rdep = (d.getVar('RDEPENDS:%s' % pn) or "").split() |
| 804 | #rdep.append('%s-locale*' % pn) | 804 | #rdep.append('%s-locale*' % pn) |
| 805 | #d.setVar('RDEPENDS_%s' % pn, ' '.join(rdep)) | 805 | #d.setVar('RDEPENDS:%s' % pn, ' '.join(rdep)) |
| 806 | } | 806 | } |
| 807 | 807 | ||
| 808 | python perform_packagecopy () { | 808 | python perform_packagecopy () { |
| @@ -1199,7 +1199,7 @@ python split_and_strip_files () { | |||
| 1199 | # ...but is it ELF, and is it already stripped? | 1199 | # ...but is it ELF, and is it already stripped? |
| 1200 | if elf_file & 1: | 1200 | if elf_file & 1: |
| 1201 | if elf_file & 2: | 1201 | if elf_file & 2: |
| 1202 | if 'already-stripped' in (d.getVar('INSANE_SKIP_' + pn) or "").split(): | 1202 | if 'already-stripped' in (d.getVar('INSANE_SKIP:' + pn) or "").split(): |
| 1203 | bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn)) | 1203 | bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn)) |
| 1204 | else: | 1204 | else: |
| 1205 | msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn) | 1205 | msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn) |
| @@ -1344,7 +1344,7 @@ python populate_packages () { | |||
| 1344 | src_package_name = ('%s-src' % d.getVar('PN')) | 1344 | src_package_name = ('%s-src' % d.getVar('PN')) |
| 1345 | if not src_package_name in packages: | 1345 | if not src_package_name in packages: |
| 1346 | packages.append(src_package_name) | 1346 | packages.append(src_package_name) |
| 1347 | d.setVar('FILES_%s' % src_package_name, '/usr/src/debug') | 1347 | d.setVar('FILES:%s' % src_package_name, '/usr/src/debug') |
| 1348 | 1348 | ||
| 1349 | # Sanity check PACKAGES for duplicates | 1349 | # Sanity check PACKAGES for duplicates |
| 1350 | # Sanity should be moved to sanity.bbclass once we have the infrastructure | 1350 | # Sanity should be moved to sanity.bbclass once we have the infrastructure |
| @@ -1387,7 +1387,7 @@ python populate_packages () { | |||
| 1387 | root = os.path.join(pkgdest, pkg) | 1387 | root = os.path.join(pkgdest, pkg) |
| 1388 | bb.utils.mkdirhier(root) | 1388 | bb.utils.mkdirhier(root) |
| 1389 | 1389 | ||
| 1390 | filesvar = d.getVar('FILES_%s' % pkg) or "" | 1390 | filesvar = d.getVar('FILES:%s' % pkg) or "" |
| 1391 | if "//" in filesvar: | 1391 | if "//" in filesvar: |
| 1392 | msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg | 1392 | msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg |
| 1393 | package_qa_handle_error("files-invalid", msg, d) | 1393 | package_qa_handle_error("files-invalid", msg, d) |
| @@ -1476,7 +1476,7 @@ python populate_packages () { | |||
| 1476 | 1476 | ||
| 1477 | if unshipped != []: | 1477 | if unshipped != []: |
| 1478 | msg = pn + ": Files/directories were installed but not shipped in any package:" | 1478 | msg = pn + ": Files/directories were installed but not shipped in any package:" |
| 1479 | if "installed-vs-shipped" in (d.getVar('INSANE_SKIP_' + pn) or "").split(): | 1479 | if "installed-vs-shipped" in (d.getVar('INSANE_SKIP:' + pn) or "").split(): |
| 1480 | bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn) | 1480 | bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn) |
| 1481 | else: | 1481 | else: |
| 1482 | for f in unshipped: | 1482 | for f in unshipped: |
| @@ -1524,11 +1524,11 @@ python package_fixsymlinks () { | |||
| 1524 | bb.note("%s contains dangling symlink to %s" % (pkg, l)) | 1524 | bb.note("%s contains dangling symlink to %s" % (pkg, l)) |
| 1525 | 1525 | ||
| 1526 | for pkg in newrdepends: | 1526 | for pkg in newrdepends: |
| 1527 | rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg) or "") | 1527 | rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS:' + pkg) or "") |
| 1528 | for p in newrdepends[pkg]: | 1528 | for p in newrdepends[pkg]: |
| 1529 | if p not in rdepends: | 1529 | if p not in rdepends: |
| 1530 | rdepends[p] = [] | 1530 | rdepends[p] = [] |
| 1531 | d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False)) | 1531 | d.setVar('RDEPENDS:' + pkg, bb.utils.join_deps(rdepends, commasep=False)) |
| 1532 | } | 1532 | } |
| 1533 | 1533 | ||
| 1534 | 1534 | ||
| @@ -1552,7 +1552,7 @@ python emit_pkgdata() { | |||
| 1552 | import json | 1552 | import json |
| 1553 | 1553 | ||
| 1554 | def process_postinst_on_target(pkg, mlprefix): | 1554 | def process_postinst_on_target(pkg, mlprefix): |
| 1555 | pkgval = d.getVar('PKG_%s' % pkg) | 1555 | pkgval = d.getVar('PKG:%s' % pkg) |
| 1556 | if pkgval is None: | 1556 | if pkgval is None: |
| 1557 | pkgval = pkg | 1557 | pkgval = pkg |
| 1558 | 1558 | ||
| @@ -1563,8 +1563,8 @@ if [ -n "$D" ]; then | |||
| 1563 | fi | 1563 | fi |
| 1564 | """ % (pkgval, mlprefix) | 1564 | """ % (pkgval, mlprefix) |
| 1565 | 1565 | ||
| 1566 | postinst = d.getVar('pkg_postinst_%s' % pkg) | 1566 | postinst = d.getVar('pkg_postinst:%s' % pkg) |
| 1567 | postinst_ontarget = d.getVar('pkg_postinst_ontarget_%s' % pkg) | 1567 | postinst_ontarget = d.getVar('pkg_postinst_ontarget:%s' % pkg) |
| 1568 | 1568 | ||
| 1569 | if postinst_ontarget: | 1569 | if postinst_ontarget: |
| 1570 | bb.debug(1, 'adding deferred pkg_postinst_ontarget() to pkg_postinst() for %s' % pkg) | 1570 | bb.debug(1, 'adding deferred pkg_postinst_ontarget() to pkg_postinst() for %s' % pkg) |
| @@ -1572,11 +1572,11 @@ fi | |||
| 1572 | postinst = '#!/bin/sh\n' | 1572 | postinst = '#!/bin/sh\n' |
| 1573 | postinst += defer_fragment | 1573 | postinst += defer_fragment |
| 1574 | postinst += postinst_ontarget | 1574 | postinst += postinst_ontarget |
| 1575 | d.setVar('pkg_postinst_%s' % pkg, postinst) | 1575 | d.setVar('pkg_postinst:%s' % pkg, postinst) |
| 1576 | 1576 | ||
| 1577 | def add_set_e_to_scriptlets(pkg): | 1577 | def add_set_e_to_scriptlets(pkg): |
| 1578 | for scriptlet_name in ('pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm'): | 1578 | for scriptlet_name in ('pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm'): |
| 1579 | scriptlet = d.getVar('%s_%s' % (scriptlet_name, pkg)) | 1579 | scriptlet = d.getVar('%s:%s' % (scriptlet_name, pkg)) |
| 1580 | if scriptlet: | 1580 | if scriptlet: |
| 1581 | scriptlet_split = scriptlet.split('\n') | 1581 | scriptlet_split = scriptlet.split('\n') |
| 1582 | if scriptlet_split[0].startswith("#!"): | 1582 | if scriptlet_split[0].startswith("#!"): |
| @@ -1591,9 +1591,9 @@ fi | |||
| 1591 | c = codecs.getencoder("unicode_escape") | 1591 | c = codecs.getencoder("unicode_escape") |
| 1592 | return c(str)[0].decode("latin1") | 1592 | return c(str)[0].decode("latin1") |
| 1593 | 1593 | ||
| 1594 | val = d.getVar('%s_%s' % (var, pkg)) | 1594 | val = d.getVar('%s:%s' % (var, pkg)) |
| 1595 | if val: | 1595 | if val: |
| 1596 | f.write('%s_%s: %s\n' % (var, pkg, encode(val))) | 1596 | f.write('%s:%s: %s\n' % (var, pkg, encode(val))) |
| 1597 | return val | 1597 | return val |
| 1598 | val = d.getVar('%s' % (var)) | 1598 | val = d.getVar('%s' % (var)) |
| 1599 | if val: | 1599 | if val: |
| @@ -1612,7 +1612,7 @@ fi | |||
| 1612 | ml_pkg = "%s-%s" % (variant, pkg) | 1612 | ml_pkg = "%s-%s" % (variant, pkg) |
| 1613 | subdata_file = "%s/runtime/%s" % (pkgdatadir, ml_pkg) | 1613 | subdata_file = "%s/runtime/%s" % (pkgdatadir, ml_pkg) |
| 1614 | with open(subdata_file, 'w') as fd: | 1614 | with open(subdata_file, 'w') as fd: |
| 1615 | fd.write("PKG_%s: %s" % (ml_pkg, pkg)) | 1615 | fd.write("PKG:%s: %s" % (ml_pkg, pkg)) |
| 1616 | 1616 | ||
| 1617 | packages = d.getVar('PACKAGES') | 1617 | packages = d.getVar('PACKAGES') |
| 1618 | pkgdest = d.getVar('PKGDEST') | 1618 | pkgdest = d.getVar('PKGDEST') |
| @@ -1636,10 +1636,10 @@ fi | |||
| 1636 | workdir = d.getVar('WORKDIR') | 1636 | workdir = d.getVar('WORKDIR') |
| 1637 | 1637 | ||
| 1638 | for pkg in packages.split(): | 1638 | for pkg in packages.split(): |
| 1639 | pkgval = d.getVar('PKG_%s' % pkg) | 1639 | pkgval = d.getVar('PKG:%s' % pkg) |
| 1640 | if pkgval is None: | 1640 | if pkgval is None: |
| 1641 | pkgval = pkg | 1641 | pkgval = pkg |
| 1642 | d.setVar('PKG_%s' % pkg, pkg) | 1642 | d.setVar('PKG:%s' % pkg, pkg) |
| 1643 | 1643 | ||
| 1644 | pkgdestpkg = os.path.join(pkgdest, pkg) | 1644 | pkgdestpkg = os.path.join(pkgdest, pkg) |
| 1645 | files = {} | 1645 | files = {} |
| @@ -1673,14 +1673,14 @@ fi | |||
| 1673 | sf.write('%s_%s: %d\n' % ('PKGSIZE', pkg, total_size)) | 1673 | sf.write('%s_%s: %d\n' % ('PKGSIZE', pkg, total_size)) |
| 1674 | 1674 | ||
| 1675 | # Symlinks needed for rprovides lookup | 1675 | # Symlinks needed for rprovides lookup |
| 1676 | rprov = d.getVar('RPROVIDES_%s' % pkg) or d.getVar('RPROVIDES') | 1676 | rprov = d.getVar('RPROVIDES:%s' % pkg) or d.getVar('RPROVIDES') |
| 1677 | if rprov: | 1677 | if rprov: |
| 1678 | for p in bb.utils.explode_deps(rprov): | 1678 | for p in bb.utils.explode_deps(rprov): |
| 1679 | subdata_sym = pkgdatadir + "/runtime-rprovides/%s/%s" % (p, pkg) | 1679 | subdata_sym = pkgdatadir + "/runtime-rprovides/%s/%s" % (p, pkg) |
| 1680 | bb.utils.mkdirhier(os.path.dirname(subdata_sym)) | 1680 | bb.utils.mkdirhier(os.path.dirname(subdata_sym)) |
| 1681 | oe.path.symlink("../../runtime/%s" % pkg, subdata_sym, True) | 1681 | oe.path.symlink("../../runtime/%s" % pkg, subdata_sym, True) |
| 1682 | 1682 | ||
| 1683 | allow_empty = d.getVar('ALLOW_EMPTY_%s' % pkg) | 1683 | allow_empty = d.getVar('ALLOW_EMPTY:%s' % pkg) |
| 1684 | if not allow_empty: | 1684 | if not allow_empty: |
| 1685 | allow_empty = d.getVar('ALLOW_EMPTY') | 1685 | allow_empty = d.getVar('ALLOW_EMPTY') |
| 1686 | root = "%s/%s" % (pkgdest, pkg) | 1686 | root = "%s/%s" % (pkgdest, pkg) |
| @@ -1733,7 +1733,7 @@ python package_do_filedeps() { | |||
| 1733 | 1733 | ||
| 1734 | pkglist = [] | 1734 | pkglist = [] |
| 1735 | for pkg in packages.split(): | 1735 | for pkg in packages.split(): |
| 1736 | if d.getVar('SKIP_FILEDEPS_' + pkg) == '1': | 1736 | if d.getVar('SKIP_FILEDEPS:' + pkg) == '1': |
| 1737 | continue | 1737 | continue |
| 1738 | if pkg.endswith('-dbg') or pkg.endswith('-doc') or pkg.find('-locale-') != -1 or pkg.find('-localedata-') != -1 or pkg.find('-gconv-') != -1 or pkg.find('-charmap-') != -1 or pkg.startswith('kernel-module-') or pkg.endswith('-src'): | 1738 | if pkg.endswith('-dbg') or pkg.endswith('-doc') or pkg.find('-locale-') != -1 or pkg.find('-localedata-') != -1 or pkg.find('-gconv-') != -1 or pkg.find('-charmap-') != -1 or pkg.startswith('kernel-module-') or pkg.endswith('-src'): |
| 1739 | continue | 1739 | continue |
| @@ -1930,12 +1930,12 @@ python package_do_shlibs() { | |||
| 1930 | shlib_provider = oe.package.read_shlib_providers(d) | 1930 | shlib_provider = oe.package.read_shlib_providers(d) |
| 1931 | 1931 | ||
| 1932 | for pkg in shlib_pkgs: | 1932 | for pkg in shlib_pkgs: |
| 1933 | private_libs = d.getVar('PRIVATE_LIBS_' + pkg) or d.getVar('PRIVATE_LIBS') or "" | 1933 | private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or "" |
| 1934 | private_libs = private_libs.split() | 1934 | private_libs = private_libs.split() |
| 1935 | needs_ldconfig = False | 1935 | needs_ldconfig = False |
| 1936 | bb.debug(2, "calculating shlib provides for %s" % pkg) | 1936 | bb.debug(2, "calculating shlib provides for %s" % pkg) |
| 1937 | 1937 | ||
| 1938 | pkgver = d.getVar('PKGV_' + pkg) | 1938 | pkgver = d.getVar('PKGV:' + pkg) |
| 1939 | if not pkgver: | 1939 | if not pkgver: |
| 1940 | pkgver = d.getVar('PV_' + pkg) | 1940 | pkgver = d.getVar('PV_' + pkg) |
| 1941 | if not pkgver: | 1941 | if not pkgver: |
| @@ -1985,11 +1985,11 @@ python package_do_shlibs() { | |||
| 1985 | shlib_provider[s[0]][s[1]] = (pkg, pkgver) | 1985 | shlib_provider[s[0]][s[1]] = (pkg, pkgver) |
| 1986 | if needs_ldconfig: | 1986 | if needs_ldconfig: |
| 1987 | bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg) | 1987 | bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg) |
| 1988 | postinst = d.getVar('pkg_postinst_%s' % pkg) | 1988 | postinst = d.getVar('pkg_postinst:%s' % pkg) |
| 1989 | if not postinst: | 1989 | if not postinst: |
| 1990 | postinst = '#!/bin/sh\n' | 1990 | postinst = '#!/bin/sh\n' |
| 1991 | postinst += d.getVar('ldconfig_postinst_fragment') | 1991 | postinst += d.getVar('ldconfig_postinst_fragment') |
| 1992 | d.setVar('pkg_postinst_%s' % pkg, postinst) | 1992 | d.setVar('pkg_postinst:%s' % pkg, postinst) |
| 1993 | bb.debug(1, 'LIBNAMES: pkg %s sonames %s' % (pkg, sonames)) | 1993 | bb.debug(1, 'LIBNAMES: pkg %s sonames %s' % (pkg, sonames)) |
| 1994 | 1994 | ||
| 1995 | assumed_libs = d.getVar('ASSUME_SHLIBS') | 1995 | assumed_libs = d.getVar('ASSUME_SHLIBS') |
| @@ -2011,7 +2011,7 @@ python package_do_shlibs() { | |||
| 2011 | for pkg in shlib_pkgs: | 2011 | for pkg in shlib_pkgs: |
| 2012 | bb.debug(2, "calculating shlib requirements for %s" % pkg) | 2012 | bb.debug(2, "calculating shlib requirements for %s" % pkg) |
| 2013 | 2013 | ||
| 2014 | private_libs = d.getVar('PRIVATE_LIBS_' + pkg) or d.getVar('PRIVATE_LIBS') or "" | 2014 | private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or "" |
| 2015 | private_libs = private_libs.split() | 2015 | private_libs = private_libs.split() |
| 2016 | 2016 | ||
| 2017 | deps = list() | 2017 | deps = list() |
| @@ -2165,7 +2165,7 @@ python read_shlibdeps () { | |||
| 2165 | 2165 | ||
| 2166 | packages = d.getVar('PACKAGES').split() | 2166 | packages = d.getVar('PACKAGES').split() |
| 2167 | for pkg in packages: | 2167 | for pkg in packages: |
| 2168 | rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg) or "") | 2168 | rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS:' + pkg) or "") |
| 2169 | for dep in sorted(pkglibdeps[pkg]): | 2169 | for dep in sorted(pkglibdeps[pkg]): |
| 2170 | # Add the dep if it's not already there, or if no comparison is set | 2170 | # Add the dep if it's not already there, or if no comparison is set |
| 2171 | if dep not in rdepends: | 2171 | if dep not in rdepends: |
| @@ -2173,7 +2173,7 @@ python read_shlibdeps () { | |||
| 2173 | for v in pkglibdeps[pkg][dep]: | 2173 | for v in pkglibdeps[pkg][dep]: |
| 2174 | if v not in rdepends[dep]: | 2174 | if v not in rdepends[dep]: |
| 2175 | rdepends[dep].append(v) | 2175 | rdepends[dep].append(v) |
| 2176 | d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False)) | 2176 | d.setVar('RDEPENDS:' + pkg, bb.utils.join_deps(rdepends, commasep=False)) |
| 2177 | } | 2177 | } |
| 2178 | 2178 | ||
| 2179 | python package_depchains() { | 2179 | python package_depchains() { |
| @@ -2197,7 +2197,7 @@ python package_depchains() { | |||
| 2197 | def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d): | 2197 | def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d): |
| 2198 | 2198 | ||
| 2199 | #bb.note('depends for %s is %s' % (base, depends)) | 2199 | #bb.note('depends for %s is %s' % (base, depends)) |
| 2200 | rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg) or "") | 2200 | rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "") |
| 2201 | 2201 | ||
| 2202 | for depend in sorted(depends): | 2202 | for depend in sorted(depends): |
| 2203 | if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'): | 2203 | if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'): |
| @@ -2212,13 +2212,13 @@ python package_depchains() { | |||
| 2212 | if pkgname not in rreclist and pkgname != pkg: | 2212 | if pkgname not in rreclist and pkgname != pkg: |
| 2213 | rreclist[pkgname] = [] | 2213 | rreclist[pkgname] = [] |
| 2214 | 2214 | ||
| 2215 | #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist))) | 2215 | #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist))) |
| 2216 | d.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False)) | 2216 | d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False)) |
| 2217 | 2217 | ||
| 2218 | def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d): | 2218 | def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d): |
| 2219 | 2219 | ||
| 2220 | #bb.note('rdepends for %s is %s' % (base, rdepends)) | 2220 | #bb.note('rdepends for %s is %s' % (base, rdepends)) |
| 2221 | rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg) or "") | 2221 | rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "") |
| 2222 | 2222 | ||
| 2223 | for depend in sorted(rdepends): | 2223 | for depend in sorted(rdepends): |
| 2224 | if depend.find('virtual-locale-') != -1: | 2224 | if depend.find('virtual-locale-') != -1: |
| @@ -2233,8 +2233,8 @@ python package_depchains() { | |||
| 2233 | if pkgname not in rreclist and pkgname != pkg: | 2233 | if pkgname not in rreclist and pkgname != pkg: |
| 2234 | rreclist[pkgname] = [] | 2234 | rreclist[pkgname] = [] |
| 2235 | 2235 | ||
| 2236 | #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist))) | 2236 | #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist))) |
| 2237 | d.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False)) | 2237 | d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False)) |
| 2238 | 2238 | ||
| 2239 | def add_dep(list, dep): | 2239 | def add_dep(list, dep): |
| 2240 | if dep not in list: | 2240 | if dep not in list: |
| @@ -2246,7 +2246,7 @@ python package_depchains() { | |||
| 2246 | 2246 | ||
| 2247 | rdepends = [] | 2247 | rdepends = [] |
| 2248 | for pkg in packages.split(): | 2248 | for pkg in packages.split(): |
| 2249 | for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + pkg) or ""): | 2249 | for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + pkg) or ""): |
| 2250 | add_dep(rdepends, dep) | 2250 | add_dep(rdepends, dep) |
| 2251 | 2251 | ||
| 2252 | #bb.note('rdepends is %s' % rdepends) | 2252 | #bb.note('rdepends is %s' % rdepends) |
| @@ -2280,7 +2280,7 @@ python package_depchains() { | |||
| 2280 | 2280 | ||
| 2281 | for suffix in pkgs: | 2281 | for suffix in pkgs: |
| 2282 | for pkg in pkgs[suffix]: | 2282 | for pkg in pkgs[suffix]: |
| 2283 | if d.getVarFlag('RRECOMMENDS_' + pkg, 'nodeprrecs'): | 2283 | if d.getVarFlag('RRECOMMENDS:' + pkg, 'nodeprrecs'): |
| 2284 | continue | 2284 | continue |
| 2285 | (base, func) = pkgs[suffix][pkg] | 2285 | (base, func) = pkgs[suffix][pkg] |
| 2286 | if suffix == "-dev": | 2286 | if suffix == "-dev": |
| @@ -2293,7 +2293,7 @@ python package_depchains() { | |||
| 2293 | pkg_addrrecs(pkg, base, suffix, func, rdepends, d) | 2293 | pkg_addrrecs(pkg, base, suffix, func, rdepends, d) |
| 2294 | else: | 2294 | else: |
| 2295 | rdeps = [] | 2295 | rdeps = [] |
| 2296 | for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + base) or ""): | 2296 | for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + base) or ""): |
| 2297 | add_dep(rdeps, dep) | 2297 | add_dep(rdeps, dep) |
| 2298 | pkg_addrrecs(pkg, base, suffix, func, rdeps, d) | 2298 | pkg_addrrecs(pkg, base, suffix, func, rdeps, d) |
| 2299 | } | 2299 | } |
| @@ -2310,7 +2310,7 @@ def gen_packagevar(d, pkgvars="PACKAGEVARS"): | |||
| 2310 | ret.append(v) | 2310 | ret.append(v) |
| 2311 | for p in pkgs: | 2311 | for p in pkgs: |
| 2312 | for v in vars: | 2312 | for v in vars: |
| 2313 | ret.append(v + "_" + p) | 2313 | ret.append(v + ":" + p) |
| 2314 | 2314 | ||
| 2315 | # Ensure that changes to INCOMPATIBLE_LICENSE re-run do_package for | 2315 | # Ensure that changes to INCOMPATIBLE_LICENSE re-run do_package for |
| 2316 | # affected recipes. | 2316 | # affected recipes. |
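gen_packagevar() feeds the do_package variable dependencies, so the per-package variants of everything in PACKAGEVARS are now tracked under ":"-separated names, and code reading those values back must use the same form. An illustrative read/write pair (the package and dependency names are invented):

    files = d.getVar('FILES:example-pkg') or d.getVar('FILES') or ""
    d.appendVar('RDEPENDS:example-pkg', ' libexample')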
diff --git a/meta/classes/package_deb.bbclass b/meta/classes/package_deb.bbclass index b3d8ce330e..eca43e1787 100644 --- a/meta/classes/package_deb.bbclass +++ b/meta/classes/package_deb.bbclass | |||
| @@ -81,7 +81,7 @@ def deb_write_pkg(pkg, d): | |||
| 81 | 81 | ||
| 82 | localdata.setVar('ROOT', '') | 82 | localdata.setVar('ROOT', '') |
| 83 | localdata.setVar('ROOT_%s' % pkg, root) | 83 | localdata.setVar('ROOT_%s' % pkg, root) |
| 84 | pkgname = localdata.getVar('PKG_%s' % pkg) | 84 | pkgname = localdata.getVar('PKG:%s' % pkg) |
| 85 | if not pkgname: | 85 | if not pkgname: |
| 86 | pkgname = pkg | 86 | pkgname = pkg |
| 87 | localdata.setVar('PKG', pkgname) | 87 | localdata.setVar('PKG', pkgname) |
diff --git a/meta/classes/package_ipk.bbclass b/meta/classes/package_ipk.bbclass index 600b3ac90c..c3b53854e8 100644 --- a/meta/classes/package_ipk.bbclass +++ b/meta/classes/package_ipk.bbclass | |||
| @@ -65,7 +65,7 @@ def ipk_write_pkg(pkg, d): | |||
| 65 | try: | 65 | try: |
| 66 | localdata.setVar('ROOT', '') | 66 | localdata.setVar('ROOT', '') |
| 67 | localdata.setVar('ROOT_%s' % pkg, root) | 67 | localdata.setVar('ROOT_%s' % pkg, root) |
| 68 | pkgname = localdata.getVar('PKG_%s' % pkg) | 68 | pkgname = localdata.getVar('PKG:%s' % pkg) |
| 69 | if not pkgname: | 69 | if not pkgname: |
| 70 | pkgname = pkg | 70 | pkgname = pkg |
| 71 | localdata.setVar('PKG', pkgname) | 71 | localdata.setVar('PKG', pkgname) |
diff --git a/meta/classes/package_rpm.bbclass b/meta/classes/package_rpm.bbclass index 86706da842..aebeaf6932 100644 --- a/meta/classes/package_rpm.bbclass +++ b/meta/classes/package_rpm.bbclass | |||
| @@ -332,7 +332,7 @@ python write_specfile () { | |||
| 332 | 332 | ||
| 333 | localdata.setVar('ROOT', '') | 333 | localdata.setVar('ROOT', '') |
| 334 | localdata.setVar('ROOT_%s' % pkg, root) | 334 | localdata.setVar('ROOT_%s' % pkg, root) |
| 335 | pkgname = localdata.getVar('PKG_%s' % pkg) | 335 | pkgname = localdata.getVar('PKG:%s' % pkg) |
| 336 | if not pkgname: | 336 | if not pkgname: |
| 337 | pkgname = pkg | 337 | pkgname = pkg |
| 338 | localdata.setVar('PKG', pkgname) | 338 | localdata.setVar('PKG', pkgname) |
diff --git a/meta/classes/packagedata.bbclass b/meta/classes/packagedata.bbclass index a903e5cfd2..3b962f29b5 100644 --- a/meta/classes/packagedata.bbclass +++ b/meta/classes/packagedata.bbclass | |||
| @@ -24,7 +24,7 @@ python read_subpackage_metadata () { | |||
| 24 | continue | 24 | continue |
| 25 | # | 25 | # |
| 26 | # If we set unsuffixed variables here there is a chance they could clobber override versions | 26 | # If we set unsuffixed variables here there is a chance they could clobber override versions |
| 27 | # of that variable, e.g. DESCRIPTION could clobber DESCRIPTION_<pkgname> | 27 | # of that variable, e.g. DESCRIPTION could clobber DESCRIPTION:<pkgname> |
| 28 | # We therefore don't clobber for the unsuffixed variable versions | 28 | # We therefore don't clobber for the unsuffixed variable versions |
| 29 | # | 29 | # |
| 30 | if key.endswith("_" + pkg): | 30 | if key.endswith("_" + pkg): |
diff --git a/meta/classes/packagegroup.bbclass b/meta/classes/packagegroup.bbclass index 1541c8fbff..557b1b6382 100644 --- a/meta/classes/packagegroup.bbclass +++ b/meta/classes/packagegroup.bbclass | |||
| @@ -32,7 +32,7 @@ python () { | |||
| 32 | for suffix in types] | 32 | for suffix in types] |
| 33 | d.setVar('PACKAGES', ' '.join(packages)) | 33 | d.setVar('PACKAGES', ' '.join(packages)) |
| 34 | for pkg in packages: | 34 | for pkg in packages: |
| 35 | d.setVar('ALLOW_EMPTY_%s' % pkg, '1') | 35 | d.setVar('ALLOW_EMPTY:%s' % pkg, '1') |
| 36 | } | 36 | } |
| 37 | 37 | ||
| 38 | # We don't want to look at shared library dependencies for the | 38 | # We don't want to look at shared library dependencies for the |
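Recipes that mark packages as allowed-empty by hand follow the same pattern; for example (package names illustrative):

    ALLOW_EMPTY:${PN} = "1"
    ALLOW_EMPTY:${PN}-extras = "1"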
diff --git a/meta/classes/patch.bbclass b/meta/classes/patch.bbclass index cd491a563d..388773a237 100644 --- a/meta/classes/patch.bbclass +++ b/meta/classes/patch.bbclass | |||
| @@ -10,7 +10,7 @@ PATCHDEPENDENCY = "${PATCHTOOL}-native:do_populate_sysroot" | |||
| 10 | # http://git.savannah.gnu.org/cgit/patch.git/patch/?id=82b800c9552a088a241457948219d25ce0a407a4 | 10 | # http://git.savannah.gnu.org/cgit/patch.git/patch/?id=82b800c9552a088a241457948219d25ce0a407a4 |
| 11 | # This leaks into debug sources in particular. Add the dependency | 11 | # This leaks into debug sources in particular. Add the dependency |
| 12 | # to target recipes to avoid this problem until we can rely on 2.7.4 or later. | 12 | # to target recipes to avoid this problem until we can rely on 2.7.4 or later. |
| 13 | PATCHDEPENDENCY_append_class-target = " patch-replacement-native:do_populate_sysroot" | 13 | PATCHDEPENDENCY:append:class-target = " patch-replacement-native:do_populate_sysroot" |
| 14 | 14 | ||
| 15 | PATCH_GIT_USER_NAME ?= "OpenEmbedded" | 15 | PATCH_GIT_USER_NAME ?= "OpenEmbedded" |
| 16 | PATCH_GIT_USER_EMAIL ?= "oe.patch@oe" | 16 | PATCH_GIT_USER_EMAIL ?= "oe.patch@oe" |
diff --git a/meta/classes/pixbufcache.bbclass b/meta/classes/pixbufcache.bbclass index b07f51ed56..886bf195b3 100644 --- a/meta/classes/pixbufcache.bbclass +++ b/meta/classes/pixbufcache.bbclass | |||
| @@ -3,7 +3,7 @@ | |||
| 3 | # packages. | 3 | # packages. |
| 4 | # | 4 | # |
| 5 | 5 | ||
| 6 | DEPENDS_append_class-target = " qemu-native" | 6 | DEPENDS:append:class-target = " qemu-native" |
| 7 | inherit qemu | 7 | inherit qemu |
| 8 | 8 | ||
| 9 | PIXBUF_PACKAGES ??= "${PN}" | 9 | PIXBUF_PACKAGES ??= "${PN}" |
| @@ -29,30 +29,30 @@ else | |||
| 29 | fi | 29 | fi |
| 30 | } | 30 | } |
| 31 | 31 | ||
| 32 | python populate_packages_append() { | 32 | python populate_packages:append() { |
| 33 | pixbuf_pkgs = d.getVar('PIXBUF_PACKAGES').split() | 33 | pixbuf_pkgs = d.getVar('PIXBUF_PACKAGES').split() |
| 34 | 34 | ||
| 35 | for pkg in pixbuf_pkgs: | 35 | for pkg in pixbuf_pkgs: |
| 36 | bb.note("adding pixbuf postinst and postrm scripts to %s" % pkg) | 36 | bb.note("adding pixbuf postinst and postrm scripts to %s" % pkg) |
| 37 | postinst = d.getVar('pkg_postinst_%s' % pkg) or d.getVar('pkg_postinst') | 37 | postinst = d.getVar('pkg_postinst:%s' % pkg) or d.getVar('pkg_postinst') |
| 38 | if not postinst: | 38 | if not postinst: |
| 39 | postinst = '#!/bin/sh\n' | 39 | postinst = '#!/bin/sh\n' |
| 40 | postinst += d.getVar('pixbufcache_common') | 40 | postinst += d.getVar('pixbufcache_common') |
| 41 | d.setVar('pkg_postinst_%s' % pkg, postinst) | 41 | d.setVar('pkg_postinst:%s' % pkg, postinst) |
| 42 | 42 | ||
| 43 | postrm = d.getVar('pkg_postrm_%s' % pkg) or d.getVar('pkg_postrm') | 43 | postrm = d.getVar('pkg_postrm:%s' % pkg) or d.getVar('pkg_postrm') |
| 44 | if not postrm: | 44 | if not postrm: |
| 45 | postrm = '#!/bin/sh\n' | 45 | postrm = '#!/bin/sh\n' |
| 46 | postrm += d.getVar('pixbufcache_common') | 46 | postrm += d.getVar('pixbufcache_common') |
| 47 | d.setVar('pkg_postrm_%s' % pkg, postrm) | 47 | d.setVar('pkg_postrm:%s' % pkg, postrm) |
| 48 | } | 48 | } |
| 49 | 49 | ||
| 50 | gdkpixbuf_complete() { | 50 | gdkpixbuf_complete() { |
| 51 | GDK_PIXBUF_FATAL_LOADER=1 ${STAGING_LIBDIR_NATIVE}/gdk-pixbuf-2.0/gdk-pixbuf-query-loaders --update-cache || exit 1 | 51 | GDK_PIXBUF_FATAL_LOADER=1 ${STAGING_LIBDIR_NATIVE}/gdk-pixbuf-2.0/gdk-pixbuf-query-loaders --update-cache || exit 1 |
| 52 | } | 52 | } |
| 53 | 53 | ||
| 54 | DEPENDS_append_class-native = " gdk-pixbuf-native" | 54 | DEPENDS:append:class-native = " gdk-pixbuf-native" |
| 55 | SYSROOT_PREPROCESS_FUNCS_append_class-native = " pixbufcache_sstate_postinst" | 55 | SYSROOT_PREPROCESS_FUNCS:append:class-native = " pixbufcache_sstate_postinst" |
| 56 | 56 | ||
| 57 | pixbufcache_sstate_postinst() { | 57 | pixbufcache_sstate_postinst() { |
| 58 | mkdir -p ${SYSROOT_DESTDIR}${bindir} | 58 | mkdir -p ${SYSROOT_DESTDIR}${bindir} |
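Appends to shell and python functions are converted as well, so populate_packages_append() becomes populate_packages:append(). In a recipe the converted form looks like this sketch (the body is only an example):

    python populate_packages:append() {
        bb.note("extra package splitting for %s" % d.getVar('PN'))
    }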
diff --git a/meta/classes/pkgconfig.bbclass b/meta/classes/pkgconfig.bbclass index ad1f84f506..fa94527ce9 100644 --- a/meta/classes/pkgconfig.bbclass +++ b/meta/classes/pkgconfig.bbclass | |||
| @@ -1,2 +1,2 @@ | |||
| 1 | DEPENDS_prepend = "pkgconfig-native " | 1 | DEPENDS:prepend = "pkgconfig-native " |
| 2 | 2 | ||
diff --git a/meta/classes/populate_sdk_base.bbclass b/meta/classes/populate_sdk_base.bbclass index f8072a9d37..3c1eb454b3 100644 --- a/meta/classes/populate_sdk_base.bbclass +++ b/meta/classes/populate_sdk_base.bbclass | |||
| @@ -23,7 +23,7 @@ SDKIMAGE_FEATURES ??= "dev-pkgs dbg-pkgs src-pkgs ${@bb.utils.contains('DISTRO_F | |||
| 23 | SDKIMAGE_INSTALL_COMPLEMENTARY = '${@complementary_globs("SDKIMAGE_FEATURES", d)}' | 23 | SDKIMAGE_INSTALL_COMPLEMENTARY = '${@complementary_globs("SDKIMAGE_FEATURES", d)}' |
| 24 | SDKIMAGE_INSTALL_COMPLEMENTARY[vardeps] += "SDKIMAGE_FEATURES" | 24 | SDKIMAGE_INSTALL_COMPLEMENTARY[vardeps] += "SDKIMAGE_FEATURES" |
| 25 | 25 | ||
| 26 | PACKAGE_ARCHS_append_task-populate-sdk = " sdk-provides-dummy-target" | 26 | PACKAGE_ARCHS:append:task-populate-sdk = " sdk-provides-dummy-target" |
| 27 | SDK_PACKAGE_ARCHS += "sdk-provides-dummy-${SDKPKGSUFFIX}" | 27 | SDK_PACKAGE_ARCHS += "sdk-provides-dummy-${SDKPKGSUFFIX}" |
| 28 | 28 | ||
| 29 | # List of locales to install, or "all" for all of them, or unset for none. | 29 | # List of locales to install, or "all" for all of them, or unset for none. |
| @@ -37,7 +37,7 @@ SDK_DEPLOY = "${DEPLOY_DIR}/sdk" | |||
| 37 | 37 | ||
| 38 | SDKDEPLOYDIR = "${WORKDIR}/${SDKMACHINE}-deploy-${PN}-populate-sdk" | 38 | SDKDEPLOYDIR = "${WORKDIR}/${SDKMACHINE}-deploy-${PN}-populate-sdk" |
| 39 | 39 | ||
| 40 | B_task-populate-sdk = "${SDK_DIR}" | 40 | B:task-populate-sdk = "${SDK_DIR}" |
| 41 | 41 | ||
| 42 | SDKTARGETSYSROOT = "${SDKPATH}/sysroots/${REAL_MULTIMACH_TARGET_SYS}" | 42 | SDKTARGETSYSROOT = "${SDKPATH}/sysroots/${REAL_MULTIMACH_TARGET_SYS}" |
| 43 | 43 | ||
| @@ -66,7 +66,7 @@ python () { | |||
| 66 | 66 | ||
| 67 | SDK_RDEPENDS = "${TOOLCHAIN_TARGET_TASK} ${TOOLCHAIN_HOST_TASK}" | 67 | SDK_RDEPENDS = "${TOOLCHAIN_TARGET_TASK} ${TOOLCHAIN_HOST_TASK}" |
| 68 | SDK_DEPENDS = "virtual/fakeroot-native ${SDK_ARCHIVE_DEPENDS} cross-localedef-native nativesdk-qemuwrapper-cross ${@' '.join(["%s-qemuwrapper-cross" % m for m in d.getVar("MULTILIB_VARIANTS").split()])} qemuwrapper-cross" | 68 | SDK_DEPENDS = "virtual/fakeroot-native ${SDK_ARCHIVE_DEPENDS} cross-localedef-native nativesdk-qemuwrapper-cross ${@' '.join(["%s-qemuwrapper-cross" % m for m in d.getVar("MULTILIB_VARIANTS").split()])} qemuwrapper-cross" |
| 69 | PATH_prepend = "${WORKDIR}/recipe-sysroot/${SDKPATHNATIVE}${bindir}/crossscripts:${@":".join(all_multilib_tune_values(d, 'STAGING_BINDIR_CROSS').split())}:" | 69 | PATH:prepend = "${WORKDIR}/recipe-sysroot/${SDKPATHNATIVE}${bindir}/crossscripts:${@":".join(all_multilib_tune_values(d, 'STAGING_BINDIR_CROSS').split())}:" |
| 70 | SDK_DEPENDS += "nativesdk-glibc-locale" | 70 | SDK_DEPENDS += "nativesdk-glibc-locale" |
| 71 | 71 | ||
| 72 | # We want the MULTIARCH_TARGET_SYS to point to the TUNE_PKGARCH, not PACKAGE_ARCH as it | 72 | # We want the MULTIARCH_TARGET_SYS to point to the TUNE_PKGARCH, not PACKAGE_ARCH as it |
| @@ -119,9 +119,9 @@ python write_host_sdk_manifest () { | |||
| 119 | output.write(format_pkg_list(pkgs, 'ver')) | 119 | output.write(format_pkg_list(pkgs, 'ver')) |
| 120 | } | 120 | } |
| 121 | 121 | ||
| 122 | POPULATE_SDK_POST_TARGET_COMMAND_append = " write_sdk_test_data ; " | 122 | POPULATE_SDK_POST_TARGET_COMMAND:append = " write_sdk_test_data ; " |
| 123 | POPULATE_SDK_POST_TARGET_COMMAND_append_task-populate-sdk = " write_target_sdk_manifest ; " | 123 | POPULATE_SDK_POST_TARGET_COMMAND:append:task-populate-sdk = " write_target_sdk_manifest ; " |
| 124 | POPULATE_SDK_POST_HOST_COMMAND_append_task-populate-sdk = " write_host_sdk_manifest; " | 124 | POPULATE_SDK_POST_HOST_COMMAND:append:task-populate-sdk = " write_host_sdk_manifest; " |
| 125 | SDK_PACKAGING_COMMAND = "${@'${SDK_PACKAGING_FUNC};' if '${SDK_PACKAGING_FUNC}' else ''}" | 125 | SDK_PACKAGING_COMMAND = "${@'${SDK_PACKAGING_FUNC};' if '${SDK_PACKAGING_FUNC}' else ''}" |
| 126 | SDK_POSTPROCESS_COMMAND = " create_sdk_files; check_sdk_sysroots; archive_sdk; ${SDK_PACKAGING_COMMAND} " | 126 | SDK_POSTPROCESS_COMMAND = " create_sdk_files; check_sdk_sysroots; archive_sdk; ${SDK_PACKAGING_COMMAND} " |
| 127 | 127 | ||
| @@ -172,7 +172,7 @@ fakeroot python do_populate_sdk() { | |||
| 172 | populate_sdk_common(d) | 172 | populate_sdk_common(d) |
| 173 | } | 173 | } |
| 174 | SSTATETASKS += "do_populate_sdk" | 174 | SSTATETASKS += "do_populate_sdk" |
| 175 | SSTATE_SKIP_CREATION_task-populate-sdk = '1' | 175 | SSTATE_SKIP_CREATION:task-populate-sdk = '1' |
| 176 | do_populate_sdk[cleandirs] = "${SDKDEPLOYDIR}" | 176 | do_populate_sdk[cleandirs] = "${SDKDEPLOYDIR}" |
| 177 | do_populate_sdk[sstate-inputdirs] = "${SDKDEPLOYDIR}" | 177 | do_populate_sdk[sstate-inputdirs] = "${SDKDEPLOYDIR}" |
| 178 | do_populate_sdk[sstate-outputdirs] = "${SDK_DEPLOY}" | 178 | do_populate_sdk[sstate-outputdirs] = "${SDK_DEPLOY}" |
diff --git a/meta/classes/populate_sdk_ext.bbclass b/meta/classes/populate_sdk_ext.bbclass index 4aabafa079..f67849708c 100644 --- a/meta/classes/populate_sdk_ext.bbclass +++ b/meta/classes/populate_sdk_ext.bbclass | |||
| @@ -5,16 +5,16 @@ inherit populate_sdk_base | |||
| 5 | # NOTE: normally you cannot use task overrides for this kind of thing - this | 5 | # NOTE: normally you cannot use task overrides for this kind of thing - this |
| 6 | # only works because of get_sdk_ext_rdepends() | 6 | # only works because of get_sdk_ext_rdepends() |
| 7 | 7 | ||
| 8 | TOOLCHAIN_HOST_TASK_task-populate-sdk-ext = " \ | 8 | TOOLCHAIN_HOST_TASK:task-populate-sdk-ext = " \ |
| 9 | meta-environment-extsdk-${MACHINE} \ | 9 | meta-environment-extsdk-${MACHINE} \ |
| 10 | " | 10 | " |
| 11 | 11 | ||
| 12 | TOOLCHAIN_TARGET_TASK_task-populate-sdk-ext = "" | 12 | TOOLCHAIN_TARGET_TASK:task-populate-sdk-ext = "" |
| 13 | 13 | ||
| 14 | SDK_RELOCATE_AFTER_INSTALL_task-populate-sdk-ext = "0" | 14 | SDK_RELOCATE_AFTER_INSTALL:task-populate-sdk-ext = "0" |
| 15 | 15 | ||
| 16 | SDK_EXT = "" | 16 | SDK_EXT = "" |
| 17 | SDK_EXT_task-populate-sdk-ext = "-ext" | 17 | SDK_EXT:task-populate-sdk-ext = "-ext" |
| 18 | 18 | ||
| 19 | # Options are full or minimal | 19 | # Options are full or minimal |
| 20 | SDK_EXT_TYPE ?= "full" | 20 | SDK_EXT_TYPE ?= "full" |
| @@ -78,10 +78,10 @@ COREBASE_FILES ?= " \ | |||
| 78 | .templateconf \ | 78 | .templateconf \ |
| 79 | " | 79 | " |
| 80 | 80 | ||
| 81 | SDK_DIR_task-populate-sdk-ext = "${WORKDIR}/sdk-ext" | 81 | SDK_DIR:task-populate-sdk-ext = "${WORKDIR}/sdk-ext" |
| 82 | B_task-populate-sdk-ext = "${SDK_DIR}" | 82 | B:task-populate-sdk-ext = "${SDK_DIR}" |
| 83 | TOOLCHAINEXT_OUTPUTNAME ?= "${SDK_NAME}-toolchain-ext-${SDK_VERSION}" | 83 | TOOLCHAINEXT_OUTPUTNAME ?= "${SDK_NAME}-toolchain-ext-${SDK_VERSION}" |
| 84 | TOOLCHAIN_OUTPUTNAME_task-populate-sdk-ext = "${TOOLCHAINEXT_OUTPUTNAME}" | 84 | TOOLCHAIN_OUTPUTNAME:task-populate-sdk-ext = "${TOOLCHAINEXT_OUTPUTNAME}" |
| 85 | 85 | ||
| 86 | SDK_EXT_TARGET_MANIFEST = "${SDK_DEPLOY}/${TOOLCHAINEXT_OUTPUTNAME}.target.manifest" | 86 | SDK_EXT_TARGET_MANIFEST = "${SDK_DEPLOY}/${TOOLCHAINEXT_OUTPUTNAME}.target.manifest" |
| 87 | SDK_EXT_HOST_MANIFEST = "${SDK_DEPLOY}/${TOOLCHAINEXT_OUTPUTNAME}.host.manifest" | 87 | SDK_EXT_HOST_MANIFEST = "${SDK_DEPLOY}/${TOOLCHAINEXT_OUTPUTNAME}.host.manifest" |
| @@ -118,9 +118,9 @@ python write_host_sdk_ext_manifest () { | |||
| 118 | f.write("%s %s %s\n" % (info[1], info[2], info[3])) | 118 | f.write("%s %s %s\n" % (info[1], info[2], info[3])) |
| 119 | } | 119 | } |
| 120 | 120 | ||
| 121 | SDK_POSTPROCESS_COMMAND_append_task-populate-sdk-ext = "write_target_sdk_ext_manifest; write_host_sdk_ext_manifest; " | 121 | SDK_POSTPROCESS_COMMAND:append:task-populate-sdk-ext = "write_target_sdk_ext_manifest; write_host_sdk_ext_manifest; " |
| 122 | 122 | ||
| 123 | SDK_TITLE_task-populate-sdk-ext = "${@d.getVar('DISTRO_NAME') or d.getVar('DISTRO')} Extensible SDK" | 123 | SDK_TITLE:task-populate-sdk-ext = "${@d.getVar('DISTRO_NAME') or d.getVar('DISTRO')} Extensible SDK" |
| 124 | 124 | ||
| 125 | def clean_esdk_builddir(d, sdkbasepath): | 125 | def clean_esdk_builddir(d, sdkbasepath): |
| 126 | """Clean up traces of the fake build for create_filtered_tasklist()""" | 126 | """Clean up traces of the fake build for create_filtered_tasklist()""" |
| @@ -148,14 +148,14 @@ def create_filtered_tasklist(d, sdkbasepath, tasklistfile, conf_initpath): | |||
| 148 | with open(sdkbasepath + '/conf/local.conf', 'a') as f: | 148 | with open(sdkbasepath + '/conf/local.conf', 'a') as f: |
| 149 | # Force the use of sstate from the build system | 149 | # Force the use of sstate from the build system |
| 150 | f.write('\nSSTATE_DIR_forcevariable = "%s"\n' % d.getVar('SSTATE_DIR')) | 150 | f.write('\nSSTATE_DIR_forcevariable = "%s"\n' % d.getVar('SSTATE_DIR')) |
| 151 | f.write('SSTATE_MIRRORS_forcevariable = "file://universal/(.*) file://universal-4.9/\\1 file://universal-4.9/(.*) file://universal-4.8/\\1"\n') | 151 | f.write('SSTATE_MIRRORS:forcevariable = "file://universal/(.*) file://universal-4.9/\\1 file://universal-4.9/(.*) file://universal-4.8/\\1"\n') |
| 152 | # Ensure TMPDIR is the default so that clean_esdk_builddir() can delete it | 152 | # Ensure TMPDIR is the default so that clean_esdk_builddir() can delete it |
| 153 | f.write('TMPDIR_forcevariable = "${TOPDIR}/tmp"\n') | 153 | f.write('TMPDIR:forcevariable = "${TOPDIR}/tmp"\n') |
| 154 | f.write('TCLIBCAPPEND_forcevariable = ""\n') | 154 | f.write('TCLIBCAPPEND:forcevariable = ""\n') |
| 155 | # Drop uninative if the build isn't using it (or else NATIVELSBSTRING will | 155 | # Drop uninative if the build isn't using it (or else NATIVELSBSTRING will |
| 156 | # be different and we won't be able to find our native sstate) | 156 | # be different and we won't be able to find our native sstate) |
| 157 | if not bb.data.inherits_class('uninative', d): | 157 | if not bb.data.inherits_class('uninative', d): |
| 158 | f.write('INHERIT_remove = "uninative"\n') | 158 | f.write('INHERIT:remove = "uninative"\n') |
| 159 | 159 | ||
| 160 | # Unfortunately the default SDKPATH (or even a custom value) may contain characters that bitbake | 160 | # Unfortunately the default SDKPATH (or even a custom value) may contain characters that bitbake |
| 161 | # will not allow in its COREBASE path, so we need to rename the directory temporarily | 161 | # will not allow in its COREBASE path, so we need to rename the directory temporarily |
| @@ -342,7 +342,7 @@ python copy_buildsystem () { | |||
| 342 | f.write('CONF_VERSION = "%s"\n\n' % d.getVar('CONF_VERSION', False)) | 342 | f.write('CONF_VERSION = "%s"\n\n' % d.getVar('CONF_VERSION', False)) |
| 343 | 343 | ||
| 344 | # Some classes are not suitable for SDK, remove them from INHERIT | 344 | # Some classes are not suitable for SDK, remove them from INHERIT |
| 345 | f.write('INHERIT_remove = "%s"\n' % d.getVar('SDK_INHERIT_BLACKLIST', False)) | 345 | f.write('INHERIT:remove = "%s"\n' % d.getVar('SDK_INHERIT_BLACKLIST', False)) |
| 346 | 346 | ||
| 347 | # Bypass the default connectivity check if any | 347 | # Bypass the default connectivity check if any |
| 348 | f.write('CONNECTIVITY_CHECK_URIS = ""\n\n') | 348 | f.write('CONNECTIVITY_CHECK_URIS = ""\n\n') |
| @@ -358,7 +358,7 @@ python copy_buildsystem () { | |||
| 358 | f.write('SIGGEN_LOCKEDSIGS_TASKSIG_CHECK = "warn"\n\n') | 358 | f.write('SIGGEN_LOCKEDSIGS_TASKSIG_CHECK = "warn"\n\n') |
| 359 | 359 | ||
| 360 | # We want to be able to set this without a full reparse | 360 | # We want to be able to set this without a full reparse |
| 361 | f.write('BB_HASHCONFIG_WHITELIST_append = " SIGGEN_UNLOCKED_RECIPES"\n\n') | 361 | f.write('BB_HASHCONFIG_WHITELIST:append = " SIGGEN_UNLOCKED_RECIPES"\n\n') |
| 362 | 362 | ||
| 363 | # Set up whitelist for run on install | 363 | # Set up whitelist for run on install |
| 364 | f.write('BB_SETSCENE_ENFORCE_WHITELIST = "%:* *:do_shared_workdir *:do_rm_work wic-tools:* *:do_addto_recipe_sysroot"\n\n') | 364 | f.write('BB_SETSCENE_ENFORCE_WHITELIST = "%:* *:do_shared_workdir *:do_rm_work wic-tools:* *:do_addto_recipe_sysroot"\n\n') |
| @@ -688,7 +688,7 @@ sdk_ext_preinst() { | |||
| 688 | EXTRA_TAR_OPTIONS="$EXTRA_TAR_OPTIONS --exclude=sstate-cache" | 688 | EXTRA_TAR_OPTIONS="$EXTRA_TAR_OPTIONS --exclude=sstate-cache" |
| 689 | fi | 689 | fi |
| 690 | } | 690 | } |
| 691 | SDK_PRE_INSTALL_COMMAND_task-populate-sdk-ext = "${sdk_ext_preinst}" | 691 | SDK_PRE_INSTALL_COMMAND:task-populate-sdk-ext = "${sdk_ext_preinst}" |
| 692 | 692 | ||
| 693 | # FIXME this preparation should be done as part of the SDK construction | 693 | # FIXME this preparation should be done as part of the SDK construction |
| 694 | sdk_ext_postinst() { | 694 | sdk_ext_postinst() { |
| @@ -739,9 +739,9 @@ sdk_ext_postinst() { | |||
| 739 | echo done | 739 | echo done |
| 740 | } | 740 | } |
| 741 | 741 | ||
| 742 | SDK_POST_INSTALL_COMMAND_task-populate-sdk-ext = "${sdk_ext_postinst}" | 742 | SDK_POST_INSTALL_COMMAND:task-populate-sdk-ext = "${sdk_ext_postinst}" |
| 743 | 743 | ||
| 744 | SDK_POSTPROCESS_COMMAND_prepend_task-populate-sdk-ext = "copy_buildsystem; install_tools; " | 744 | SDK_POSTPROCESS_COMMAND:prepend:task-populate-sdk-ext = "copy_buildsystem; install_tools; " |
| 745 | 745 | ||
| 746 | SDK_INSTALL_TARGETS = "" | 746 | SDK_INSTALL_TARGETS = "" |
| 747 | fakeroot python do_populate_sdk_ext() { | 747 | fakeroot python do_populate_sdk_ext() { |
| @@ -833,7 +833,7 @@ do_populate_sdk_ext[nostamp] = "1" | |||
| 833 | SDKEXTDEPLOYDIR = "${WORKDIR}/deploy-${PN}-populate-sdk-ext" | 833 | SDKEXTDEPLOYDIR = "${WORKDIR}/deploy-${PN}-populate-sdk-ext" |
| 834 | 834 | ||
| 835 | SSTATETASKS += "do_populate_sdk_ext" | 835 | SSTATETASKS += "do_populate_sdk_ext" |
| 836 | SSTATE_SKIP_CREATION_task-populate-sdk-ext = '1' | 836 | SSTATE_SKIP_CREATION:task-populate-sdk-ext = '1' |
| 837 | do_populate_sdk_ext[cleandirs] = "${SDKEXTDEPLOYDIR}" | 837 | do_populate_sdk_ext[cleandirs] = "${SDKEXTDEPLOYDIR}" |
| 838 | do_populate_sdk_ext[sstate-inputdirs] = "${SDKEXTDEPLOYDIR}" | 838 | do_populate_sdk_ext[sstate-inputdirs] = "${SDKEXTDEPLOYDIR}" |
| 839 | do_populate_sdk_ext[sstate-outputdirs] = "${SDK_DEPLOY}" | 839 | do_populate_sdk_ext[sstate-outputdirs] = "${SDK_DEPLOY}" |
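Besides :append and :prepend, the :remove operator is renamed the same way, as the generated local.conf fragments above show; in a user configuration it now reads (the class being removed is just an example):

    INHERIT:remove = "rm_work"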
diff --git a/meta/classes/ptest-gnome.bbclass b/meta/classes/ptest-gnome.bbclass index 478a33474d..18bd3dbff9 100644 --- a/meta/classes/ptest-gnome.bbclass +++ b/meta/classes/ptest-gnome.bbclass | |||
| @@ -1,8 +1,8 @@ | |||
| 1 | inherit ptest | 1 | inherit ptest |
| 2 | 2 | ||
| 3 | EXTRA_OECONF_append = " ${@bb.utils.contains('PTEST_ENABLED', '1', '--enable-installed-tests', '--disable-installed-tests', d)}" | 3 | EXTRA_OECONF:append = " ${@bb.utils.contains('PTEST_ENABLED', '1', '--enable-installed-tests', '--disable-installed-tests', d)}" |
| 4 | 4 | ||
| 5 | FILES_${PN}-ptest += "${libexecdir}/installed-tests/ \ | 5 | FILES:${PN}-ptest += "${libexecdir}/installed-tests/ \ |
| 6 | ${datadir}/installed-tests/" | 6 | ${datadir}/installed-tests/" |
| 7 | 7 | ||
| 8 | RDEPENDS_${PN}-ptest += "gnome-desktop-testing" | 8 | RDEPENDS:${PN}-ptest += "gnome-desktop-testing" |
diff --git a/meta/classes/ptest-perl.bbclass b/meta/classes/ptest-perl.bbclass index a4bc40b51a..5dd72c9dad 100644 --- a/meta/classes/ptest-perl.bbclass +++ b/meta/classes/ptest-perl.bbclass | |||
| @@ -1,6 +1,6 @@ | |||
| 1 | inherit ptest | 1 | inherit ptest |
| 2 | 2 | ||
| 3 | FILESEXTRAPATHS_prepend := "${COREBASE}/meta/files:" | 3 | FILESEXTRAPATHS:prepend := "${COREBASE}/meta/files:" |
| 4 | 4 | ||
| 5 | SRC_URI += "file://ptest-perl/run-ptest" | 5 | SRC_URI += "file://ptest-perl/run-ptest" |
| 6 | 6 | ||
| @@ -13,9 +13,9 @@ do_install_ptest_perl() { | |||
| 13 | chown -R root:root ${D}${PTEST_PATH} | 13 | chown -R root:root ${D}${PTEST_PATH} |
| 14 | } | 14 | } |
| 15 | 15 | ||
| 16 | FILES_${PN}-ptest_prepend = "${PTEST_PATH}/t/* ${PTEST_PATH}/run-ptest " | 16 | FILES:${PN}-ptest:prepend = "${PTEST_PATH}/t/* ${PTEST_PATH}/run-ptest " |
| 17 | 17 | ||
| 18 | RDEPENDS_${PN}-ptest_prepend = "perl " | 18 | RDEPENDS:${PN}-ptest:prepend = "perl " |
| 19 | 19 | ||
| 20 | addtask install_ptest_perl after do_install_ptest_base before do_package | 20 | addtask install_ptest_perl after do_install_ptest_base before do_package |
| 21 | 21 | ||
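A package-qualified variable can itself carry :prepend or :append, giving chains like the FILES and RDEPENDS lines above. Applied in a recipe inheriting this class, that might read (the extra path and module name are examples):

    FILES:${PN}-ptest:prepend = "${PTEST_PATH}/testdata "
    RDEPENDS:${PN}-ptest:append = " perl-module-test-more"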
diff --git a/meta/classes/ptest.bbclass b/meta/classes/ptest.bbclass index 466916299a..200446e52b 100644 --- a/meta/classes/ptest.bbclass +++ b/meta/classes/ptest.bbclass | |||
| @@ -1,22 +1,22 @@ | |||
| 1 | SUMMARY_${PN}-ptest ?= "${SUMMARY} - Package test files" | 1 | SUMMARY:${PN}-ptest ?= "${SUMMARY} - Package test files" |
| 2 | DESCRIPTION_${PN}-ptest ?= "${DESCRIPTION} \ | 2 | DESCRIPTION:${PN}-ptest ?= "${DESCRIPTION} \ |
| 3 | This package contains a test directory ${PTEST_PATH} for package test purposes." | 3 | This package contains a test directory ${PTEST_PATH} for package test purposes." |
| 4 | 4 | ||
| 5 | PTEST_PATH ?= "${libdir}/${BPN}/ptest" | 5 | PTEST_PATH ?= "${libdir}/${BPN}/ptest" |
| 6 | PTEST_BUILD_HOST_FILES ?= "Makefile" | 6 | PTEST_BUILD_HOST_FILES ?= "Makefile" |
| 7 | PTEST_BUILD_HOST_PATTERN ?= "" | 7 | PTEST_BUILD_HOST_PATTERN ?= "" |
| 8 | 8 | ||
| 9 | FILES_${PN}-ptest += "${PTEST_PATH}" | 9 | FILES:${PN}-ptest += "${PTEST_PATH}" |
| 10 | SECTION_${PN}-ptest = "devel" | 10 | SECTION:${PN}-ptest = "devel" |
| 11 | ALLOW_EMPTY_${PN}-ptest = "1" | 11 | ALLOW_EMPTY:${PN}-ptest = "1" |
| 12 | PTEST_ENABLED = "${@bb.utils.contains('DISTRO_FEATURES', 'ptest', '1', '0', d)}" | 12 | PTEST_ENABLED = "${@bb.utils.contains('DISTRO_FEATURES', 'ptest', '1', '0', d)}" |
| 13 | PTEST_ENABLED_class-native = "" | 13 | PTEST_ENABLED:class-native = "" |
| 14 | PTEST_ENABLED_class-nativesdk = "" | 14 | PTEST_ENABLED:class-nativesdk = "" |
| 15 | PTEST_ENABLED_class-cross-canadian = "" | 15 | PTEST_ENABLED:class-cross-canadian = "" |
| 16 | RDEPENDS_${PN}-ptest += "${PN}" | 16 | RDEPENDS:${PN}-ptest += "${PN}" |
| 17 | RDEPENDS_${PN}-ptest_class-native = "" | 17 | RDEPENDS:${PN}-ptest:class-native = "" |
| 18 | RDEPENDS_${PN}-ptest_class-nativesdk = "" | 18 | RDEPENDS:${PN}-ptest:class-nativesdk = "" |
| 19 | RRECOMMENDS_${PN}-ptest += "ptest-runner" | 19 | RRECOMMENDS:${PN}-ptest += "ptest-runner" |
| 20 | 20 | ||
| 21 | PACKAGES =+ "${@bb.utils.contains('PTEST_ENABLED', '1', '${PN}-ptest', '', d)}" | 21 | PACKAGES =+ "${@bb.utils.contains('PTEST_ENABLED', '1', '${PN}-ptest', '', d)}" |
| 22 | 22 | ||
| @@ -73,7 +73,7 @@ PTEST_BINDIR_PKGD_PATH = "${PKGD}${PTEST_PATH}/bin" | |||
| 73 | # This function needs to run after apply_update_alternative_renames because the | 73 | # This function needs to run after apply_update_alternative_renames because the |
| 74 | # aforementioned function will update the ALTERNATIVE_LINK_NAME flag. Append is | 74 | # aforementioned function will update the ALTERNATIVE_LINK_NAME flag. Append is |
| 75 | # used here to make this function to run as late as possible. | 75 | # used here to make this function to run as late as possible. |
| 76 | PACKAGE_PREPROCESS_FUNCS_append = "${@bb.utils.contains('PTEST_BINDIR', '1', \ | 76 | PACKAGE_PREPROCESS_FUNCS:append = "${@bb.utils.contains('PTEST_BINDIR', '1', \ |
| 77 | bb.utils.contains('PTEST_ENABLED', '1', ' ptest_update_alternatives', '', d), '', d)}" | 77 | bb.utils.contains('PTEST_ENABLED', '1', ' ptest_update_alternatives', '', d), '', d)}" |
| 78 | 78 | ||
| 79 | python ptest_update_alternatives() { | 79 | python ptest_update_alternatives() { |
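The weak defaults (?=) above can still be overridden per recipe, now with ":" in the key; for instance (the wording is an example):

    SUMMARY:${PN}-ptest = "Runtime self-tests for ${PN}"
    DESCRIPTION:${PN}-ptest = "Scripts and data needed to run the ${PN} test suite on target."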
diff --git a/meta/classes/pypi.bbclass b/meta/classes/pypi.bbclass index 384a209874..272c220bca 100644 --- a/meta/classes/pypi.bbclass +++ b/meta/classes/pypi.bbclass | |||
| @@ -19,7 +19,7 @@ PYPI_SRC_URI ?= "${@pypi_src_uri(d)}" | |||
| 19 | 19 | ||
| 20 | HOMEPAGE ?= "https://pypi.python.org/pypi/${PYPI_PACKAGE}/" | 20 | HOMEPAGE ?= "https://pypi.python.org/pypi/${PYPI_PACKAGE}/" |
| 21 | SECTION = "devel/python" | 21 | SECTION = "devel/python" |
| 22 | SRC_URI_prepend = "${PYPI_SRC_URI} " | 22 | SRC_URI:prepend = "${PYPI_SRC_URI} " |
| 23 | S = "${WORKDIR}/${PYPI_PACKAGE}-${PV}" | 23 | S = "${WORKDIR}/${PYPI_PACKAGE}-${PV}" |
| 24 | 24 | ||
| 25 | UPSTREAM_CHECK_URI ?= "https://pypi.org/project/${PYPI_PACKAGE}/" | 25 | UPSTREAM_CHECK_URI ?= "https://pypi.org/project/${PYPI_PACKAGE}/" |
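With SRC_URI:prepend supplied by the class, a PyPI recipe only needs to name the project. A sketch, where the project name is made up and the checksum is a placeholder:

    inherit pypi setuptools3
    PYPI_PACKAGE = "example-project"
    # Placeholder checksum; replace with the real sdist sha256.
    SRC_URI[sha256sum] = "0000000000000000000000000000000000000000000000000000000000000000"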
diff --git a/meta/classes/python3native.bbclass b/meta/classes/python3native.bbclass index 2e3a88c126..13fbaa5f9c 100644 --- a/meta/classes/python3native.bbclass +++ b/meta/classes/python3native.bbclass | |||
| @@ -2,7 +2,7 @@ inherit python3-dir | |||
| 2 | 2 | ||
| 3 | PYTHON="${STAGING_BINDIR_NATIVE}/python3-native/python3" | 3 | PYTHON="${STAGING_BINDIR_NATIVE}/python3-native/python3" |
| 4 | EXTRANATIVEPATH += "python3-native" | 4 | EXTRANATIVEPATH += "python3-native" |
| 5 | DEPENDS_append = " python3-native " | 5 | DEPENDS:append = " python3-native " |
| 6 | 6 | ||
| 7 | # python-config and other scripts are using distutils modules | 7 | # python-config and other scripts are using distutils modules |
| 8 | # which we patch to access these variables | 8 | # which we patch to access these variables |
diff --git a/meta/classes/python3targetconfig.bbclass b/meta/classes/python3targetconfig.bbclass index fc1025c207..5c8457acaa 100644 --- a/meta/classes/python3targetconfig.bbclass +++ b/meta/classes/python3targetconfig.bbclass | |||
| @@ -1,17 +1,17 @@ | |||
| 1 | inherit python3native | 1 | inherit python3native |
| 2 | 2 | ||
| 3 | EXTRA_PYTHON_DEPENDS ?= "" | 3 | EXTRA_PYTHON_DEPENDS ?= "" |
| 4 | EXTRA_PYTHON_DEPENDS_class-target = "python3" | 4 | EXTRA_PYTHON_DEPENDS:class-target = "python3" |
| 5 | DEPENDS_append = " ${EXTRA_PYTHON_DEPENDS}" | 5 | DEPENDS:append = " ${EXTRA_PYTHON_DEPENDS}" |
| 6 | 6 | ||
| 7 | do_configure_prepend_class-target() { | 7 | do_configure:prepend:class-target() { |
| 8 | export _PYTHON_SYSCONFIGDATA_NAME="_sysconfigdata" | 8 | export _PYTHON_SYSCONFIGDATA_NAME="_sysconfigdata" |
| 9 | } | 9 | } |
| 10 | 10 | ||
| 11 | do_compile_prepend_class-target() { | 11 | do_compile:prepend:class-target() { |
| 12 | export _PYTHON_SYSCONFIGDATA_NAME="_sysconfigdata" | 12 | export _PYTHON_SYSCONFIGDATA_NAME="_sysconfigdata" |
| 13 | } | 13 | } |
| 14 | 14 | ||
| 15 | do_install_prepend_class-target() { | 15 | do_install:prepend:class-target() { |
| 16 | export _PYTHON_SYSCONFIGDATA_NAME="_sysconfigdata" | 16 | export _PYTHON_SYSCONFIGDATA_NAME="_sysconfigdata" |
| 17 | } | 17 | } |
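The converted do_configure/compile/install:prepend:class-target functions above export _PYTHON_SYSCONFIGDATA_NAME so the target sysconfig data is used. A recipe building target Python extension modules would normally just inherit the class; this fragment is a sketch, not part of the commit:

    # Sketch: a target package that needs the target Python sysconfig data.
    inherit setuptools3 python3targetconfig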
diff --git a/meta/classes/qemu.bbclass b/meta/classes/qemu.bbclass index 55bdff816b..01a7b86ae1 100644 --- a/meta/classes/qemu.bbclass +++ b/meta/classes/qemu.bbclass | |||
| @@ -64,4 +64,4 @@ QEMU_EXTRAOPTIONS_ppc64e5500 = " -cpu e500mc" | |||
| 64 | QEMU_EXTRAOPTIONS_ppce6500 = " -cpu e500mc" | 64 | QEMU_EXTRAOPTIONS_ppce6500 = " -cpu e500mc" |
| 65 | QEMU_EXTRAOPTIONS_ppc64e6500 = " -cpu e500mc" | 65 | QEMU_EXTRAOPTIONS_ppc64e6500 = " -cpu e500mc" |
| 66 | QEMU_EXTRAOPTIONS_ppc7400 = " -cpu 7400" | 66 | QEMU_EXTRAOPTIONS_ppc7400 = " -cpu 7400" |
| 67 | QEMU_EXTRAOPTIONS_powerpc64le = " -cpu POWER8" | 67 | QEMU_EXTRAOPTIONS:powerpc64le = " -cpu POWER8" |
diff --git a/meta/classes/qemuboot.bbclass b/meta/classes/qemuboot.bbclass index 2b50ddaa22..aaf821d6dd 100644 --- a/meta/classes/qemuboot.bbclass +++ b/meta/classes/qemuboot.bbclass | |||
| @@ -43,7 +43,7 @@ | |||
| 43 | # a custom one, but that may cause conflicts when multiple qemus are | 43 | # a custom one, but that may cause conflicts when multiple qemus are |
| 44 | # running on the same host. | 44 | # running on the same host. |
| 45 | # Note: If more than one interface of type -device virtio-net-device gets added, | 45 | # Note: If more than one interface of type -device virtio-net-device gets added, |
| 46 | # QB_NETWORK_DEVICE_prepend might be used, since Qemu enumerates the eth* | 46 | # QB_NETWORK_DEVICE:prepend might be used, since Qemu enumerates the eth* |
| 47 | # devices in reverse order to -device arguments. | 47 | # devices in reverse order to -device arguments. |
| 48 | # | 48 | # |
| 49 | # QB_TAP_OPT: network option for 'tap' mode, e.g., | 49 | # QB_TAP_OPT: network option for 'tap' mode, e.g., |
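As the comment notes, QB_NETWORK_DEVICE:prepend can add further virtio-net devices ahead of the default one. A machine-configuration sketch; the netdev id and MAC address are made up:

    QB_NETWORK_DEVICE:prepend = "-device virtio-net-device,netdev=net1,mac=52:54:00:12:34:03 "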
diff --git a/meta/classes/reproducible_build.bbclass b/meta/classes/reproducible_build.bbclass index 278eeedc74..378121903d 100644 --- a/meta/classes/reproducible_build.bbclass +++ b/meta/classes/reproducible_build.bbclass | |||
| @@ -42,7 +42,7 @@ SDE_FILE = "${SDE_DIR}/__source_date_epoch.txt" | |||
| 42 | SDE_DEPLOYDIR = "${WORKDIR}/deploy-source-date-epoch" | 42 | SDE_DEPLOYDIR = "${WORKDIR}/deploy-source-date-epoch" |
| 43 | 43 | ||
| 44 | # Enable compiler warning when the __TIME__, __DATE__ and __TIMESTAMP__ macros are used. | 44 | # Enable compiler warning when the __TIME__, __DATE__ and __TIMESTAMP__ macros are used. |
| 45 | TARGET_CC_ARCH_append_class-target = " -Wdate-time" | 45 | TARGET_CC_ARCH:append:class-target = " -Wdate-time" |
| 46 | 46 | ||
| 47 | # A SOURCE_DATE_EPOCH of '0' might be misinterpreted as no SDE | 47 | # A SOURCE_DATE_EPOCH of '0' might be misinterpreted as no SDE |
| 48 | export SOURCE_DATE_EPOCH_FALLBACK ??= "1302044400" | 48 | export SOURCE_DATE_EPOCH_FALLBACK ??= "1302044400" |
diff --git a/meta/classes/rm_work.bbclass b/meta/classes/rm_work.bbclass index 01c2ab1c78..07901d7597 100644 --- a/meta/classes/rm_work.bbclass +++ b/meta/classes/rm_work.bbclass | |||
| @@ -13,7 +13,7 @@ | |||
| 13 | # Recipes can also configure which entries in their ${WORKDIR} | 13 | # Recipes can also configure which entries in their ${WORKDIR} |
| 14 | # are preserved besides temp, which already gets excluded by default | 14 | # are preserved besides temp, which already gets excluded by default |
| 15 | # because it contains logs: | 15 | # because it contains logs: |
| 16 | # do_install_append () { | 16 | # do_install:append () { |
| 17 | # echo "bar" >${WORKDIR}/foo | 17 | # echo "bar" >${WORKDIR}/foo |
| 18 | # } | 18 | # } |
| 19 | # RM_WORK_EXCLUDE_ITEMS += "foo" | 19 | # RM_WORK_EXCLUDE_ITEMS += "foo" |
| @@ -24,7 +24,7 @@ RM_WORK_EXCLUDE_ITEMS = "temp" | |||
| 24 | BB_SCHEDULER ?= "completion" | 24 | BB_SCHEDULER ?= "completion" |
| 25 | 25 | ||
| 26 | # Run the rm_work task in the idle scheduling class | 26 | # Run the rm_work task in the idle scheduling class |
| 27 | BB_TASK_IONICE_LEVEL_task-rm_work = "3.0" | 27 | BB_TASK_IONICE_LEVEL:task-rm_work = "3.0" |
| 28 | 28 | ||
| 29 | do_rm_work () { | 29 | do_rm_work () { |
| 30 | # If the recipe name is in the RM_WORK_EXCLUDE, skip the recipe. | 30 | # If the recipe name is in the RM_WORK_EXCLUDE, skip the recipe. |
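The class comment's example now reads do_install:append. In the same spirit, a local.conf sketch that keeps one recipe's work directory while rm_work is active (the recipe name is illustrative):

    INHERIT += "rm_work"
    # Skip cleanup for a recipe being debugged; see the RM_WORK_EXCLUDE check in do_rm_work.
    RM_WORK_EXCLUDE += "busybox"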
diff --git a/meta/classes/rm_work_and_downloads.bbclass b/meta/classes/rm_work_and_downloads.bbclass index 7c00bea597..15e6091b9d 100644 --- a/meta/classes/rm_work_and_downloads.bbclass +++ b/meta/classes/rm_work_and_downloads.bbclass | |||
| @@ -28,6 +28,6 @@ inherit rm_work | |||
| 28 | 28 | ||
| 29 | # Instead go up one level and remove ourself. | 29 | # Instead go up one level and remove ourself. |
| 30 | DL_DIR = "${BASE_WORKDIR}/${MULTIMACH_TARGET_SYS}/${PN}/downloads" | 30 | DL_DIR = "${BASE_WORKDIR}/${MULTIMACH_TARGET_SYS}/${PN}/downloads" |
| 31 | do_rm_work_append () { | 31 | do_rm_work:append () { |
| 32 | rm -rf ${DL_DIR} | 32 | rm -rf ${DL_DIR} |
| 33 | } | 33 | } |
diff --git a/meta/classes/rootfs-postcommands.bbclass b/meta/classes/rootfs-postcommands.bbclass index e66ed5938b..fbfa63fcb3 100644 --- a/meta/classes/rootfs-postcommands.bbclass +++ b/meta/classes/rootfs-postcommands.bbclass | |||
| @@ -23,7 +23,7 @@ ROOTFS_POSTPROCESS_COMMAND += '${@bb.utils.contains("IMAGE_FEATURES", "read-only | |||
| 23 | # | 23 | # |
| 24 | # We do this with _append because the default value might get set later with ?= | 24 | # We do this with _append because the default value might get set later with ?= |
| 25 | # and we don't want to disable such a default that by setting a value here. | 25 | # and we don't want to disable such a default that by setting a value here. |
| 26 | APPEND_append = '${@bb.utils.contains("IMAGE_FEATURES", "read-only-rootfs", " ro", "", d)}' | 26 | APPEND:append = '${@bb.utils.contains("IMAGE_FEATURES", "read-only-rootfs", " ro", "", d)}' |
| 27 | 27 | ||
| 28 | # Generates test data file with data store variables expanded in json format | 28 | # Generates test data file with data store variables expanded in json format |
| 29 | ROOTFS_POSTPROCESS_COMMAND += "write_image_test_data; " | 29 | ROOTFS_POSTPROCESS_COMMAND += "write_image_test_data; " |
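APPEND:append above only takes effect when read-only-rootfs is among the image features. For illustration, an image recipe or local.conf would enable it like this; the class then appends " ro" to the kernel command line carried in APPEND:

    IMAGE_FEATURES += "read-only-rootfs"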
diff --git a/meta/classes/rootfs_rpm.bbclass b/meta/classes/rootfs_rpm.bbclass index 0af7d65b1a..3d8d784f79 100644 --- a/meta/classes/rootfs_rpm.bbclass +++ b/meta/classes/rootfs_rpm.bbclass | |||
| @@ -9,7 +9,7 @@ export STAGING_INCDIR | |||
| 9 | export STAGING_LIBDIR | 9 | export STAGING_LIBDIR |
| 10 | 10 | ||
| 11 | # Add 100Meg of extra space for dnf | 11 | # Add 100Meg of extra space for dnf |
| 12 | IMAGE_ROOTFS_EXTRA_SPACE_append = "${@bb.utils.contains("PACKAGE_INSTALL", "dnf", " + 102400", "", d)}" | 12 | IMAGE_ROOTFS_EXTRA_SPACE:append = "${@bb.utils.contains("PACKAGE_INSTALL", "dnf", " + 102400", "", d)}" |
| 13 | 13 | ||
| 14 | # Dnf is python based, so be sure python3-native is available to us. | 14 | # Dnf is python based, so be sure python3-native is available to us. |
| 15 | EXTRANATIVEPATH += "python3-native" | 15 | EXTRANATIVEPATH += "python3-native" |
diff --git a/meta/classes/sanity.bbclass b/meta/classes/sanity.bbclass index e907a3566f..29cc42efe7 100644 --- a/meta/classes/sanity.bbclass +++ b/meta/classes/sanity.bbclass | |||
| @@ -227,7 +227,7 @@ def check_toolchain_tune(data, tune, multilib): | |||
| 227 | overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + multilib | 227 | overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + multilib |
| 228 | localdata.setVar("OVERRIDES", overrides) | 228 | localdata.setVar("OVERRIDES", overrides) |
| 229 | bb.debug(2, "Sanity-checking tuning '%s' (%s) features:" % (tune, multilib)) | 229 | bb.debug(2, "Sanity-checking tuning '%s' (%s) features:" % (tune, multilib)) |
| 230 | features = (localdata.getVar("TUNE_FEATURES_tune-%s" % tune) or "").split() | 230 | features = (localdata.getVar("TUNE_FEATURES:tune-%s" % tune) or "").split() |
| 231 | if not features: | 231 | if not features: |
| 232 | return "Tuning '%s' has no defined features, and cannot be used." % tune | 232 | return "Tuning '%s' has no defined features, and cannot be used." % tune |
| 233 | valid_tunes = localdata.getVarFlags('TUNEVALID') or {} | 233 | valid_tunes = localdata.getVarFlags('TUNEVALID') or {} |
| @@ -249,7 +249,7 @@ def check_toolchain_tune(data, tune, multilib): | |||
| 249 | tune_errors.append("Feature '%s' is not defined." % feature) | 249 | tune_errors.append("Feature '%s' is not defined." % feature) |
| 250 | whitelist = localdata.getVar("TUNEABI_WHITELIST") | 250 | whitelist = localdata.getVar("TUNEABI_WHITELIST") |
| 251 | if whitelist: | 251 | if whitelist: |
| 252 | tuneabi = localdata.getVar("TUNEABI_tune-%s" % tune) | 252 | tuneabi = localdata.getVar("TUNEABI:tune-%s" % tune) |
| 253 | if not tuneabi: | 253 | if not tuneabi: |
| 254 | tuneabi = tune | 254 | tuneabi = tune |
| 255 | if True not in [x in whitelist.split() for x in tuneabi.split()]: | 255 | if True not in [x in whitelist.split() for x in tuneabi.split()]: |
| @@ -281,7 +281,7 @@ def check_toolchain(data): | |||
| 281 | seen_libs.append(lib) | 281 | seen_libs.append(lib) |
| 282 | if not lib in global_multilibs: | 282 | if not lib in global_multilibs: |
| 283 | tune_error_set.append("Multilib %s is not present in MULTILIB_GLOBAL_VARIANTS" % lib) | 283 | tune_error_set.append("Multilib %s is not present in MULTILIB_GLOBAL_VARIANTS" % lib) |
| 284 | tune = data.getVar("DEFAULTTUNE_virtclass-multilib-%s" % lib) | 284 | tune = data.getVar("DEFAULTTUNE:virtclass-multilib-%s" % lib) |
| 285 | if tune in seen_tunes: | 285 | if tune in seen_tunes: |
| 286 | tune_error_set.append("The tuning '%s' appears in more than one multilib." % tune) | 286 | tune_error_set.append("The tuning '%s' appears in more than one multilib." % tune) |
| 287 | else: | 287 | else: |
diff --git a/meta/classes/sign_package_feed.bbclass b/meta/classes/sign_package_feed.bbclass index 7ff3a35a2f..16bcd147aa 100644 --- a/meta/classes/sign_package_feed.bbclass +++ b/meta/classes/sign_package_feed.bbclass | |||
| @@ -29,7 +29,7 @@ PACKAGE_FEED_GPG_BACKEND ?= 'local' | |||
| 29 | PACKAGE_FEED_GPG_SIGNATURE_TYPE ?= 'ASC' | 29 | PACKAGE_FEED_GPG_SIGNATURE_TYPE ?= 'ASC' |
| 30 | 30 | ||
| 31 | # Make feed signing key to be present in rootfs | 31 | # Make feed signing key to be present in rootfs |
| 32 | FEATURE_PACKAGES_package-management_append = " signing-keys-packagefeed" | 32 | FEATURE_PACKAGES_package-management:append = " signing-keys-packagefeed" |
| 33 | 33 | ||
| 34 | python () { | 34 | python () { |
| 35 | # Check sanity of configuration | 35 | # Check sanity of configuration |
diff --git a/meta/classes/staging.bbclass b/meta/classes/staging.bbclass index 32a615c743..af3397bab6 100644 --- a/meta/classes/staging.bbclass +++ b/meta/classes/staging.bbclass | |||
| @@ -19,9 +19,9 @@ SYSROOT_DIRS_NATIVE = " \ | |||
| 19 | ${sysconfdir} \ | 19 | ${sysconfdir} \ |
| 20 | ${localstatedir} \ | 20 | ${localstatedir} \ |
| 21 | " | 21 | " |
| 22 | SYSROOT_DIRS_append_class-native = " ${SYSROOT_DIRS_NATIVE}" | 22 | SYSROOT_DIRS:append:class-native = " ${SYSROOT_DIRS_NATIVE}" |
| 23 | SYSROOT_DIRS_append_class-cross = " ${SYSROOT_DIRS_NATIVE}" | 23 | SYSROOT_DIRS:append:class-cross = " ${SYSROOT_DIRS_NATIVE}" |
| 24 | SYSROOT_DIRS_append_class-crosssdk = " ${SYSROOT_DIRS_NATIVE}" | 24 | SYSROOT_DIRS:append:class-crosssdk = " ${SYSROOT_DIRS_NATIVE}" |
| 25 | 25 | ||
| 26 | # These directories will not be staged in the sysroot | 26 | # These directories will not be staged in the sysroot |
| 27 | SYSROOT_DIRS_BLACKLIST = " \ | 27 | SYSROOT_DIRS_BLACKLIST = " \ |
| @@ -82,7 +82,7 @@ python sysroot_strip () { | |||
| 82 | pn = d.getVar('PN') | 82 | pn = d.getVar('PN') |
| 83 | libdir = d.getVar("libdir") | 83 | libdir = d.getVar("libdir") |
| 84 | base_libdir = d.getVar("base_libdir") | 84 | base_libdir = d.getVar("base_libdir") |
| 85 | qa_already_stripped = 'already-stripped' in (d.getVar('INSANE_SKIP_' + pn) or "").split() | 85 | qa_already_stripped = 'already-stripped' in (d.getVar('INSANE_SKIP:' + pn) or "").split() |
| 86 | strip_cmd = d.getVar("STRIP") | 86 | strip_cmd = d.getVar("STRIP") |
| 87 | 87 | ||
| 88 | oe.package.strip_execs(pn, dstdir, strip_cmd, libdir, base_libdir, d, | 88 | oe.package.strip_execs(pn, dstdir, strip_cmd, libdir, base_libdir, d, |
| @@ -118,8 +118,8 @@ do_populate_sysroot[vardeps] += "${SYSROOT_PREPROCESS_FUNCS}" | |||
| 118 | do_populate_sysroot[vardepsexclude] += "MULTI_PROVIDER_WHITELIST" | 118 | do_populate_sysroot[vardepsexclude] += "MULTI_PROVIDER_WHITELIST" |
| 119 | 119 | ||
| 120 | POPULATESYSROOTDEPS = "" | 120 | POPULATESYSROOTDEPS = "" |
| 121 | POPULATESYSROOTDEPS_class-target = "virtual/${MLPREFIX}${TARGET_PREFIX}binutils:do_populate_sysroot" | 121 | POPULATESYSROOTDEPS:class-target = "virtual/${MLPREFIX}${TARGET_PREFIX}binutils:do_populate_sysroot" |
| 122 | POPULATESYSROOTDEPS_class-nativesdk = "virtual/${TARGET_PREFIX}binutils-crosssdk:do_populate_sysroot" | 122 | POPULATESYSROOTDEPS:class-nativesdk = "virtual/${TARGET_PREFIX}binutils-crosssdk:do_populate_sysroot" |
| 123 | do_populate_sysroot[depends] += "${POPULATESYSROOTDEPS}" | 123 | do_populate_sysroot[depends] += "${POPULATESYSROOTDEPS}" |
| 124 | 124 | ||
| 125 | SSTATETASKS += "do_populate_sysroot" | 125 | SSTATETASKS += "do_populate_sysroot" |
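SYSROOT_DIRS:append:class-native and friends above show the converted class-specific appends. A recipe that needs to stage an extra directory would use the unconditional form; the path is a placeholder:

    SYSROOT_DIRS:append = " ${datadir}/example"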
diff --git a/meta/classes/systemd-boot.bbclass b/meta/classes/systemd-boot.bbclass index 336c4c2ff5..57ec0acbc5 100644 --- a/meta/classes/systemd-boot.bbclass +++ b/meta/classes/systemd-boot.bbclass | |||
| @@ -28,7 +28,7 @@ efi_populate() { | |||
| 28 | done | 28 | done |
| 29 | } | 29 | } |
| 30 | 30 | ||
| 31 | efi_iso_populate_append() { | 31 | efi_iso_populate:append() { |
| 32 | cp -r $iso_dir/loader ${EFIIMGDIR} | 32 | cp -r $iso_dir/loader ${EFIIMGDIR} |
| 33 | } | 33 | } |
| 34 | 34 | ||
diff --git a/meta/classes/systemd.bbclass b/meta/classes/systemd.bbclass index db5d109545..2a272a245f 100644 --- a/meta/classes/systemd.bbclass +++ b/meta/classes/systemd.bbclass | |||
| @@ -1,9 +1,9 @@ | |||
| 1 | # The list of packages that should have systemd packaging scripts added. For | 1 | # The list of packages that should have systemd packaging scripts added. For |
| 2 | # each entry, optionally have a SYSTEMD_SERVICE_[package] that lists the service | 2 | # each entry, optionally have a SYSTEMD_SERVICE:[package] that lists the service |
| 3 | # files in this package. If this variable isn't set, [package].service is used. | 3 | # files in this package. If this variable isn't set, [package].service is used. |
| 4 | SYSTEMD_PACKAGES ?= "${PN}" | 4 | SYSTEMD_PACKAGES ?= "${PN}" |
| 5 | SYSTEMD_PACKAGES_class-native ?= "" | 5 | SYSTEMD_PACKAGES:class-native ?= "" |
| 6 | SYSTEMD_PACKAGES_class-nativesdk ?= "" | 6 | SYSTEMD_PACKAGES:class-nativesdk ?= "" |
| 7 | 7 | ||
| 8 | # Whether to enable or disable the services on installation. | 8 | # Whether to enable or disable the services on installation. |
| 9 | SYSTEMD_AUTO_ENABLE ??= "enable" | 9 | SYSTEMD_AUTO_ENABLE ??= "enable" |
| @@ -85,39 +85,39 @@ python systemd_populate_packages() { | |||
| 85 | def systemd_generate_package_scripts(pkg): | 85 | def systemd_generate_package_scripts(pkg): |
| 86 | bb.debug(1, 'adding systemd calls to postinst/postrm for %s' % pkg) | 86 | bb.debug(1, 'adding systemd calls to postinst/postrm for %s' % pkg) |
| 87 | 87 | ||
| 88 | paths_escaped = ' '.join(shlex.quote(s) for s in d.getVar('SYSTEMD_SERVICE_' + pkg).split()) | 88 | paths_escaped = ' '.join(shlex.quote(s) for s in d.getVar('SYSTEMD_SERVICE:' + pkg).split()) |
| 89 | d.setVar('SYSTEMD_SERVICE_ESCAPED_' + pkg, paths_escaped) | 89 | d.setVar('SYSTEMD_SERVICE_ESCAPED:' + pkg, paths_escaped) |
| 90 | 90 | ||
| 91 | # Add pkg to the overrides so that it finds the SYSTEMD_SERVICE_pkg | 91 | # Add pkg to the overrides so that it finds the SYSTEMD_SERVICE:pkg |
| 92 | # variable. | 92 | # variable. |
| 93 | localdata = d.createCopy() | 93 | localdata = d.createCopy() |
| 94 | localdata.prependVar("OVERRIDES", pkg + ":") | 94 | localdata.prependVar("OVERRIDES", pkg + ":") |
| 95 | 95 | ||
| 96 | postinst = d.getVar('pkg_postinst_%s' % pkg) | 96 | postinst = d.getVar('pkg_postinst:%s' % pkg) |
| 97 | if not postinst: | 97 | if not postinst: |
| 98 | postinst = '#!/bin/sh\n' | 98 | postinst = '#!/bin/sh\n' |
| 99 | postinst += localdata.getVar('systemd_postinst') | 99 | postinst += localdata.getVar('systemd_postinst') |
| 100 | d.setVar('pkg_postinst_%s' % pkg, postinst) | 100 | d.setVar('pkg_postinst:%s' % pkg, postinst) |
| 101 | 101 | ||
| 102 | prerm = d.getVar('pkg_prerm_%s' % pkg) | 102 | prerm = d.getVar('pkg_prerm:%s' % pkg) |
| 103 | if not prerm: | 103 | if not prerm: |
| 104 | prerm = '#!/bin/sh\n' | 104 | prerm = '#!/bin/sh\n' |
| 105 | prerm += localdata.getVar('systemd_prerm') | 105 | prerm += localdata.getVar('systemd_prerm') |
| 106 | d.setVar('pkg_prerm_%s' % pkg, prerm) | 106 | d.setVar('pkg_prerm:%s' % pkg, prerm) |
| 107 | 107 | ||
| 108 | 108 | ||
| 109 | # Add files to FILES_*-systemd if existent and not already done | 109 | # Add files to FILES:*-systemd if existent and not already done |
| 110 | def systemd_append_file(pkg_systemd, file_append): | 110 | def systemd_append_file(pkg_systemd, file_append): |
| 111 | appended = False | 111 | appended = False |
| 112 | if os.path.exists(oe.path.join(d.getVar("D"), file_append)): | 112 | if os.path.exists(oe.path.join(d.getVar("D"), file_append)): |
| 113 | var_name = "FILES_" + pkg_systemd | 113 | var_name = "FILES:" + pkg_systemd |
| 114 | files = d.getVar(var_name, False) or "" | 114 | files = d.getVar(var_name, False) or "" |
| 115 | if file_append not in files.split(): | 115 | if file_append not in files.split(): |
| 116 | d.appendVar(var_name, " " + file_append) | 116 | d.appendVar(var_name, " " + file_append) |
| 117 | appended = True | 117 | appended = True |
| 118 | return appended | 118 | return appended |
| 119 | 119 | ||
| 120 | # Add systemd files to FILES_*-systemd, parse for Also= and follow recursive | 120 | # Add systemd files to FILES:*-systemd, parse for Also= and follow recursive |
| 121 | def systemd_add_files_and_parse(pkg_systemd, path, service, keys): | 121 | def systemd_add_files_and_parse(pkg_systemd, path, service, keys): |
| 122 | # avoid infinite recursion | 122 | # avoid infinite recursion |
| 123 | if systemd_append_file(pkg_systemd, oe.path.join(path, service)): | 123 | if systemd_append_file(pkg_systemd, oe.path.join(path, service)): |
| @@ -174,32 +174,32 @@ python systemd_populate_packages() { | |||
| 174 | if path_found != '': | 174 | if path_found != '': |
| 175 | systemd_add_files_and_parse(pkg_systemd, path_found, service, keys) | 175 | systemd_add_files_and_parse(pkg_systemd, path_found, service, keys) |
| 176 | else: | 176 | else: |
| 177 | bb.fatal("Didn't find service unit '{0}', specified in SYSTEMD_SERVICE_{1}. {2}".format( | 177 | bb.fatal("Didn't find service unit '{0}', specified in SYSTEMD_SERVICE:{1}. {2}".format( |
| 178 | service, pkg_systemd, "Also looked for service unit '{0}'.".format(base) if base is not None else "")) | 178 | service, pkg_systemd, "Also looked for service unit '{0}'.".format(base) if base is not None else "")) |
| 179 | 179 | ||
| 180 | def systemd_create_presets(pkg, action): | 180 | def systemd_create_presets(pkg, action): |
| 181 | presetf = oe.path.join(d.getVar("PKGD"), d.getVar("systemd_unitdir"), "system-preset/98-%s.preset" % pkg) | 181 | presetf = oe.path.join(d.getVar("PKGD"), d.getVar("systemd_unitdir"), "system-preset/98-%s.preset" % pkg) |
| 182 | bb.utils.mkdirhier(os.path.dirname(presetf)) | 182 | bb.utils.mkdirhier(os.path.dirname(presetf)) |
| 183 | with open(presetf, 'a') as fd: | 183 | with open(presetf, 'a') as fd: |
| 184 | for service in d.getVar('SYSTEMD_SERVICE_%s' % pkg).split(): | 184 | for service in d.getVar('SYSTEMD_SERVICE:%s' % pkg).split(): |
| 185 | fd.write("%s %s\n" % (action,service)) | 185 | fd.write("%s %s\n" % (action,service)) |
| 186 | d.appendVar("FILES_%s" % pkg, ' ' + oe.path.join(d.getVar("systemd_unitdir"), "system-preset/98-%s.preset" % pkg)) | 186 | d.appendVar("FILES:%s" % pkg, ' ' + oe.path.join(d.getVar("systemd_unitdir"), "system-preset/98-%s.preset" % pkg)) |
| 187 | 187 | ||
| 188 | # Run all modifications once when creating package | 188 | # Run all modifications once when creating package |
| 189 | if os.path.exists(d.getVar("D")): | 189 | if os.path.exists(d.getVar("D")): |
| 190 | for pkg in d.getVar('SYSTEMD_PACKAGES').split(): | 190 | for pkg in d.getVar('SYSTEMD_PACKAGES').split(): |
| 191 | systemd_check_package(pkg) | 191 | systemd_check_package(pkg) |
| 192 | if d.getVar('SYSTEMD_SERVICE_' + pkg): | 192 | if d.getVar('SYSTEMD_SERVICE:' + pkg): |
| 193 | systemd_generate_package_scripts(pkg) | 193 | systemd_generate_package_scripts(pkg) |
| 194 | action = get_package_var(d, 'SYSTEMD_AUTO_ENABLE', pkg) | 194 | action = get_package_var(d, 'SYSTEMD_AUTO_ENABLE', pkg) |
| 195 | if action in ("enable", "disable"): | 195 | if action in ("enable", "disable"): |
| 196 | systemd_create_presets(pkg, action) | 196 | systemd_create_presets(pkg, action) |
| 197 | elif action not in ("mask", "preset"): | 197 | elif action not in ("mask", "preset"): |
| 198 | bb.fatal("SYSTEMD_AUTO_ENABLE_%s '%s' is not 'enable', 'disable', 'mask' or 'preset'" % (pkg, action)) | 198 | bb.fatal("SYSTEMD_AUTO_ENABLE:%s '%s' is not 'enable', 'disable', 'mask' or 'preset'" % (pkg, action)) |
| 199 | systemd_check_services() | 199 | systemd_check_services() |
| 200 | } | 200 | } |
| 201 | 201 | ||
| 202 | PACKAGESPLITFUNCS_prepend = "systemd_populate_packages " | 202 | PACKAGESPLITFUNCS:prepend = "systemd_populate_packages " |
| 203 | 203 | ||
| 204 | python rm_systemd_unitdir (){ | 204 | python rm_systemd_unitdir (){ |
| 205 | import shutil | 205 | import shutil |
| @@ -227,7 +227,7 @@ python rm_sysvinit_initddir (){ | |||
| 227 | } | 227 | } |
| 228 | 228 | ||
| 229 | do_install[postfuncs] += "${RMINITDIR} " | 229 | do_install[postfuncs] += "${RMINITDIR} " |
| 230 | RMINITDIR_class-target = " rm_sysvinit_initddir rm_systemd_unitdir " | 230 | RMINITDIR:class-target = " rm_sysvinit_initddir rm_systemd_unitdir " |
| 231 | RMINITDIR_class-nativesdk = " rm_sysvinit_initddir rm_systemd_unitdir " | 231 | RMINITDIR:class-nativesdk = " rm_sysvinit_initddir rm_systemd_unitdir " |
| 232 | RMINITDIR = "" | 232 | RMINITDIR = "" |
| 233 | 233 | ||
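Recipes consume the converted variables in the same way. A sketch of a daemon recipe, with the unit name hypothetical:

    inherit systemd
    SYSTEMD_PACKAGES = "${PN}"
    SYSTEMD_SERVICE:${PN} = "exampled.service"
    # Accepted values, per the check above: enable, disable, mask, preset.
    SYSTEMD_AUTO_ENABLE:${PN} = "enable"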
diff --git a/meta/classes/testimage.bbclass b/meta/classes/testimage.bbclass index ed3a885bdf..3c689aec91 100644 --- a/meta/classes/testimage.bbclass +++ b/meta/classes/testimage.bbclass | |||
| @@ -65,15 +65,15 @@ BASICTESTSUITE = "\ | |||
| 65 | DEFAULT_TEST_SUITES = "${BASICTESTSUITE}" | 65 | DEFAULT_TEST_SUITES = "${BASICTESTSUITE}" |
| 66 | 66 | ||
| 67 | # aarch64 has no graphics | 67 | # aarch64 has no graphics |
| 68 | DEFAULT_TEST_SUITES_remove_aarch64 = "xorg" | 68 | DEFAULT_TEST_SUITES:remove:aarch64 = "xorg" |
| 69 | # musl doesn't support systemtap | 69 | # musl doesn't support systemtap |
| 70 | DEFAULT_TEST_SUITES_remove_libc-musl = "stap" | 70 | DEFAULT_TEST_SUITES:remove:libc-musl = "stap" |
| 71 | 71 | ||
| 72 | # qemumips is quite slow and has reached the timeout limit several times on the YP build cluster, | 72 | # qemumips is quite slow and has reached the timeout limit several times on the YP build cluster, |
| 73 | # mitigate this by removing build tests for qemumips machines. | 73 | # mitigate this by removing build tests for qemumips machines. |
| 74 | MIPSREMOVE ??= "buildcpio buildlzip buildgalculator" | 74 | MIPSREMOVE ??= "buildcpio buildlzip buildgalculator" |
| 75 | DEFAULT_TEST_SUITES_remove_qemumips = "${MIPSREMOVE}" | 75 | DEFAULT_TEST_SUITES:remove:qemumips = "${MIPSREMOVE}" |
| 76 | DEFAULT_TEST_SUITES_remove_qemumips64 = "${MIPSREMOVE}" | 76 | DEFAULT_TEST_SUITES:remove:qemumips64 = "${MIPSREMOVE}" |
| 77 | 77 | ||
| 78 | TEST_SUITES ?= "${DEFAULT_TEST_SUITES}" | 78 | TEST_SUITES ?= "${DEFAULT_TEST_SUITES}" |
| 79 | 79 | ||
| @@ -86,7 +86,7 @@ TEST_RUNQEMUPARAMS ?= "" | |||
| 86 | TESTIMAGE_BOOT_PATTERNS ?= "" | 86 | TESTIMAGE_BOOT_PATTERNS ?= "" |
| 87 | 87 | ||
| 88 | TESTIMAGEDEPENDS = "" | 88 | TESTIMAGEDEPENDS = "" |
| 89 | TESTIMAGEDEPENDS_append_qemuall = " qemu-native:do_populate_sysroot qemu-helper-native:do_populate_sysroot qemu-helper-native:do_addto_recipe_sysroot" | 89 | TESTIMAGEDEPENDS:append:qemuall = " qemu-native:do_populate_sysroot qemu-helper-native:do_populate_sysroot qemu-helper-native:do_addto_recipe_sysroot" |
| 90 | TESTIMAGEDEPENDS += "${@bb.utils.contains('IMAGE_PKGTYPE', 'rpm', 'cpio-native:do_populate_sysroot', '', d)}" | 90 | TESTIMAGEDEPENDS += "${@bb.utils.contains('IMAGE_PKGTYPE', 'rpm', 'cpio-native:do_populate_sysroot', '', d)}" |
| 91 | TESTIMAGEDEPENDS += "${@bb.utils.contains('IMAGE_PKGTYPE', 'rpm', 'dnf-native:do_populate_sysroot', '', d)}" | 91 | TESTIMAGEDEPENDS += "${@bb.utils.contains('IMAGE_PKGTYPE', 'rpm', 'dnf-native:do_populate_sysroot', '', d)}" |
| 92 | TESTIMAGEDEPENDS += "${@bb.utils.contains('IMAGE_PKGTYPE', 'rpm', 'createrepo-c-native:do_populate_sysroot', '', d)}" | 92 | TESTIMAGEDEPENDS += "${@bb.utils.contains('IMAGE_PKGTYPE', 'rpm', 'createrepo-c-native:do_populate_sysroot', '', d)}" |
| @@ -94,7 +94,7 @@ TESTIMAGEDEPENDS += "${@bb.utils.contains('IMAGE_PKGTYPE', 'ipk', 'opkg-utils-na | |||
| 94 | TESTIMAGEDEPENDS += "${@bb.utils.contains('IMAGE_PKGTYPE', 'deb', 'apt-native:do_populate_sysroot package-index:do_package_index', '', d)}" | 94 | TESTIMAGEDEPENDS += "${@bb.utils.contains('IMAGE_PKGTYPE', 'deb', 'apt-native:do_populate_sysroot package-index:do_package_index', '', d)}" |
| 95 | 95 | ||
| 96 | TESTIMAGELOCK = "${TMPDIR}/testimage.lock" | 96 | TESTIMAGELOCK = "${TMPDIR}/testimage.lock" |
| 97 | TESTIMAGELOCK_qemuall = "" | 97 | TESTIMAGELOCK:qemuall = "" |
| 98 | 98 | ||
| 99 | TESTIMAGE_DUMP_DIR ?= "${LOG_DIR}/runtime-hostdump/" | 99 | TESTIMAGE_DUMP_DIR ?= "${LOG_DIR}/runtime-hostdump/" |
| 100 | 100 | ||
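The remove overrides above (DEFAULT_TEST_SUITES:remove:aarch64 and friends) now use the colon form as well. One way to run image tests with an explicit suite list, sketched for local.conf with example suite names:

    IMAGE_CLASSES += "testimage"
    TEST_SUITES = "ping ssh ptest"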
diff --git a/meta/classes/texinfo.bbclass b/meta/classes/texinfo.bbclass index f46bacabd4..68c9d4fb70 100644 --- a/meta/classes/texinfo.bbclass +++ b/meta/classes/texinfo.bbclass | |||
| @@ -7,12 +7,12 @@ | |||
| 7 | # makeinfo from SANITY_REQUIRED_UTILITIES. | 7 | # makeinfo from SANITY_REQUIRED_UTILITIES. |
| 8 | 8 | ||
| 9 | TEXDEP = "${@bb.utils.contains('DISTRO_FEATURES', 'api-documentation', 'texinfo-replacement-native', 'texinfo-dummy-native', d)}" | 9 | TEXDEP = "${@bb.utils.contains('DISTRO_FEATURES', 'api-documentation', 'texinfo-replacement-native', 'texinfo-dummy-native', d)}" |
| 10 | TEXDEP_class-native = "texinfo-dummy-native" | 10 | TEXDEP:class-native = "texinfo-dummy-native" |
| 11 | TEXDEP_class-cross = "texinfo-dummy-native" | 11 | TEXDEP:class-cross = "texinfo-dummy-native" |
| 12 | TEXDEP_class-crosssdk = "texinfo-dummy-native" | 12 | TEXDEP:class-crosssdk = "texinfo-dummy-native" |
| 13 | TEXDEP_class-cross-canadian = "texinfo-dummy-native" | 13 | TEXDEP:class-cross-canadian = "texinfo-dummy-native" |
| 14 | DEPENDS_append = " ${TEXDEP}" | 14 | DEPENDS:append = " ${TEXDEP}" |
| 15 | 15 | ||
| 16 | # libtool-cross doesn't inherit cross | 16 | # libtool-cross doesn't inherit cross |
| 17 | TEXDEP_pn-libtool-cross = "texinfo-dummy-native" | 17 | TEXDEP:pn-libtool-cross = "texinfo-dummy-native" |
| 18 | 18 | ||
diff --git a/meta/classes/toolchain-scripts.bbclass b/meta/classes/toolchain-scripts.bbclass index 67a812cb02..479f3b706e 100644 --- a/meta/classes/toolchain-scripts.bbclass +++ b/meta/classes/toolchain-scripts.bbclass | |||
| @@ -3,7 +3,7 @@ inherit toolchain-scripts-base siteinfo kernel-arch | |||
| 3 | # We want to be able to change the value of MULTIMACH_TARGET_SYS, because it | 3 | # We want to be able to change the value of MULTIMACH_TARGET_SYS, because it |
| 4 | # doesn't always match our expectations... but we default to the stock value | 4 | # doesn't always match our expectations... but we default to the stock value |
| 5 | REAL_MULTIMACH_TARGET_SYS ?= "${MULTIMACH_TARGET_SYS}" | 5 | REAL_MULTIMACH_TARGET_SYS ?= "${MULTIMACH_TARGET_SYS}" |
| 6 | TARGET_CC_ARCH_append_libc-musl = " -mmusl" | 6 | TARGET_CC_ARCH:append:libc-musl = " -mmusl" |
| 7 | 7 | ||
| 8 | # default debug prefix map isn't valid in the SDK | 8 | # default debug prefix map isn't valid in the SDK |
| 9 | DEBUG_PREFIX_MAP = "" | 9 | DEBUG_PREFIX_MAP = "" |
diff --git a/meta/classes/uboot-sign.bbclass b/meta/classes/uboot-sign.bbclass index 9671cf76a5..fdf153248c 100644 --- a/meta/classes/uboot-sign.bbclass +++ b/meta/classes/uboot-sign.bbclass | |||
| @@ -19,7 +19,7 @@ | |||
| 19 | # The tasks sequence is set as below, using DEPLOY_IMAGE_DIR as common place to | 19 | # The tasks sequence is set as below, using DEPLOY_IMAGE_DIR as common place to |
| 20 | # treat the device tree blob: | 20 | # treat the device tree blob: |
| 21 | # | 21 | # |
| 22 | # * u-boot:do_install_append | 22 | # * u-boot:do_install:append |
| 23 | # Install UBOOT_DTB_BINARY to datadir, so that kernel can use it for | 23 | # Install UBOOT_DTB_BINARY to datadir, so that kernel can use it for |
| 24 | # signing, and kernel will deploy UBOOT_DTB_BINARY after signs it. | 24 | # signing, and kernel will deploy UBOOT_DTB_BINARY after signs it. |
| 25 | # | 25 | # |
| @@ -227,7 +227,7 @@ install_spl_helper() { | |||
| 227 | touch ${D}/${datadir}/${UBOOT_ITS_IMAGE} | 227 | touch ${D}/${datadir}/${UBOOT_ITS_IMAGE} |
| 228 | } | 228 | } |
| 229 | 229 | ||
| 230 | do_install_append() { | 230 | do_install:append() { |
| 231 | if [ "${PN}" = "${UBOOT_PN}" ]; then | 231 | if [ "${PN}" = "${UBOOT_PN}" ]; then |
| 232 | if [ -n "${UBOOT_CONFIG}" ]; then | 232 | if [ -n "${UBOOT_CONFIG}" ]; then |
| 233 | for config in ${UBOOT_MACHINE}; do | 233 | for config in ${UBOOT_MACHINE}; do |
| @@ -416,7 +416,7 @@ do_uboot_assemble_fitimage() { | |||
| 416 | 416 | ||
| 417 | addtask uboot_assemble_fitimage before do_deploy after do_compile | 417 | addtask uboot_assemble_fitimage before do_deploy after do_compile |
| 418 | 418 | ||
| 419 | do_deploy_prepend_pn-${UBOOT_PN}() { | 419 | do_deploy:prepend:pn-${UBOOT_PN}() { |
| 420 | if [ "${UBOOT_SIGN_ENABLE}" = "1" -a -n "${UBOOT_DTB_BINARY}" ] ; then | 420 | if [ "${UBOOT_SIGN_ENABLE}" = "1" -a -n "${UBOOT_DTB_BINARY}" ] ; then |
| 421 | concat_dtb | 421 | concat_dtb |
| 422 | fi | 422 | fi |
| @@ -446,7 +446,7 @@ do_deploy_prepend_pn-${UBOOT_PN}() { | |||
| 446 | 446 | ||
| 447 | } | 447 | } |
| 448 | 448 | ||
| 449 | do_deploy_append_pn-${UBOOT_PN}() { | 449 | do_deploy:append:pn-${UBOOT_PN}() { |
| 450 | # If we're creating a u-boot fitImage, point u-boot.bin | 450 | # If we're creating a u-boot fitImage, point u-boot.bin |
| 451 | # symlink since it might get used by image recipes | 451 | # symlink since it might get used by image recipes |
| 452 | if [ "${UBOOT_FITIMAGE_ENABLE}" = "1" ] ; then | 452 | if [ "${UBOOT_FITIMAGE_ENABLE}" = "1" ] ; then |
diff --git a/meta/classes/update-alternatives.bbclass b/meta/classes/update-alternatives.bbclass index 000e4d5664..81ca05de0e 100644 --- a/meta/classes/update-alternatives.bbclass +++ b/meta/classes/update-alternatives.bbclass | |||
| @@ -6,9 +6,9 @@ | |||
| 6 | # To use this class a number of variables should be defined: | 6 | # To use this class a number of variables should be defined: |
| 7 | # | 7 | # |
| 8 | # List all of the alternatives needed by a package: | 8 | # List all of the alternatives needed by a package: |
| 9 | # ALTERNATIVE_<pkg> = "name1 name2 name3 ..." | 9 | # ALTERNATIVE:<pkg> = "name1 name2 name3 ..." |
| 10 | # | 10 | # |
| 11 | # i.e. ALTERNATIVE_busybox = "sh sed test bracket" | 11 | # i.e. ALTERNATIVE:busybox = "sh sed test bracket" |
| 12 | # | 12 | # |
| 13 | # The pathname of the link | 13 | # The pathname of the link |
| 14 | # ALTERNATIVE_LINK_NAME[name] = "target" | 14 | # ALTERNATIVE_LINK_NAME[name] = "target" |
| @@ -123,7 +123,7 @@ def gen_updatealternativesvars(d): | |||
| 123 | 123 | ||
| 124 | for p in pkgs: | 124 | for p in pkgs: |
| 125 | for v in vars: | 125 | for v in vars: |
| 126 | ret.append(v + "_" + p) | 126 | ret.append(v + ":" + p) |
| 127 | ret.append(v + "_VARDEPS_" + p) | 127 | ret.append(v + "_VARDEPS_" + p) |
| 128 | return " ".join(ret) | 128 | return " ".join(ret) |
| 129 | 129 | ||
| @@ -141,10 +141,10 @@ python apply_update_alternative_renames () { | |||
| 141 | import re | 141 | import re |
| 142 | 142 | ||
| 143 | def update_files(alt_target, alt_target_rename, pkg, d): | 143 | def update_files(alt_target, alt_target_rename, pkg, d): |
| 144 | f = d.getVar('FILES_' + pkg) | 144 | f = d.getVar('FILES:' + pkg) |
| 145 | if f: | 145 | if f: |
| 146 | f = re.sub(r'(^|\s)%s(\s|$)' % re.escape (alt_target), r'\1%s\2' % alt_target_rename, f) | 146 | f = re.sub(r'(^|\s)%s(\s|$)' % re.escape (alt_target), r'\1%s\2' % alt_target_rename, f) |
| 147 | d.setVar('FILES_' + pkg, f) | 147 | d.setVar('FILES:' + pkg, f) |
| 148 | 148 | ||
| 149 | # Check for deprecated usage... | 149 | # Check for deprecated usage... |
| 150 | pn = d.getVar('BPN') | 150 | pn = d.getVar('BPN') |
| @@ -156,7 +156,7 @@ python apply_update_alternative_renames () { | |||
| 156 | for pkg in (d.getVar('PACKAGES') or "").split(): | 156 | for pkg in (d.getVar('PACKAGES') or "").split(): |
| 157 | # If the src == dest, we know we need to rename the dest by appending ${BPN} | 157 | # If the src == dest, we know we need to rename the dest by appending ${BPN} |
| 158 | link_rename = [] | 158 | link_rename = [] |
| 159 | for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg) or "").split(): | 159 | for alt_name in (d.getVar('ALTERNATIVE:%s' % pkg) or "").split(): |
| 160 | alt_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name) | 160 | alt_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name) |
| 161 | if not alt_link: | 161 | if not alt_link: |
| 162 | alt_link = "%s/%s" % (d.getVar('bindir'), alt_name) | 162 | alt_link = "%s/%s" % (d.getVar('bindir'), alt_name) |
| @@ -233,7 +233,7 @@ def update_alternatives_alt_targets(d, pkg): | |||
| 233 | pn = d.getVar('BPN') | 233 | pn = d.getVar('BPN') |
| 234 | pkgdest = d.getVar('PKGD') | 234 | pkgdest = d.getVar('PKGD') |
| 235 | updates = list() | 235 | updates = list() |
| 236 | for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg) or "").split(): | 236 | for alt_name in (d.getVar('ALTERNATIVE:%s' % pkg) or "").split(): |
| 237 | alt_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name) | 237 | alt_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name) |
| 238 | alt_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name) or \ | 238 | alt_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name) or \ |
| 239 | d.getVarFlag('ALTERNATIVE_TARGET', alt_name) or \ | 239 | d.getVarFlag('ALTERNATIVE_TARGET', alt_name) or \ |
| @@ -259,7 +259,7 @@ def update_alternatives_alt_targets(d, pkg): | |||
| 259 | 259 | ||
| 260 | return updates | 260 | return updates |
| 261 | 261 | ||
| 262 | PACKAGESPLITFUNCS_prepend = "populate_packages_updatealternatives " | 262 | PACKAGESPLITFUNCS:prepend = "populate_packages_updatealternatives " |
| 263 | 263 | ||
| 264 | python populate_packages_updatealternatives () { | 264 | python populate_packages_updatealternatives () { |
| 265 | if not update_alternatives_enabled(d): | 265 | if not update_alternatives_enabled(d): |
| @@ -280,24 +280,24 @@ python populate_packages_updatealternatives () { | |||
| 280 | provider = d.getVar('VIRTUAL-RUNTIME_update-alternatives') | 280 | provider = d.getVar('VIRTUAL-RUNTIME_update-alternatives') |
| 281 | if provider: | 281 | if provider: |
| 282 | #bb.note('adding runtime requirement for update-alternatives for %s' % pkg) | 282 | #bb.note('adding runtime requirement for update-alternatives for %s' % pkg) |
| 283 | d.appendVar('RDEPENDS_%s' % pkg, ' ' + d.getVar('MLPREFIX', False) + provider) | 283 | d.appendVar('RDEPENDS:%s' % pkg, ' ' + d.getVar('MLPREFIX', False) + provider) |
| 284 | 284 | ||
| 285 | bb.note('adding update-alternatives calls to postinst/prerm for %s' % pkg) | 285 | bb.note('adding update-alternatives calls to postinst/prerm for %s' % pkg) |
| 286 | bb.note('%s' % alt_setup_links) | 286 | bb.note('%s' % alt_setup_links) |
| 287 | postinst = d.getVar('pkg_postinst_%s' % pkg) | 287 | postinst = d.getVar('pkg_postinst:%s' % pkg) |
| 288 | if postinst: | 288 | if postinst: |
| 289 | postinst = alt_setup_links + postinst | 289 | postinst = alt_setup_links + postinst |
| 290 | else: | 290 | else: |
| 291 | postinst = '#!/bin/sh\n' + alt_setup_links | 291 | postinst = '#!/bin/sh\n' + alt_setup_links |
| 292 | d.setVar('pkg_postinst_%s' % pkg, postinst) | 292 | d.setVar('pkg_postinst:%s' % pkg, postinst) |
| 293 | 293 | ||
| 294 | bb.note('%s' % alt_remove_links) | 294 | bb.note('%s' % alt_remove_links) |
| 295 | prerm = d.getVar('pkg_prerm_%s' % pkg) or '#!/bin/sh\n' | 295 | prerm = d.getVar('pkg_prerm:%s' % pkg) or '#!/bin/sh\n' |
| 296 | prerm += alt_remove_links | 296 | prerm += alt_remove_links |
| 297 | d.setVar('pkg_prerm_%s' % pkg, prerm) | 297 | d.setVar('pkg_prerm:%s' % pkg, prerm) |
| 298 | } | 298 | } |
| 299 | 299 | ||
| 300 | python package_do_filedeps_append () { | 300 | python package_do_filedeps:append () { |
| 301 | if update_alternatives_enabled(d): | 301 | if update_alternatives_enabled(d): |
| 302 | apply_update_alternative_provides(d) | 302 | apply_update_alternative_provides(d) |
| 303 | } | 303 | } |
| @@ -307,7 +307,7 @@ def apply_update_alternative_provides(d): | |||
| 307 | pkgdest = d.getVar('PKGDEST') | 307 | pkgdest = d.getVar('PKGDEST') |
| 308 | 308 | ||
| 309 | for pkg in d.getVar('PACKAGES').split(): | 309 | for pkg in d.getVar('PACKAGES').split(): |
| 310 | for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg) or "").split(): | 310 | for alt_name in (d.getVar('ALTERNATIVE:%s' % pkg) or "").split(): |
| 311 | alt_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name) | 311 | alt_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name) |
| 312 | alt_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name) | 312 | alt_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name) |
| 313 | alt_target = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg) or d.getVar('ALTERNATIVE_TARGET') or alt_link | 313 | alt_target = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg) or d.getVar('ALTERNATIVE_TARGET') or alt_link |
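Following the busybox example in the class comments, a recipe fragment in the converted syntax would look like this sketch, showing a single alternative:

    inherit update-alternatives
    ALTERNATIVE:${PN} = "sh"
    ALTERNATIVE_LINK_NAME[sh] = "${base_bindir}/sh"
    ALTERNATIVE_TARGET[sh] = "${base_bindir}/busybox"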
diff --git a/meta/classes/update-rc.d.bbclass b/meta/classes/update-rc.d.bbclass index 1366fee653..0a3a608662 100644 --- a/meta/classes/update-rc.d.bbclass +++ b/meta/classes/update-rc.d.bbclass | |||
| @@ -1,11 +1,11 @@ | |||
| 1 | UPDATERCPN ?= "${PN}" | 1 | UPDATERCPN ?= "${PN}" |
| 2 | 2 | ||
| 3 | DEPENDS_append_class-target = "${@bb.utils.contains('DISTRO_FEATURES', 'sysvinit', ' update-rc.d initscripts', '', d)}" | 3 | DEPENDS:append:class-target = "${@bb.utils.contains('DISTRO_FEATURES', 'sysvinit', ' update-rc.d initscripts', '', d)}" |
| 4 | 4 | ||
| 5 | UPDATERCD = "update-rc.d" | 5 | UPDATERCD = "update-rc.d" |
| 6 | UPDATERCD_class-cross = "" | 6 | UPDATERCD:class-cross = "" |
| 7 | UPDATERCD_class-native = "" | 7 | UPDATERCD:class-native = "" |
| 8 | UPDATERCD_class-nativesdk = "" | 8 | UPDATERCD:class-nativesdk = "" |
| 9 | 9 | ||
| 10 | INITSCRIPT_PARAMS ?= "defaults" | 10 | INITSCRIPT_PARAMS ?= "defaults" |
| 11 | 11 | ||
| @@ -62,8 +62,8 @@ python __anonymous() { | |||
| 62 | update_rc_after_parse(d) | 62 | update_rc_after_parse(d) |
| 63 | } | 63 | } |
| 64 | 64 | ||
| 65 | PACKAGESPLITFUNCS_prepend = "${@bb.utils.contains('DISTRO_FEATURES', 'sysvinit', 'populate_packages_updatercd ', '', d)}" | 65 | PACKAGESPLITFUNCS:prepend = "${@bb.utils.contains('DISTRO_FEATURES', 'sysvinit', 'populate_packages_updatercd ', '', d)}" |
| 66 | PACKAGESPLITFUNCS_remove_class-nativesdk = "populate_packages_updatercd " | 66 | PACKAGESPLITFUNCS:remove:class-nativesdk = "populate_packages_updatercd " |
| 67 | 67 | ||
| 68 | populate_packages_updatercd[vardeps] += "updatercd_prerm updatercd_postrm updatercd_postinst" | 68 | populate_packages_updatercd[vardeps] += "updatercd_prerm updatercd_postrm updatercd_postinst" |
| 69 | populate_packages_updatercd[vardepsexclude] += "OVERRIDES" | 69 | populate_packages_updatercd[vardepsexclude] += "OVERRIDES" |
| @@ -78,7 +78,7 @@ python populate_packages_updatercd () { | |||
| 78 | statement = "grep -q -w '/etc/init.d/functions' %s" % path | 78 | statement = "grep -q -w '/etc/init.d/functions' %s" % path |
| 79 | if subprocess.call(statement, shell=True) == 0: | 79 | if subprocess.call(statement, shell=True) == 0: |
| 80 | mlprefix = d.getVar('MLPREFIX') or "" | 80 | mlprefix = d.getVar('MLPREFIX') or "" |
| 81 | d.appendVar('RDEPENDS_' + pkg, ' %sinitd-functions' % (mlprefix)) | 81 | d.appendVar('RDEPENDS:' + pkg, ' %sinitd-functions' % (mlprefix)) |
| 82 | 82 | ||
| 83 | def update_rcd_package(pkg): | 83 | def update_rcd_package(pkg): |
| 84 | bb.debug(1, 'adding update-rc.d calls to postinst/prerm/postrm for %s' % pkg) | 84 | bb.debug(1, 'adding update-rc.d calls to postinst/prerm/postrm for %s' % pkg) |
| @@ -89,25 +89,25 @@ python populate_packages_updatercd () { | |||
| 89 | 89 | ||
| 90 | update_rcd_auto_depend(pkg) | 90 | update_rcd_auto_depend(pkg) |
| 91 | 91 | ||
| 92 | postinst = d.getVar('pkg_postinst_%s' % pkg) | 92 | postinst = d.getVar('pkg_postinst:%s' % pkg) |
| 93 | if not postinst: | 93 | if not postinst: |
| 94 | postinst = '#!/bin/sh\n' | 94 | postinst = '#!/bin/sh\n' |
| 95 | postinst += localdata.getVar('updatercd_postinst') | 95 | postinst += localdata.getVar('updatercd_postinst') |
| 96 | d.setVar('pkg_postinst_%s' % pkg, postinst) | 96 | d.setVar('pkg_postinst:%s' % pkg, postinst) |
| 97 | 97 | ||
| 98 | prerm = d.getVar('pkg_prerm_%s' % pkg) | 98 | prerm = d.getVar('pkg_prerm:%s' % pkg) |
| 99 | if not prerm: | 99 | if not prerm: |
| 100 | prerm = '#!/bin/sh\n' | 100 | prerm = '#!/bin/sh\n' |
| 101 | prerm += localdata.getVar('updatercd_prerm') | 101 | prerm += localdata.getVar('updatercd_prerm') |
| 102 | d.setVar('pkg_prerm_%s' % pkg, prerm) | 102 | d.setVar('pkg_prerm:%s' % pkg, prerm) |
| 103 | 103 | ||
| 104 | postrm = d.getVar('pkg_postrm_%s' % pkg) | 104 | postrm = d.getVar('pkg_postrm:%s' % pkg) |
| 105 | if not postrm: | 105 | if not postrm: |
| 106 | postrm = '#!/bin/sh\n' | 106 | postrm = '#!/bin/sh\n' |
| 107 | postrm += localdata.getVar('updatercd_postrm') | 107 | postrm += localdata.getVar('updatercd_postrm') |
| 108 | d.setVar('pkg_postrm_%s' % pkg, postrm) | 108 | d.setVar('pkg_postrm:%s' % pkg, postrm) |
| 109 | 109 | ||
| 110 | d.appendVar('RRECOMMENDS_' + pkg, " ${MLPREFIX}${UPDATERCD}") | 110 | d.appendVar('RRECOMMENDS:' + pkg, " ${MLPREFIX}${UPDATERCD}") |
| 111 | 111 | ||
| 112 | # Check that this class isn't being inhibited (generally, by | 112 | # Check that this class isn't being inhibited (generally, by |
| 113 | # systemd.bbclass) before doing any work. | 113 | # systemd.bbclass) before doing any work. |
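For comparison with the converted pkg_postinst:/pkg_prerm:/pkg_postrm: handling above, a minimal sysvinit recipe fragment; the script name and start/stop levels are illustrative:

    inherit update-rc.d
    INITSCRIPT_NAME = "exampled"
    INITSCRIPT_PARAMS = "defaults 90 10"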
diff --git a/meta/classes/useradd-staticids.bbclass b/meta/classes/useradd-staticids.bbclass index 3a1b5f1320..8e2a7fb635 100644 --- a/meta/classes/useradd-staticids.bbclass +++ b/meta/classes/useradd-staticids.bbclass | |||
| @@ -77,7 +77,7 @@ def update_useradd_static_config(d): | |||
| 77 | try: | 77 | try: |
| 78 | uaargs = parser.parse_args(oe.useradd.split_args(param)) | 78 | uaargs = parser.parse_args(oe.useradd.split_args(param)) |
| 79 | except Exception as e: | 79 | except Exception as e: |
| 80 | bb.fatal("%s: Unable to parse arguments for USERADD_PARAM_%s '%s': %s" % (d.getVar('PN'), pkg, param, e)) | 80 | bb.fatal("%s: Unable to parse arguments for USERADD_PARAM:%s '%s': %s" % (d.getVar('PN'), pkg, param, e)) |
| 81 | 81 | ||
| 82 | # Read all passwd files specified in USERADD_UID_TABLES or files/passwd | 82 | # Read all passwd files specified in USERADD_UID_TABLES or files/passwd |
| 83 | # Use the standard passwd layout: | 83 | # Use the standard passwd layout: |
| @@ -140,13 +140,13 @@ def update_useradd_static_config(d): | |||
| 140 | uaargs.gid = uaargs.groupid | 140 | uaargs.gid = uaargs.groupid |
| 141 | uaargs.user_group = None | 141 | uaargs.user_group = None |
| 142 | if newgroup and is_pkg: | 142 | if newgroup and is_pkg: |
| 143 | groupadd = d.getVar("GROUPADD_PARAM_%s" % pkg) | 143 | groupadd = d.getVar("GROUPADD_PARAM:%s" % pkg) |
| 144 | if groupadd: | 144 | if groupadd: |
| 145 | # Only add the group if not already specified | 145 | # Only add the group if not already specified |
| 146 | if not uaargs.groupname in groupadd: | 146 | if not uaargs.groupname in groupadd: |
| 147 | d.setVar("GROUPADD_PARAM_%s" % pkg, "%s; %s" % (groupadd, newgroup)) | 147 | d.setVar("GROUPADD_PARAM:%s" % pkg, "%s; %s" % (groupadd, newgroup)) |
| 148 | else: | 148 | else: |
| 149 | d.setVar("GROUPADD_PARAM_%s" % pkg, newgroup) | 149 | d.setVar("GROUPADD_PARAM:%s" % pkg, newgroup) |
| 150 | 150 | ||
| 151 | uaargs.comment = "'%s'" % field[4] if field[4] else uaargs.comment | 151 | uaargs.comment = "'%s'" % field[4] if field[4] else uaargs.comment |
| 152 | uaargs.home_dir = field[5] or uaargs.home_dir | 152 | uaargs.home_dir = field[5] or uaargs.home_dir |
| @@ -198,7 +198,7 @@ def update_useradd_static_config(d): | |||
| 198 | # If we're processing multiple lines, we could have left over values here... | 198 | # If we're processing multiple lines, we could have left over values here... |
| 199 | gaargs = parser.parse_args(oe.useradd.split_args(param)) | 199 | gaargs = parser.parse_args(oe.useradd.split_args(param)) |
| 200 | except Exception as e: | 200 | except Exception as e: |
| 201 | bb.fatal("%s: Unable to parse arguments for GROUPADD_PARAM_%s '%s': %s" % (d.getVar('PN'), pkg, param, e)) | 201 | bb.fatal("%s: Unable to parse arguments for GROUPADD_PARAM:%s '%s': %s" % (d.getVar('PN'), pkg, param, e)) |
| 202 | 202 | ||
| 203 | # Read all group files specified in USERADD_GID_TABLES or files/group | 203 | # Read all group files specified in USERADD_GID_TABLES or files/group |
| 204 | # Use the standard group layout: | 204 | # Use the standard group layout: |
| @@ -265,17 +265,17 @@ def update_useradd_static_config(d): | |||
| 265 | for pkg in useradd_packages.split(): | 265 | for pkg in useradd_packages.split(): |
| 266 | # Groupmems doesn't have anything we might want to change, so simply validating | 266 | # Groupmems doesn't have anything we might want to change, so simply validating |
| 267 | # is a bit of a waste -- only process useradd/groupadd | 267 | # is a bit of a waste -- only process useradd/groupadd |
| 268 | useradd_param = d.getVar('USERADD_PARAM_%s' % pkg) | 268 | useradd_param = d.getVar('USERADD_PARAM:%s' % pkg) |
| 269 | if useradd_param: | 269 | if useradd_param: |
| 270 | #bb.warn("Before: 'USERADD_PARAM_%s' - '%s'" % (pkg, useradd_param)) | 270 | #bb.warn("Before: 'USERADD_PARAM:%s' - '%s'" % (pkg, useradd_param)) |
| 271 | d.setVar('USERADD_PARAM_%s' % pkg, rewrite_useradd(useradd_param, True)) | 271 | d.setVar('USERADD_PARAM:%s' % pkg, rewrite_useradd(useradd_param, True)) |
| 272 | #bb.warn("After: 'USERADD_PARAM_%s' - '%s'" % (pkg, d.getVar('USERADD_PARAM_%s' % pkg))) | 272 | #bb.warn("After: 'USERADD_PARAM:%s' - '%s'" % (pkg, d.getVar('USERADD_PARAM:%s' % pkg))) |
| 273 | 273 | ||
| 274 | groupadd_param = d.getVar('GROUPADD_PARAM_%s' % pkg) | 274 | groupadd_param = d.getVar('GROUPADD_PARAM:%s' % pkg) |
| 275 | if groupadd_param: | 275 | if groupadd_param: |
| 276 | #bb.warn("Before: 'GROUPADD_PARAM_%s' - '%s'" % (pkg, groupadd_param)) | 276 | #bb.warn("Before: 'GROUPADD_PARAM:%s' - '%s'" % (pkg, groupadd_param)) |
| 277 | d.setVar('GROUPADD_PARAM_%s' % pkg, rewrite_groupadd(groupadd_param, True)) | 277 | d.setVar('GROUPADD_PARAM:%s' % pkg, rewrite_groupadd(groupadd_param, True)) |
| 278 | #bb.warn("After: 'GROUPADD_PARAM_%s' - '%s'" % (pkg, d.getVar('GROUPADD_PARAM_%s' % pkg))) | 278 | #bb.warn("After: 'GROUPADD_PARAM:%s' - '%s'" % (pkg, d.getVar('GROUPADD_PARAM:%s' % pkg))) |
| 279 | 279 | ||
| 280 | # Load and process extra users and groups, rewriting only adduser/addgroup params | 280 | # Load and process extra users and groups, rewriting only adduser/addgroup params |
| 281 | pkg = d.getVar('PN') | 281 | pkg = d.getVar('PN') |
diff --git a/meta/classes/useradd.bbclass b/meta/classes/useradd.bbclass index e5f3ba24f9..fdec5cca56 100644 --- a/meta/classes/useradd.bbclass +++ b/meta/classes/useradd.bbclass | |||
| @@ -3,7 +3,7 @@ inherit useradd_base | |||
| 3 | # base-passwd-cross provides the default passwd and group files in the | 3 | # base-passwd-cross provides the default passwd and group files in the |
| 4 | # target sysroot, and shadow -native and -sysroot provide the utilities | 4 | # target sysroot, and shadow -native and -sysroot provide the utilities |
| 5 | # and support files needed to add and modify user and group accounts | 5 | # and support files needed to add and modify user and group accounts |
| 6 | DEPENDS_append_class-target = " base-files shadow-native shadow-sysroot shadow base-passwd" | 6 | DEPENDS:append:class-target = " base-files shadow-native shadow-sysroot shadow base-passwd" |
| 7 | PACKAGE_WRITE_DEPS += "shadow-native" | 7 | PACKAGE_WRITE_DEPS += "shadow-native" |
| 8 | 8 | ||
| 9 | # This preinstall function can be run in four different contexts: | 9 | # This preinstall function can be run in four different contexts: |
| @@ -164,16 +164,16 @@ python useradd_sysroot_sstate () { | |||
| 164 | } | 164 | } |
| 165 | 165 | ||
| 166 | do_prepare_recipe_sysroot[postfuncs] += "${SYSROOTFUNC}" | 166 | do_prepare_recipe_sysroot[postfuncs] += "${SYSROOTFUNC}" |
| 167 | SYSROOTFUNC_class-target = "useradd_sysroot_sstate" | 167 | SYSROOTFUNC:class-target = "useradd_sysroot_sstate" |
| 168 | SYSROOTFUNC = "" | 168 | SYSROOTFUNC = "" |
| 169 | 169 | ||
| 170 | SYSROOT_PREPROCESS_FUNCS += "${SYSROOTFUNC}" | 170 | SYSROOT_PREPROCESS_FUNCS += "${SYSROOTFUNC}" |
| 171 | 171 | ||
| 172 | SSTATEPREINSTFUNCS_append_class-target = " useradd_sysroot_sstate" | 172 | SSTATEPREINSTFUNCS:append:class-target = " useradd_sysroot_sstate" |
| 173 | 173 | ||
| 174 | do_package_setscene[depends] += "${USERADDSETSCENEDEPS}" | 174 | do_package_setscene[depends] += "${USERADDSETSCENEDEPS}" |
| 175 | do_populate_sysroot_setscene[depends] += "${USERADDSETSCENEDEPS}" | 175 | do_populate_sysroot_setscene[depends] += "${USERADDSETSCENEDEPS}" |
| 176 | USERADDSETSCENEDEPS_class-target = "${MLPREFIX}base-passwd:do_populate_sysroot_setscene pseudo-native:do_populate_sysroot_setscene shadow-native:do_populate_sysroot_setscene ${MLPREFIX}shadow-sysroot:do_populate_sysroot_setscene" | 176 | USERADDSETSCENEDEPS:class-target = "${MLPREFIX}base-passwd:do_populate_sysroot_setscene pseudo-native:do_populate_sysroot_setscene shadow-native:do_populate_sysroot_setscene ${MLPREFIX}shadow-sysroot:do_populate_sysroot_setscene" |
| 177 | USERADDSETSCENEDEPS = "" | 177 | USERADDSETSCENEDEPS = "" |
| 178 | 178 | ||
| 179 | # Recipe parse-time sanity checks | 179 | # Recipe parse-time sanity checks |
| @@ -184,8 +184,8 @@ def update_useradd_after_parse(d): | |||
| 184 | bb.fatal("%s inherits useradd but doesn't set USERADD_PACKAGES" % d.getVar('FILE', False)) | 184 | bb.fatal("%s inherits useradd but doesn't set USERADD_PACKAGES" % d.getVar('FILE', False)) |
| 185 | 185 | ||
| 186 | for pkg in useradd_packages.split(): | 186 | for pkg in useradd_packages.split(): |
| 187 | d.appendVarFlag("do_populate_sysroot", "vardeps", "USERADD_PARAM_%s GROUPADD_PARAM_%s GROUPMEMS_PARAM_%s" % (pkg, pkg, pkg)) | 187 | d.appendVarFlag("do_populate_sysroot", "vardeps", "USERADD_PARAM:%s GROUPADD_PARAM:%s GROUPMEMS_PARAM:%s" % (pkg, pkg, pkg)) |
| 188 | if not d.getVar('USERADD_PARAM_%s' % pkg) and not d.getVar('GROUPADD_PARAM_%s' % pkg) and not d.getVar('GROUPMEMS_PARAM_%s' % pkg): | 188 | if not d.getVar('USERADD_PARAM:%s' % pkg) and not d.getVar('GROUPADD_PARAM:%s' % pkg) and not d.getVar('GROUPMEMS_PARAM:%s' % pkg): |
| 189 | bb.fatal("%s inherits useradd but doesn't set USERADD_PARAM, GROUPADD_PARAM or GROUPMEMS_PARAM for package %s" % (d.getVar('FILE', False), pkg)) | 189 | bb.fatal("%s inherits useradd but doesn't set USERADD_PARAM, GROUPADD_PARAM or GROUPMEMS_PARAM for package %s" % (d.getVar('FILE', False), pkg)) |
| 190 | 190 | ||
| 191 | python __anonymous() { | 191 | python __anonymous() { |
| @@ -211,7 +211,7 @@ def get_all_cmd_params(d, cmd_type): | |||
| 211 | return "; ".join(params) | 211 | return "; ".join(params) |
| 212 | 212 | ||
| 213 | # Adds the preinst script into generated packages | 213 | # Adds the preinst script into generated packages |
| 214 | fakeroot python populate_packages_prepend () { | 214 | fakeroot python populate_packages:prepend () { |
| 215 | def update_useradd_package(pkg): | 215 | def update_useradd_package(pkg): |
| 216 | bb.debug(1, 'adding user/group calls to preinst for %s' % pkg) | 216 | bb.debug(1, 'adding user/group calls to preinst for %s' % pkg) |
| 217 | 217 | ||
| @@ -220,7 +220,7 @@ fakeroot python populate_packages_prepend () { | |||
| 220 | required to execute on the target. Not doing so may cause | 220 | required to execute on the target. Not doing so may cause |
| 221 | useradd preinst to be invoked twice, causing unwanted warnings. | 221 | useradd preinst to be invoked twice, causing unwanted warnings. |
| 222 | """ | 222 | """ |
| 223 | preinst = d.getVar('pkg_preinst_%s' % pkg) or d.getVar('pkg_preinst') | 223 | preinst = d.getVar('pkg_preinst:%s' % pkg) or d.getVar('pkg_preinst') |
| 224 | if not preinst: | 224 | if not preinst: |
| 225 | preinst = '#!/bin/sh\n' | 225 | preinst = '#!/bin/sh\n' |
| 226 | preinst += 'bbnote () {\n\techo "NOTE: $*"\n}\n' | 226 | preinst += 'bbnote () {\n\techo "NOTE: $*"\n}\n' |
| @@ -230,15 +230,15 @@ fakeroot python populate_packages_prepend () { | |||
| 230 | preinst += 'perform_useradd () {\n%s}\n' % d.getVar('perform_useradd') | 230 | preinst += 'perform_useradd () {\n%s}\n' % d.getVar('perform_useradd') |
| 231 | preinst += 'perform_groupmems () {\n%s}\n' % d.getVar('perform_groupmems') | 231 | preinst += 'perform_groupmems () {\n%s}\n' % d.getVar('perform_groupmems') |
| 232 | preinst += d.getVar('useradd_preinst') | 232 | preinst += d.getVar('useradd_preinst') |
| 233 | d.setVar('pkg_preinst_%s' % pkg, preinst) | 233 | d.setVar('pkg_preinst:%s' % pkg, preinst) |
| 234 | 234 | ||
| 235 | # RDEPENDS setup | 235 | # RDEPENDS setup |
| 236 | rdepends = d.getVar("RDEPENDS_%s" % pkg) or "" | 236 | rdepends = d.getVar("RDEPENDS:%s" % pkg) or "" |
| 237 | rdepends += ' ' + d.getVar('MLPREFIX', False) + 'base-passwd' | 237 | rdepends += ' ' + d.getVar('MLPREFIX', False) + 'base-passwd' |
| 238 | rdepends += ' ' + d.getVar('MLPREFIX', False) + 'shadow' | 238 | rdepends += ' ' + d.getVar('MLPREFIX', False) + 'shadow' |
| 239 | # base-files is where the default /etc/skel is packaged | 239 | # base-files is where the default /etc/skel is packaged |
| 240 | rdepends += ' ' + d.getVar('MLPREFIX', False) + 'base-files' | 240 | rdepends += ' ' + d.getVar('MLPREFIX', False) + 'base-files' |
| 241 | d.setVar("RDEPENDS_%s" % pkg, rdepends) | 241 | d.setVar("RDEPENDS:%s" % pkg, rdepends) |
| 242 | 242 | ||
| 243 | # Add the user/group preinstall scripts and RDEPENDS requirements | 243 | # Add the user/group preinstall scripts and RDEPENDS requirements |
| 244 | # to packages specified by USERADD_PACKAGES | 244 | # to packages specified by USERADD_PACKAGES |
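The useradd hunk above renames the per-package variables (`pkg_preinst:%s`, `RDEPENDS:%s`, `USERADD_PARAM:%s`) to the `:` separator. As an illustrative, hypothetical sketch (not from this commit), a recipe that inherits this class would now spell its per-package parameters like this; the service, user and group names are invented for the example:

```
# Hypothetical recipe fragment using the useradd class with the new
# override syntax; "mysvc" and "mysvcgrp" are made-up names.
inherit useradd

USERADD_PACKAGES = "${PN}"

# Per-package parameters now use ':' instead of '_' as the separator.
USERADD_PARAM:${PN} = "--system --shell /sbin/nologin --user-group mysvc"
GROUPADD_PARAM:${PN} = "--system mysvcgrp"
```

At build time the class folds these into a `pkg_preinst:${PN}` fragment and adds base-passwd, shadow and base-files to `RDEPENDS:${PN}`, as the hunk above shows.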
diff --git a/meta/classes/utility-tasks.bbclass b/meta/classes/utility-tasks.bbclass index b1f27d3658..34d6b8f4d5 100644 --- a/meta/classes/utility-tasks.bbclass +++ b/meta/classes/utility-tasks.bbclass | |||
| @@ -19,7 +19,7 @@ python do_listtasks() { | |||
| 19 | 19 | ||
| 20 | CLEANFUNCS ?= "" | 20 | CLEANFUNCS ?= "" |
| 21 | 21 | ||
| 22 | T_task-clean = "${LOG_DIR}/cleanlogs/${PN}" | 22 | T:task-clean = "${LOG_DIR}/cleanlogs/${PN}" |
| 23 | addtask clean | 23 | addtask clean |
| 24 | do_clean[nostamp] = "1" | 24 | do_clean[nostamp] = "1" |
| 25 | python do_clean() { | 25 | python do_clean() { |
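`T:task-clean` above is a task-scoped override: the value only applies while do_clean is executing. A minimal, hypothetical sketch of the same idiom in the new syntax, with an invented variable name:

```
# Hypothetical task-scoped override using ':'; MYLOGDIR is an invented
# variable. The second assignment only takes effect while do_clean runs,
# mirroring the T:task-clean line in the hunk above.
MYLOGDIR = "${T}"
MYLOGDIR:task-clean = "${LOG_DIR}/cleanlogs/${PN}"
```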
diff --git a/meta/classes/vala.bbclass b/meta/classes/vala.bbclass index bcaf68c5a7..bfcceff7cf 100644 --- a/meta/classes/vala.bbclass +++ b/meta/classes/vala.bbclass | |||
| @@ -2,8 +2,8 @@ | |||
| 2 | # because that is where target builds look for .vapi files. | 2 | # because that is where target builds look for .vapi files. |
| 3 | # | 3 | # |
| 4 | VALADEPENDS = "" | 4 | VALADEPENDS = "" |
| 5 | VALADEPENDS_class-target = "vala" | 5 | VALADEPENDS:class-target = "vala" |
| 6 | DEPENDS_append = " vala-native ${VALADEPENDS}" | 6 | DEPENDS:append = " vala-native ${VALADEPENDS}" |
| 7 | 7 | ||
| 8 | # Our patched version of Vala looks in STAGING_DATADIR for .vapi files | 8 | # Our patched version of Vala looks in STAGING_DATADIR for .vapi files |
| 9 | export STAGING_DATADIR | 9 | export STAGING_DATADIR |
| @@ -11,7 +11,7 @@ export STAGING_DATADIR | |||
| 11 | export XDG_DATA_DIRS = "${STAGING_DATADIR}:${STAGING_LIBDIR}" | 11 | export XDG_DATA_DIRS = "${STAGING_DATADIR}:${STAGING_LIBDIR}" |
| 12 | 12 | ||
| 13 | # Package additional files | 13 | # Package additional files |
| 14 | FILES_${PN}-dev += "\ | 14 | FILES:${PN}-dev += "\ |
| 15 | ${datadir}/vala/vapi/*.vapi \ | 15 | ${datadir}/vala/vapi/*.vapi \ |
| 16 | ${datadir}/vala/vapi/*.deps \ | 16 | ${datadir}/vala/vapi/*.deps \ |
| 17 | ${datadir}/gir-1.0 \ | 17 | ${datadir}/gir-1.0 \ |
| @@ -19,6 +19,6 @@ FILES_${PN}-dev += "\ | |||
| 19 | 19 | ||
| 20 | # Remove vapigen.m4 that is bundled with tarballs | 20 | # Remove vapigen.m4 that is bundled with tarballs |
| 21 | # because it does not yet have our cross-compile fixes | 21 | # because it does not yet have our cross-compile fixes |
| 22 | do_configure_prepend() { | 22 | do_configure:prepend() { |
| 23 | rm -f ${S}/m4/vapigen.m4 | 23 | rm -f ${S}/m4/vapigen.m4 |
| 24 | } | 24 | } |
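The same renaming applies to anything a recipe layers on top of this class: packaging overrides such as `FILES_${PN}-dev` and function overrides such as `do_configure_prepend` both switch to `:`. A hypothetical before/after sketch (the extra path and cleanup step are invented for illustration):

```
# Old syntax (pre-conversion), shown as comments for comparison:
#   FILES_${PN}-dev += "${datadir}/vala-extras"
#   do_configure_prepend() { rm -f ${S}/m4/local.m4 }
#
# New syntax after the ':' conversion:
FILES:${PN}-dev += "${datadir}/vala-extras"

do_configure:prepend() {
    # hypothetical extra cleanup before configure runs
    rm -f ${S}/m4/local.m4
}
```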
diff --git a/meta/classes/waf.bbclass b/meta/classes/waf.bbclass index 188119f356..df0ab8d73f 100644 --- a/meta/classes/waf.bbclass +++ b/meta/classes/waf.bbclass | |||
| @@ -8,7 +8,7 @@ WAF_PYTHON ?= "python3" | |||
| 8 | B = "${WORKDIR}/build" | 8 | B = "${WORKDIR}/build" |
| 9 | do_configure[cleandirs] += "${B}" | 9 | do_configure[cleandirs] += "${B}" |
| 10 | 10 | ||
| 11 | EXTRA_OECONF_append = " ${PACKAGECONFIG_CONFARGS}" | 11 | EXTRA_OECONF:append = " ${PACKAGECONFIG_CONFARGS}" |
| 12 | 12 | ||
| 13 | EXTRA_OEWAF_BUILD ??= "" | 13 | EXTRA_OEWAF_BUILD ??= "" |
| 14 | # In most cases, you want to pass the same arguments to `waf build` and `waf | 14 | # In most cases, you want to pass the same arguments to `waf build` and `waf |
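`EXTRA_OECONF:append` above is what feeds PACKAGECONFIG choices into `waf configure`. As a hypothetical recipe-side example (the "ssl" feature name, flags and openssl dependency are invented), the enable/disable arguments land in `${PACKAGECONFIG_CONFARGS}`, which the class appends to `EXTRA_OECONF`:

```
# Hypothetical PACKAGECONFIG entry; its configure arguments end up in
# ${PACKAGECONFIG_CONFARGS} and are appended to EXTRA_OECONF by the class.
PACKAGECONFIG ??= "ssl"
PACKAGECONFIG[ssl] = "--enable-ssl,--disable-ssl,openssl"
```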
diff --git a/meta/classes/xmlcatalog.bbclass b/meta/classes/xmlcatalog.bbclass index ae4811fdeb..be155b7bc2 100644 --- a/meta/classes/xmlcatalog.bbclass +++ b/meta/classes/xmlcatalog.bbclass | |||
| @@ -4,7 +4,7 @@ DEPENDS = "libxml2-native" | |||
| 4 | # "${sysconfdir}/xml/docbook-xml.xml". | 4 | # "${sysconfdir}/xml/docbook-xml.xml". |
| 5 | XMLCATALOGS ?= "" | 5 | XMLCATALOGS ?= "" |
| 6 | 6 | ||
| 7 | SYSROOT_PREPROCESS_FUNCS_append = " xmlcatalog_sstate_postinst" | 7 | SYSROOT_PREPROCESS_FUNCS:append = " xmlcatalog_sstate_postinst" |
| 8 | 8 | ||
| 9 | xmlcatalog_complete() { | 9 | xmlcatalog_complete() { |
| 10 | ROOTCATALOG="${STAGING_ETCDIR_NATIVE}/xml/catalog" | 10 | ROOTCATALOG="${STAGING_ETCDIR_NATIVE}/xml/catalog" |
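Like the `SYSROOT_PREPROCESS_FUNCS:append` line above, recipe-side additions to such `*_FUNCS` lists use `:append`, which concatenates verbatim (unlike `+=`, which inserts a space), so the leading space in the value matters. A sketch with an invented function name:

```
# Hypothetical extra sysroot hook; note the leading space in the value,
# since ":append" does not add a separator on its own.
SYSROOT_PREPROCESS_FUNCS:append = " my_extra_catalog_hook"

my_extra_catalog_hook() {
    : # placeholder body for illustration only
}
```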
