-rw-r--r--  meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline-fail.bb | 18
-rw-r--r--  meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline.bb      |  2
-rw-r--r--  meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline.inc     | 16
-rw-r--r--  meta/classes-global/base.bbclass                                     | 33
-rw-r--r--  meta/classes-global/package.bbclass                                  | 12
-rw-r--r--  meta/classes/externalsrc.bbclass                                     |  4
-rw-r--r--  meta/conf/bitbake.conf                                               |  5
-rw-r--r--  meta/lib/oeqa/selftest/cases/fetch.py                                |  2
8 files changed, 45 insertions(+), 47 deletions(-)
diff --git a/meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline-fail.bb b/meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline-fail.bb
index 602e895199..5146129666 100644
--- a/meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline-fail.bb
+++ b/meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline-fail.bb
@@ -1,16 +1,4 @@
-SUMMARY = "Test recipe for fetching git submodules"
-HOMEPAGE = "https://git.yoctoproject.org/git/matchbox-panel-2"
-LICENSE = "GPL-2.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f"
+require gitunpackoffline.inc
 
-INHIBIT_DEFAULT_DEPS = "1"
-
-TAGVALUE = "2.10"
-
-# Deliberately have a tag which has to be resolved but ensure do_unpack doesn't access the network again.
-SRC_URI = "git://git.yoctoproject.org/git/matchbox-panel-2;branch=master;protocol=https"
-SRC_URI:append:gitunpack-enable-recipe = ";tag=${TAGVALUE}"
-SRCREV = "f82ca3f42510fb3ef10f598b393eb373a2c34ca7"
-SRCREV:gitunpack-enable-recipe = ""
-
-S = "${WORKDIR}/git"
+# Clear the base.bbclass magic srcrev call
+fetcher_hashes_dummyfunc[vardepvalue] = ""
diff --git a/meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline.bb b/meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline.bb
index 597c89b199..b051b5da5a 100644
--- a/meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline.bb
+++ b/meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline.bb
@@ -1,4 +1,4 @@
-require gitunpackoffline-fail.bb
+require gitunpackoffline.inc
 
 TAGVALUE = "2.11"
 
diff --git a/meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline.inc b/meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline.inc
new file mode 100644
index 0000000000..602e895199
--- /dev/null
+++ b/meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline.inc
@@ -0,0 +1,16 @@
+SUMMARY = "Test recipe for fetching git submodules"
+HOMEPAGE = "https://git.yoctoproject.org/git/matchbox-panel-2"
+LICENSE = "GPL-2.0-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f"
+
+INHIBIT_DEFAULT_DEPS = "1"
+
+TAGVALUE = "2.10"
+
+# Deliberately have a tag which has to be resolved but ensure do_unpack doesn't access the network again.
+SRC_URI = "git://git.yoctoproject.org/git/matchbox-panel-2;branch=master;protocol=https"
+SRC_URI:append:gitunpack-enable-recipe = ";tag=${TAGVALUE}"
+SRCREV = "f82ca3f42510fb3ef10f598b393eb373a2c34ca7"
+SRCREV:gitunpack-enable-recipe = ""
+
+S = "${WORKDIR}/git"
diff --git a/meta/classes-global/base.bbclass b/meta/classes-global/base.bbclass
index cbda8d12f0..7c774d250f 100644
--- a/meta/classes-global/base.bbclass
+++ b/meta/classes-global/base.bbclass
@@ -126,11 +126,18 @@ def setup_hosttools_dir(dest, toolsvar, d, fatal=True):
     if notfound and fatal:
         bb.fatal("The following required tools (as specified by HOSTTOOLS) appear to be unavailable in PATH, please install them in order to proceed:\n %s" % " ".join(notfound))
 
+# We can't use vardepvalue against do_fetch directly since that would overwrite
+# the other task dependencies so we use an indirect function.
+python fetcher_hashes_dummyfunc() {
+    return
+}
+fetcher_hashes_dummyfunc[vardepvalue] = "${@bb.fetch.get_hashvalue(d)}"
+
 addtask fetch
 do_fetch[dirs] = "${DL_DIR}"
 do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}"
 do_fetch[file-checksums] += " ${@get_lic_checksum_file_list(d)}"
-do_fetch[vardeps] += "SRCREV"
+do_fetch[prefuncs] += "fetcher_hashes_dummyfunc"
 do_fetch[network] = "1"
 python base_do_fetch() {
 
@@ -606,7 +613,6 @@ python () {
             bb.debug(1, "Skipping recipe %s because of incompatible license(s): %s" % (pn, ' '.join(incompatible_lic)))
             raise bb.parse.SkipRecipe("it has incompatible license(s): %s" % ' '.join(incompatible_lic))
 
-    needsrcrev = False
     srcuri = d.getVar('SRC_URI')
     for uri_string in srcuri.split():
         uri = bb.fetch.URI(uri_string)
@@ -619,24 +625,17 @@ python () {
 
         # Svn packages should DEPEND on subversion-native
         if uri.scheme == "svn":
-            needsrcrev = True
             d.appendVarFlag('do_fetch', 'depends', ' subversion-native:do_populate_sysroot')
 
         # Git packages should DEPEND on git-native
         elif uri.scheme in ("git", "gitsm"):
-            needsrcrev = True
             d.appendVarFlag('do_fetch', 'depends', ' git-native:do_populate_sysroot')
 
         # Mercurial packages should DEPEND on mercurial-native
         elif uri.scheme == "hg":
-            needsrcrev = True
             d.appendVar("EXTRANATIVEPATH", ' python3-native ')
             d.appendVarFlag('do_fetch', 'depends', ' mercurial-native:do_populate_sysroot')
 
-        # Perforce packages support SRCREV = "${AUTOREV}"
-        elif uri.scheme == "p4":
-            needsrcrev = True
-
         # OSC packages should DEPEND on osc-native
         elif uri.scheme == "osc":
             d.appendVarFlag('do_fetch', 'depends', ' osc-native:do_populate_sysroot')
@@ -645,7 +644,6 @@ python () {
             d.appendVarFlag('do_fetch', 'depends', ' nodejs-native:do_populate_sysroot')
 
         elif uri.scheme == "repo":
-            needsrcrev = True
             d.appendVarFlag('do_fetch', 'depends', ' repo-native:do_populate_sysroot')
 
         # *.lz4 should DEPEND on lz4-native for unpacking
@@ -676,21 +674,6 @@ python () {
         elif path.endswith('.deb'):
             d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')
 
-    if needsrcrev:
-        d.setVar("SRCPV", "${@bb.fetch2.get_srcrev(d)}")
-
-        # Gather all named SRCREVs to add to the sstate hash calculation
-        # This anonymous python snippet is called multiple times so we
-        # need to be careful to not double up the appends here and cause
-        # the base hash to mismatch the task hash
-        for uri in srcuri.split():
-            parm = bb.fetch.decodeurl(uri)[5]
-            uri_names = parm.get("name", "").split(",")
-            for uri_name in filter(None, uri_names):
-                srcrev_name = "SRCREV_{}".format(uri_name)
-                if srcrev_name not in (d.getVarFlag("do_fetch", "vardeps") or "").split():
-                    d.appendVarFlag("do_fetch", "vardeps", " {}".format(srcrev_name))
-
     set_packagetriplet(d)
 
     # 'multimachine' handling
diff --git a/meta/classes-global/package.bbclass b/meta/classes-global/package.bbclass
index e8055a9cdc..0338a5c690 100644
--- a/meta/classes-global/package.bbclass
+++ b/meta/classes-global/package.bbclass
@@ -315,13 +315,21 @@ python package_get_auto_pr() {
 # Package functions suitable for inclusion in PACKAGEFUNCS
 #
 
-python package_convert_pr_autoinc() {
+python package_setup_pkgv() {
     pkgv = d.getVar("PKGV")
+    # Expand SRCPV into PKGV if not present
+    srcpv = bb.fetch.get_pkgv_string(d)
+    if srcpv and "+" in pkgv:
+        d.appendVar("PKGV", srcpv)
+        pkgv = d.getVar("PKGV")
 
     # Adjust pkgv as necessary...
     if 'AUTOINC' in pkgv:
         d.setVar("PKGV", pkgv.replace("AUTOINC", "${PRSERV_PV_AUTOINC}"))
+}
 
+
+python package_convert_pr_autoinc() {
     # Change PRSERV_PV_AUTOINC and EXTENDPRAUTO usage to special values
     d.setVar('PRSERV_PV_AUTOINC', '@PRSERV_PV_AUTOINC@')
     d.setVar('EXTENDPRAUTO', '@EXTENDPRAUTO@')
@@ -494,6 +502,7 @@ python do_package () {
         oe.qa.handle_error("var-undefined", msg, d)
         return
 
+    bb.build.exec_func("package_setup_pkgv", d)
     bb.build.exec_func("package_convert_pr_autoinc", d)
 
     # Check for conflict between renamed packages and existing ones
@@ -577,6 +586,7 @@ addtask do_package_setscene
 # Copy from PKGDESTWORK to tempdirectory as tempdirectory can be cleaned at both
 # do_package_setscene and do_packagedata_setscene leading to races
 python do_packagedata () {
+    bb.build.exec_func("package_setup_pkgv", d)
     bb.build.exec_func("package_get_auto_pr", d)
 
     src = d.expand("${PKGDESTWORK}")
diff --git a/meta/classes/externalsrc.bbclass b/meta/classes/externalsrc.bbclass
index aedd78a03a..322734f7ac 100644
--- a/meta/classes/externalsrc.bbclass
+++ b/meta/classes/externalsrc.bbclass
@@ -63,6 +63,7 @@ python () {
         else:
             d.setVar('B', '${WORKDIR}/${BPN}-${PV}')
 
+        bb.fetch.get_hashvalue(d)
         local_srcuri = []
         fetch = bb.fetch2.Fetch((d.getVar('SRC_URI') or '').split(), d)
         for url in fetch.urls:
@@ -126,6 +127,9 @@ python () {
         d.setVarFlag('do_compile', 'file-checksums', '${@srctree_hash_files(d)}')
         d.setVarFlag('do_configure', 'file-checksums', '${@srctree_configure_hash_files(d)}')
 
+        d.appendVarFlag('do_compile', 'prefuncs', ' fetcher_hashes_dummyfunc')
+        d.appendVarFlag('do_configure', 'prefuncs', ' fetcher_hashes_dummyfunc')
+
         # We don't want the workdir to go away
         d.appendVar('RM_WORK_EXCLUDE', ' ' + d.getVar('PN'))
 
diff --git a/meta/conf/bitbake.conf b/meta/conf/bitbake.conf
index 599bbc4ba8..c7ada2c49e 100644
--- a/meta/conf/bitbake.conf
+++ b/meta/conf/bitbake.conf
@@ -735,10 +735,7 @@ SRC_URI[vardepsexclude] += "\
 SRCDATE = "${DATE}"
 SRCREV ??= "INVALID"
 AUTOREV = "${@bb.fetch2.get_autorev(d)}"
-AUTOREV[vardepvalue] = "${SRCPV}"
-# Set Dynamically in base.bbclass
-# SRCPV = "${@bb.fetch2.get_srcrev(d)}"
-SRCPV[vardepvalue] = "${SRCPV}"
+SRCPV = ""
 
 SRC_URI = ""
 
diff --git a/meta/lib/oeqa/selftest/cases/fetch.py b/meta/lib/oeqa/selftest/cases/fetch.py
index c9107022c8..44099176fc 100644
--- a/meta/lib/oeqa/selftest/cases/fetch.py
+++ b/meta/lib/oeqa/selftest/cases/fetch.py
@@ -103,7 +103,7 @@ class Dependencies(OESelftestTestCase):
 
         r = """
         LICENSE="CLOSED"
-        SRC_URI="git://example.com/repo;branch=master"
+        SRC_URI="git://example.com/repo;branch=master;rev=ffffffffffffffffffffffffffffffffffffffff"
         """
         f = self.write_recipe(textwrap.dedent(r), tempdir)
         d = tinfoil.parse_recipe_file(f)