author	Paul Eggleton <paul.eggleton@linux.intel.com>	2017-05-19 16:11:48 +1200
committer	Richard Purdie <richard.purdie@linuxfoundation.org>	2017-05-25 23:59:31 +0100
commit	415fbfb0cd1bc5129179596ba27ae2362b7de2a4 (patch)
tree	a082a4ca595f2842dbb47a060ee68bfe0ac0d250 /scripts
parent	ac344766f09a10ddaed716f95cdfe3badb5345f7 (diff)
download	poky-415fbfb0cd1bc5129179596ba27ae2362b7de2a4.tar.gz
scriptutils: fix fetch_uri() to work with RSS
Since recipe-specific sysroots were implemented, devtool add and devtool
upgrade operations that fetch from a URL requiring native sysroot
dependencies fail, because no recipe-specific sysroot is set up for them
during fetching. An example is any URL pointing to a tarball compressed
with xz, e.g. devtool upgrade on gnutls.
The most expedient fix is to set up a dummy recipe-specific sysroot to use
for the fetch/unpack operations. We do this in the same manner as
bitbake -b: we simply take all of the sysroot components available and
create a sysroot from those, rather than ensuring the correct dependencies
are present. This means we will still have problems if e.g. xz-native
hasn't been built yet, but that issue is trickier to solve and is tracked
separately.
Fixes [YOCTO #11474].
(From OE-Core rev: 559151e783759af78b5cdd76cdbb9ce325a391e6)
Signed-off-by: Paul Eggleton <paul.eggleton@linux.intel.com>
Signed-off-by: Ross Burton <ross.burton@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
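
To make the change easier to follow, here is a minimal sketch of the pattern the patch applies before fetching: create a throwaway WORKDIR, mark the datastore as if do_fetch were running for a dummy recipe, and run extend_recipe_sysroot in the bitbake -b style. The helper name prepare_dummy_sysroot is illustrative only; it assumes the bb module is importable and that d is a BitBake datastore (e.g. from a tinfoil session), as in the scripts environment.

import tempfile
import bb

def prepare_dummy_sysroot(d):
    # Parent directory for the throwaway workdir; BASE_WORKDIR is the usual
    # tmp/work location in an OE build
    tmpparent = d.getVar('BASE_WORKDIR')
    bb.utils.mkdirhier(tmpparent)
    tmpworkdir = tempfile.mkdtemp(dir=tmpparent)

    localdata = bb.data.createCopy(d)
    # Pretend do_fetch is running for a dummy recipe so extend_recipe_sysroot
    # has the variables it expects outside a real task context
    localdata.setVar('WORKDIR', tmpworkdir)
    localdata.setVar('BB_RUNTASK', 'do_fetch')
    localdata.setVar('PN', 'dummy')
    # BB_LIMITEDDEPS makes it assemble the sysroot from whatever components
    # already exist rather than resolving the full dependency graph
    localdata.setVar('BB_LIMITEDDEPS', '1')
    bb.build.exec_func("extend_recipe_sysroot", localdata)
    return localdata, tmpworkdir

The caller is then responsible for removing tmpworkdir afterwards, which the patch does with shutil.rmtree in its outer finally block.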
Diffstat (limited to 'scripts')
-rw-r--r--	scripts/lib/scriptutils.py	65
1 file changed, 41 insertions, 24 deletions
diff --git a/scripts/lib/scriptutils.py b/scripts/lib/scriptutils.py
index 4ccbe5c108..92b601c7e8 100644
--- a/scripts/lib/scriptutils.py
+++ b/scripts/lib/scriptutils.py
@@ -21,6 +21,8 @@ import logging
 import glob
 import argparse
 import subprocess
+import tempfile
+import shutil
 
 def logger_create(name, stream=None):
     logger = logging.getLogger(name)
@@ -78,32 +80,47 @@ def git_convert_standalone_clone(repodir):
 
 def fetch_uri(d, uri, destdir, srcrev=None):
     """Fetch a URI to a local directory"""
-    import bb.data
-    bb.utils.mkdirhier(destdir)
-    localdata = bb.data.createCopy(d)
-    localdata.setVar('BB_STRICT_CHECKSUM', '')
-    localdata.setVar('SRCREV', srcrev)
-    ret = (None, None)
-    olddir = os.getcwd()
+    import bb
+    tmpparent = d.getVar('BASE_WORKDIR')
+    bb.utils.mkdirhier(tmpparent)
+    tmpworkdir = tempfile.mkdtemp(dir=tmpparent)
     try:
-        fetcher = bb.fetch2.Fetch([uri], localdata)
-        for u in fetcher.ud:
-            ud = fetcher.ud[u]
-            ud.ignore_checksums = True
-        fetcher.download()
-        for u in fetcher.ud:
-            ud = fetcher.ud[u]
-            if ud.localpath.rstrip(os.sep) == localdata.getVar('DL_DIR').rstrip(os.sep):
-                raise Exception('Local path is download directory - please check that the URI "%s" is correct' % uri)
-        fetcher.unpack(destdir)
-        for u in fetcher.ud:
-            ud = fetcher.ud[u]
-            if ud.method.recommends_checksum(ud):
-                md5value = bb.utils.md5_file(ud.localpath)
-                sha256value = bb.utils.sha256_file(ud.localpath)
-                ret = (md5value, sha256value)
+        bb.utils.mkdirhier(destdir)
+        localdata = bb.data.createCopy(d)
+
+        # Set some values to allow extend_recipe_sysroot to work here we're we are not running from a task
+        localdata.setVar('WORKDIR', tmpworkdir)
+        localdata.setVar('BB_RUNTASK', 'do_fetch')
+        localdata.setVar('PN', 'dummy')
+        localdata.setVar('BB_LIMITEDDEPS', '1')
+        bb.build.exec_func("extend_recipe_sysroot", localdata)
+
+        # Set some values for the benefit of the fetcher code
+        localdata.setVar('BB_STRICT_CHECKSUM', '')
+        localdata.setVar('SRCREV', srcrev)
+        ret = (None, None)
+        olddir = os.getcwd()
+        try:
+            fetcher = bb.fetch2.Fetch([uri], localdata)
+            for u in fetcher.ud:
+                ud = fetcher.ud[u]
+                ud.ignore_checksums = True
+            fetcher.download()
+            for u in fetcher.ud:
+                ud = fetcher.ud[u]
+                if ud.localpath.rstrip(os.sep) == localdata.getVar('DL_DIR').rstrip(os.sep):
+                    raise Exception('Local path is download directory - please check that the URI "%s" is correct' % uri)
+            fetcher.unpack(destdir)
+            for u in fetcher.ud:
+                ud = fetcher.ud[u]
+                if ud.method.recommends_checksum(ud):
+                    md5value = bb.utils.md5_file(ud.localpath)
+                    sha256value = bb.utils.sha256_file(ud.localpath)
+                    ret = (md5value, sha256value)
+        finally:
+            os.chdir(olddir)
     finally:
-        os.chdir(olddir)
+        shutil.rmtree(tmpworkdir)
     return ret
 
 def run_editor(fn):
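
For orientation, here is a hedged sketch of how a devtool-style caller might use the reworked function after this change. The URI and destination directory below are purely illustrative, and d is assumed to be a BitBake datastore obtained from an existing tinfoil session.

import scriptutils   # assumes poky's scripts/lib is on sys.path, as devtool arranges

def fetch_example(d):
    uri = 'https://www.example.org/releases/foo-1.0.tar.xz'   # hypothetical URL
    destdir = '/tmp/devtool-fetch-example'                     # hypothetical destination
    # fetch_uri downloads and unpacks the URI into destdir; it returns
    # (md5, sha256) when the fetcher recommends checksums for the URI,
    # otherwise (None, None). The temporary dummy sysroot it creates is
    # cleaned up internally before returning.
    md5value, sha256value = scriptutils.fetch_uri(d, uri, destdir)
    return md5value, sha256value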