Diffstat (limited to 'bitbake/lib/bb/fetch2')
-rw-r--r--  bitbake/lib/bb/fetch2/__init__.py  64
-rw-r--r--  bitbake/lib/bb/fetch2/gcp.py       14
-rw-r--r--  bitbake/lib/bb/fetch2/git.py        3
-rw-r--r--  bitbake/lib/bb/fetch2/gitsm.py     44
-rw-r--r--  bitbake/lib/bb/fetch2/wget.py       4
5 files changed, 58 insertions(+), 71 deletions(-)
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
index 5bf2c4b8cf..1a6ff25d4d 100644
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -237,7 +237,7 @@ class URI(object):
         # to RFC compliant URL format. E.g.:
         # file://foo.diff -> file:foo.diff
         if urlp.scheme in self._netloc_forbidden:
-            uri = re.sub("(?<=:)//(?!/)", "", uri, 1)
+            uri = re.sub(r"(?<=:)//(?!/)", "", uri, count=1)
             reparse = 1
 
         if reparse:
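Two things change in the re.sub() call above: the pattern becomes a raw string, and the replacement count is passed by keyword. Passing count positionally to re.sub() is deprecated as of Python 3.13, and raw strings are the usual defensive style for regex patterns even when, as here, the pattern contains no backslashes. A quick standalone illustration of what the lookbehind/lookahead pair does (the example URIs are made up):

    import re

    # Strip the "//" that directly follows the scheme separator, but leave
    # absolute file URLs ("file:///...") untouched.
    print(re.sub(r"(?<=:)//(?!/)", "", "file://foo.diff", count=1))
    # -> file:foo.diff
    print(re.sub(r"(?<=:)//(?!/)", "", "file:///abs/path.diff", count=1))
    # -> file:///abs/path.diff (unchanged)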
@@ -499,30 +499,30 @@ def fetcher_init(d):
     Calls before this must not hit the cache.
     """
 
-    revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
-    try:
-        # fetcher_init is called multiple times, so make sure we only save the
-        # revs the first time it is called.
-        if not bb.fetch2.saved_headrevs:
-            bb.fetch2.saved_headrevs = dict(revs)
-    except:
-        pass
-
-    # When to drop SCM head revisions controlled by user policy
-    srcrev_policy = d.getVar('BB_SRCREV_POLICY') or "clear"
-    if srcrev_policy == "cache":
-        logger.debug("Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
-    elif srcrev_policy == "clear":
-        logger.debug("Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
-        revs.clear()
-    else:
-        raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
-
-    _checksum_cache.init_cache(d.getVar("BB_CACHEDIR"))
+    with bb.persist_data.persist('BB_URI_HEADREVS', d) as revs:
+        try:
+            # fetcher_init is called multiple times, so make sure we only save the
+            # revs the first time it is called.
+            if not bb.fetch2.saved_headrevs:
+                bb.fetch2.saved_headrevs = dict(revs)
+        except:
+            pass
+
+        # When to drop SCM head revisions controlled by user policy
+        srcrev_policy = d.getVar('BB_SRCREV_POLICY') or "clear"
+        if srcrev_policy == "cache":
+            logger.debug("Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
+        elif srcrev_policy == "clear":
+            logger.debug("Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
+            revs.clear()
+        else:
+            raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
+
+    _checksum_cache.init_cache(d.getVar("BB_CACHEDIR"))
 
     for m in methods:
         if hasattr(m, "init"):
             m.init(d)
 
 def fetcher_parse_save():
     _checksum_cache.save_extras()
@@ -536,8 +536,8 @@ def fetcher_compare_revisions(d):
     when bitbake was started and return true if they have changed.
     """
 
-    headrevs = dict(bb.persist_data.persist('BB_URI_HEADREVS', d))
-    return headrevs != bb.fetch2.saved_headrevs
+    with dict(bb.persist_data.persist('BB_URI_HEADREVS', d)) as headrevs:
+        return headrevs != bb.fetch2.saved_headrevs
 
 def mirror_from_string(data):
     mirrors = (data or "").replace('\\n',' ').split()
@@ -1662,13 +1662,13 @@ class FetchMethod(object):
         if not hasattr(self, "_latest_revision"):
             raise ParameterError("The fetcher for this URL does not support _latest_revision", ud.url)
 
-        revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
-        key = self.generate_revision_key(ud, d, name)
-        try:
-            return revs[key]
-        except KeyError:
-            revs[key] = rev = self._latest_revision(ud, d, name)
-            return rev
+        with bb.persist_data.persist('BB_URI_HEADREVS', d) as revs:
+            key = self.generate_revision_key(ud, d, name)
+            try:
+                return revs[key]
+            except KeyError:
+                revs[key] = rev = self._latest_revision(ud, d, name)
+                return rev
 
     def sortable_revision(self, ud, d, name):
         latest_rev = self._build_revision(ud, d, name)
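The three persist_data hunks above all make the same move: the table returned by bb.persist_data.persist() is now used as a context manager, so the underlying database is closed deterministically instead of whenever garbage collection gets to it. A minimal sketch of the pattern, assuming only that the object is dict-like and implements __enter__/__exit__ (this PersistTable is an illustrative stand-in, not bb.persist_data's real implementation):

    import sqlite3

    class PersistTable:
        # Dict-like, sqlite-backed table that closes its connection when the
        # with-block exits. Illustrative stand-in for bb.persist_data tables.
        def __init__(self, path, table):
            self.table = table
            self.conn = sqlite3.connect(path)
            self.conn.execute(
                "CREATE TABLE IF NOT EXISTS %s (key TEXT PRIMARY KEY, value TEXT)" % table)

        def __enter__(self):
            return self

        def __exit__(self, exc_type, exc_val, exc_tb):
            self.conn.commit()
            self.conn.close()

        def __getitem__(self, key):
            row = self.conn.execute(
                "SELECT value FROM %s WHERE key=?" % self.table, (key,)).fetchone()
            if row is None:
                raise KeyError(key)
            return row[0]

        def __setitem__(self, key, value):
            self.conn.execute(
                "INSERT OR REPLACE INTO %s VALUES (?, ?)" % self.table, (key, value))

    # Usage mirroring latest_revision() above: consult the cache, fall back to
    # computing the value, and let the with-block close the database either way.
    with PersistTable("/tmp/headrevs.db", "HEADREVS") as revs:
        try:
            rev = revs["git://example.com/repo"]
        except KeyError:
            revs["git://example.com/repo"] = rev = "deadbeef"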
diff --git a/bitbake/lib/bb/fetch2/gcp.py b/bitbake/lib/bb/fetch2/gcp.py
index eb3e0c6a6b..2ee9ed2194 100644
--- a/bitbake/lib/bb/fetch2/gcp.py
+++ b/bitbake/lib/bb/fetch2/gcp.py
@@ -23,7 +23,6 @@ import urllib.parse, urllib.error
 from bb.fetch2 import FetchMethod
 from bb.fetch2 import FetchError
 from bb.fetch2 import logger
-from bb.fetch2 import runfetchcmd
 
 class GCP(FetchMethod):
     """
@@ -48,7 +47,6 @@ class GCP(FetchMethod):
         ud.basename = os.path.basename(ud.path)
 
         ud.localfile = d.expand(urllib.parse.unquote(ud.basename))
-        ud.basecmd = "gsutil stat"
 
     def get_gcp_client(self):
         from google.cloud import storage
@@ -59,17 +57,20 @@ class GCP(FetchMethod):
         Fetch urls using the GCP API.
         Assumes localpath was called first.
         """
+        from google.api_core.exceptions import NotFound
         logger.debug2(f"Trying to download gs://{ud.host}{ud.path} to {ud.localpath}")
         if self.gcp_client is None:
             self.get_gcp_client()
 
-        bb.fetch2.check_network_access(d, ud.basecmd, f"gs://{ud.host}{ud.path}")
-        runfetchcmd("%s %s" % (ud.basecmd, f"gs://{ud.host}{ud.path}"), d)
+        bb.fetch2.check_network_access(d, "blob.download_to_filename", f"gs://{ud.host}{ud.path}")
 
         # Path sometimes has leading slash, so strip it
         path = ud.path.lstrip("/")
         blob = self.gcp_client.bucket(ud.host).blob(path)
-        blob.download_to_filename(ud.localpath)
+        try:
+            blob.download_to_filename(ud.localpath)
+        except NotFound:
+            raise FetchError("The GCP API threw a NotFound exception")
 
         # Additional sanity checks copied from the wget class (although there
         # are no known issues which mean these are required, treat the GCP API
@@ -91,8 +92,7 @@ class GCP(FetchMethod):
         if self.gcp_client is None:
             self.get_gcp_client()
 
-        bb.fetch2.check_network_access(d, ud.basecmd, f"gs://{ud.host}{ud.path}")
-        runfetchcmd("%s %s" % (ud.basecmd, f"gs://{ud.host}{ud.path}"), d)
+        bb.fetch2.check_network_access(d, "gcp_client.bucket(ud.host).blob(path).exists()", f"gs://{ud.host}{ud.path}")
 
         # Path sometimes has leading slash, so strip it
         path = ud.path.lstrip("/")
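The gcp.py hunks drop the shelling-out to gsutil entirely: the network-access check now names the API call that will actually be made, and a missing object surfaces as google.api_core.exceptions.NotFound, which the fetcher converts to a FetchError. The same call pattern in isolation (bucket and paths are placeholders, and the error is re-raised as RuntimeError so the sketch has no BitBake dependency):

    from google.api_core.exceptions import NotFound
    from google.cloud import storage

    def download_blob(bucket_name, blob_path, local_path):
        # Mirrors the fetcher's download path: strip the leading slash that
        # URL parsing leaves on the object path, then fetch the blob.
        client = storage.Client()
        blob = client.bucket(bucket_name).blob(blob_path.lstrip("/"))
        try:
            blob.download_to_filename(local_path)
        except NotFound:
            raise RuntimeError("gs://%s%s not found" % (bucket_name, blob_path))

    download_blob("my-mirror-bucket", "/downloads/foo.tar.gz", "/tmp/foo.tar.gz")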
diff --git a/bitbake/lib/bb/fetch2/git.py b/bitbake/lib/bb/fetch2/git.py
index c7ff769fdf..6029144601 100644
--- a/bitbake/lib/bb/fetch2/git.py
+++ b/bitbake/lib/bb/fetch2/git.py
@@ -926,9 +926,8 @@ class Git(FetchMethod):
             commits = None
         else:
             if not os.path.exists(rev_file) or not os.path.getsize(rev_file):
-                from pipes import quote
                 commits = bb.fetch2.runfetchcmd(
-                        "git rev-list %s -- | wc -l" % quote(rev),
+                        "git rev-list %s -- | wc -l" % shlex.quote(rev),
                         d, quiet=True).strip().lstrip('0')
             if commits:
                 open(rev_file, "w").write("%d\n" % int(commits))
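pipes.quote was an undocumented alias for shlex.quote, and the whole pipes module was removed in Python 3.13 (PEP 594), so the import swap above is required, not cosmetic. The behaviour is identical:

    import shlex

    # Quote an untrusted ref before splicing it into a shell pipeline, exactly
    # as the fetcher does for "git rev-list <rev> -- | wc -l".
    rev = "v1.0; rm -rf /"
    print("git rev-list %s -- | wc -l" % shlex.quote(rev))
    # -> git rev-list 'v1.0; rm -rf /' -- | wc -l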
diff --git a/bitbake/lib/bb/fetch2/gitsm.py b/bitbake/lib/bb/fetch2/gitsm.py
index f7f3af7212..fab4b1164c 100644
--- a/bitbake/lib/bb/fetch2/gitsm.py
+++ b/bitbake/lib/bb/fetch2/gitsm.py
@@ -147,6 +147,19 @@ class GitSM(Git):
 
         return submodules != []
 
+    def call_process_submodules(self, ud, d, extra_check, subfunc):
+        # If we're using a shallow mirror tarball it needs to be
+        # unpacked temporarily so that we can examine the .gitmodules file
+        if ud.shallow and os.path.exists(ud.fullshallow) and extra_check:
+            tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR"))
+            try:
+                runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=tmpdir)
+                self.process_submodules(ud, tmpdir, subfunc, d)
+            finally:
+                shutil.rmtree(tmpdir)
+        else:
+            self.process_submodules(ud, ud.clonedir, subfunc, d)
+
     def need_update(self, ud, d):
         if Git.need_update(self, ud, d):
             return True
@@ -164,15 +177,7 @@ class GitSM(Git):
                 logger.error('gitsm: submodule update check failed: %s %s' % (type(e).__name__, str(e)))
                 need_update_result = True
 
-        # If we're using a shallow mirror tarball it needs to be unpacked
-        # temporarily so that we can examine the .gitmodules file
-        if ud.shallow and os.path.exists(ud.fullshallow) and not os.path.exists(ud.clonedir):
-            tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR"))
-            runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=tmpdir)
-            self.process_submodules(ud, tmpdir, need_update_submodule, d)
-            shutil.rmtree(tmpdir)
-        else:
-            self.process_submodules(ud, ud.clonedir, need_update_submodule, d)
+        self.call_process_submodules(ud, d, not os.path.exists(ud.clonedir), need_update_submodule)
 
         if need_update_list:
             logger.debug('gitsm: Submodules requiring update: %s' % (' '.join(need_update_list)))
@@ -195,16 +200,7 @@ class GitSM(Git):
                 raise
 
         Git.download(self, ud, d)
-
-        # If we're using a shallow mirror tarball it needs to be unpacked
-        # temporarily so that we can examine the .gitmodules file
-        if ud.shallow and os.path.exists(ud.fullshallow) and self.need_update(ud, d):
-            tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR"))
-            runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=tmpdir)
-            self.process_submodules(ud, tmpdir, download_submodule, d)
-            shutil.rmtree(tmpdir)
-        else:
-            self.process_submodules(ud, ud.clonedir, download_submodule, d)
+        self.call_process_submodules(ud, d, self.need_update(ud, d), download_submodule)
 
     def unpack(self, ud, destdir, d):
         def unpack_submodules(ud, url, module, modpath, workdir, d):
@@ -263,14 +259,6 @@ class GitSM(Git):
             newfetch = Fetch([url], d, cache=False)
             urldata.extend(newfetch.expanded_urldata())
 
-        # If we're using a shallow mirror tarball it needs to be unpacked
-        # temporarily so that we can examine the .gitmodules file
-        if ud.shallow and os.path.exists(ud.fullshallow) and ud.method.need_update(ud, d):
-            tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR"))
-            subprocess.check_call("tar -xzf %s" % ud.fullshallow, cwd=tmpdir, shell=True)
-            self.process_submodules(ud, tmpdir, add_submodule, d)
-            shutil.rmtree(tmpdir)
-        else:
-            self.process_submodules(ud, ud.clonedir, add_submodule, d)
+        self.call_process_submodules(ud, d, ud.method.need_update(ud, d), add_submodule)
 
         return urldata
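The gitsm.py change deduplicates three nearly identical blocks into call_process_submodules(), and improves on them in two small ways: cleanup of the temporary directory now sits in a try/finally, so a failure while inspecting .gitmodules no longer leaks the directory, and the one variant that used subprocess.check_call now goes through runfetchcmd like the others. A reduced sketch of the helper's shape (names are hypothetical; the real method takes BitBake's ud/d objects):

    import os
    import shutil
    import tarfile
    import tempfile

    def with_unpacked_tarball(tarball, scratch_parent, prefer_tarball, clonedir, func):
        # Run func() against a temporarily unpacked mirror tarball when one
        # should be used, otherwise against the existing clone directory.
        # try/finally guarantees the scratch directory is removed on error.
        if prefer_tarball and os.path.exists(tarball):
            tmpdir = tempfile.mkdtemp(dir=scratch_parent)
            try:
                with tarfile.open(tarball) as tf:
                    tf.extractall(tmpdir)
                func(tmpdir)
            finally:
                shutil.rmtree(tmpdir)
        else:
            func(clonedir)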
diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py
index 2e92117634..5bb3b2f361 100644
--- a/bitbake/lib/bb/fetch2/wget.py
+++ b/bitbake/lib/bb/fetch2/wget.py
@@ -87,7 +87,7 @@ class Wget(FetchMethod):
         if not ud.localfile:
             ud.localfile = d.expand(urllib.parse.unquote(ud.host + ud.path).replace("/", "."))
 
-        self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -t 2 -T 30"
+        self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -t 2 -T 100"
 
         if ud.type == 'ftp' or ud.type == 'ftps':
             self.basecmd += " --passive-ftp"
@@ -371,7 +371,7 @@ class Wget(FetchMethod):
             except (FileNotFoundError, netrc.NetrcParseError):
                 pass
 
-            with opener.open(r, timeout=30) as response:
+            with opener.open(r, timeout=100) as response:
                 pass
         except (urllib.error.URLError, ConnectionResetError, TimeoutError) as e:
             if try_again:
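Both wget.py hunks raise the same limit: -T is wget's timeout in seconds, and the urllib-based checkstatus() probe uses the matching value so command-line fetches and HEAD checks time out consistently. The probe pattern in isolation (the URL is a placeholder):

    import urllib.error
    import urllib.request

    opener = urllib.request.build_opener()
    try:
        # A successful open is all a status check needs; the body is discarded.
        with opener.open("https://example.com/file.tar.gz", timeout=100) as response:
            pass
    except (urllib.error.URLError, ConnectionResetError, TimeoutError) as e:
        print("checkstatus failed: %s" % e)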