author     Alejandro Hernandez Samaniego <alejandro@enedino.org>    2021-02-24 10:26:32 -0700
committer  Richard Purdie <richard.purdie@linuxfoundation.org>      2021-03-11 14:04:45 +0000
commit     0f84d24df8b83877b45b5d6aa3eca401b059071e
tree       61407ab019cef36dab08fbb59115c26094348710
parent     271caebdc097fffe006a5a6399672b9b27900877
download   poky-0f84d24df8b83877b45b5d6aa3eca401b059071e.tar.gz
bitbake: bitbake: Add Azure Storage fetcher implementation
Allows bitbake to fetch from an Azure Storage account.
The fetcher submodule handles the az:// URI protocol and is based on bitbake's
wget fetcher. This performs better than a proprietary tool like azcopy, which
offers more cloud storage account operations (that we don't need in a fetcher)
but less compatibility.
A sample URI can be defined in the following way:
SRC_URI = "az://<azure-storage-account>.blob.core.windows.net/<container>/foo.tar.xz"
This fetcher can easily be used with PREMIRRORS and SSTATE_MIRRORS, e.g.:
SSTATE_MIRRORS = "file://.* az://<azure-storage-account>.blob.core.windows.net/sstate-cache/PATH;downloadfilename=PATH \n"
PREMIRRORS_prepend = "\
git://.*/.* az://<azure-storage-account>.blob.core.windows.net/downloads/ \n \
ftp://.*/.* az://<azure-storage-account>.blob.core.windows.net/downloads/ \n \
http://.*/.* az://<azure-storage-account>.blob.core.windows.net/downloads/ \n \
https://.*/.* az://<azure-storage-account>.blob.core.windows.net/downloads/ \n \
"
The fetcher can also be used with non-public Azure Storage accounts/containers via a
Shared Access Signature, by declaring the AZ_SAS variable, which the fetcher
picks up automatically:
AZ_SAS="?sv=2000-01-01&ss=...&sig=somesignature"
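Note that the fetcher appends AZ_SAS verbatim when it builds the request URL, so the
token should keep its leading '?'; the resulting request looks like (placeholder values):
https://<azure-storage-account>.blob.core.windows.net/<container>/foo.tar.xz?sv=2000-01-01&ss=...&sig=somesignature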
(Bitbake rev: b103b02f2ce2f8f5079f17ec1a854f904c2110a4)
Signed-off-by: Alejandro Enedino Hernandez Samaniego <alhe@linux.microsoft.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
 -rw-r--r--  bitbake/lib/bb/fetch2/__init__.py |  4
 -rw-r--r--  bitbake/lib/bb/fetch2/az.py       | 93
 2 files changed, 96 insertions(+), 1 deletion(-)
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
index 19169d780f..cf0201c490 100644
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -1243,7 +1243,7 @@ class FetchData(object):
 
             if checksum_name in self.parm:
                 checksum_expected = self.parm[checksum_name]
-            elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3"]:
+            elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3", "az"]:
                 checksum_expected = None
             else:
                 checksum_expected = d.getVarFlag("SRC_URI", checksum_name)
@@ -1908,6 +1908,7 @@ from . import repo
 from . import clearcase
 from . import npm
 from . import npmsw
+from . import az
 
 methods.append(local.Local())
 methods.append(wget.Wget())
@@ -1927,3 +1928,4 @@ methods.append(repo.Repo())
 methods.append(clearcase.ClearCase())
 methods.append(npm.Npm())
 methods.append(npmsw.NpmShrinkWrap())
+methods.append(az.Az())
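For context, the methods list is how bitbake picks a fetcher: FetchData walks the
registered methods and uses the first one whose supports() returns True, which is why
appending az.Az() is all the registration the new fetcher needs. A simplified sketch of
the existing selection loop (not part of this change):

    for m in methods:
        if m.supports(self, d):
            self.method = m
            break

Adding "az" to the checksum type list above means az:// URIs take part in SRC_URI
checksum handling like http/https/s3, so a recipe fetching an archive this way would
normally also set a checksum (placeholder value):

    SRC_URI[sha256sum] = "<expected-sha256>"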
diff --git a/bitbake/lib/bb/fetch2/az.py b/bitbake/lib/bb/fetch2/az.py
new file mode 100644
index 0000000000..3ccc594c22
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/az.py
@@ -0,0 +1,93 @@
+"""
+BitBake 'Fetch' Azure Storage implementation
+
+"""
+
+# Copyright (C) 2021 Alejandro Hernandez Samaniego
+#
+# Based on bb.fetch2.wget:
+#    Copyright (C) 2003, 2004  Chris Larson
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+import shlex
+import os
+import bb
+from bb.fetch2 import FetchError
+from bb.fetch2 import logger
+from bb.fetch2.wget import Wget
+
+
+class Az(Wget):
+
+    def supports(self, ud, d):
+        """
+        Check to see if a given url can be fetched from Azure Storage
+        """
+        return ud.type in ['az']
+
+
+    def checkstatus(self, fetch, ud, d, try_again=True):
+
+        # checkstatus discards parameters either way, we need to do this before adding the SAS
+        ud.url = ud.url.replace('az://', 'https://').split(';')[0]
+
+        az_sas = d.getVar('AZ_SAS')
+        if az_sas and az_sas not in ud.url:
+            ud.url += az_sas
+
+        return Wget.checkstatus(self, fetch, ud, d, try_again)
+
+    # Override download method, include retries
+    def download(self, ud, d, retries=3):
+        """Fetch urls"""
+
+        # If we're reaching the account transaction limit we might be refused a connection;
+        # retrying allows us to avoid false negatives since the limit changes over time
+        fetchcmd = self.basecmd + ' --retry-connrefused --waitretry=5'
+
+        # We need to provide a localpath to avoid wget using the SAS
+        # ud.localfile either has the downloadfilename or ud.path
+        localpath = os.path.join(d.getVar("DL_DIR"), ud.localfile)
+        bb.utils.mkdirhier(os.path.dirname(localpath))
+        fetchcmd += " -O %s" % shlex.quote(localpath)
+
+
+        if ud.user and ud.pswd:
+            fetchcmd += " --user=%s --password=%s --auth-no-challenge" % (ud.user, ud.pswd)
+
+        # Check if a Shared Access Signature was given and use it
+        az_sas = d.getVar('AZ_SAS')
+
+        if az_sas:
+            azuri = '%s%s%s%s' % ('https://', ud.host, ud.path, az_sas)
+        else:
+            azuri = '%s%s%s' % ('https://', ud.host, ud.path)
+
+        if os.path.exists(ud.localpath):
+            # file exists, but we didn't complete it; try again
+            fetchcmd += d.expand(" -c -P ${DL_DIR} '%s'" % azuri)
+        else:
+            fetchcmd += d.expand(" -P ${DL_DIR} '%s'" % azuri)
+
+        try:
+            self._runwget(ud, d, fetchcmd, False)
+        except FetchError as e:
+            # Azure sometimes fails the handshake when wget is used under load, producing a
+            # FetchError from the fetcher; if the artifact exists, retrying should succeed
+            if 'Unable to establish SSL connection' in str(e) and retries > 0:
+                logger.debug2('Unable to establish SSL connection: Retries remaining: %s, Retrying...' % retries)
+                self.download(ud, d, retries - 1)
+
+        # Sanity check since wget can pretend it succeeded when it didn't
+        # Also, this used to happen if sourceforge sent us to the mirror page
+        if not os.path.exists(ud.localpath):
+            raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (azuri, ud.localpath), azuri)
+
+        if os.path.getsize(ud.localpath) == 0:
+            os.remove(ud.localpath)
+            raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (azuri), azuri)
+
+        return True
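As a rough standalone illustration of the URI handling above (not part of the commit;
the helper name is made up): Az.download() effectively rewrites az:// to https:// and
appends the SAS token when one is set:

    # Hypothetical helper mirroring Az.download()'s URI construction.
    def build_az_request_url(host, path, az_sas=None):
        url = "https://%s%s" % (host, path)
        if az_sas:
            url += az_sas   # appended as-is, so AZ_SAS keeps its leading '?'
        return url

    # build_az_request_url("<account>.blob.core.windows.net",
    #                      "/container/foo.tar.xz",
    #                      "?sv=2000-01-01&ss=...&sig=somesignature")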