diff options
Diffstat (limited to 'bitbake/lib')
-rw-r--r-- | bitbake/lib/bb/fetch2/__init__.py | 6 | ||||
-rw-r--r-- | bitbake/lib/bb/fetch2/s3.py | 96 |
2 files changed, 100 insertions, 2 deletions
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py index 672f1095dc..5209f4d3dc 100644 --- a/bitbake/lib/bb/fetch2/__init__.py +++ b/bitbake/lib/bb/fetch2/__init__.py | |||
@@ -1200,13 +1200,13 @@ class FetchData(object): | |||
1200 | self.sha256_name = "sha256sum" | 1200 | self.sha256_name = "sha256sum" |
1201 | if self.md5_name in self.parm: | 1201 | if self.md5_name in self.parm: |
1202 | self.md5_expected = self.parm[self.md5_name] | 1202 | self.md5_expected = self.parm[self.md5_name] |
1203 | elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]: | 1203 | elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3"]: |
1204 | self.md5_expected = None | 1204 | self.md5_expected = None |
1205 | else: | 1205 | else: |
1206 | self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name) | 1206 | self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name) |
1207 | if self.sha256_name in self.parm: | 1207 | if self.sha256_name in self.parm: |
1208 | self.sha256_expected = self.parm[self.sha256_name] | 1208 | self.sha256_expected = self.parm[self.sha256_name] |
1209 | elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]: | 1209 | elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3"]: |
1210 | self.sha256_expected = None | 1210 | self.sha256_expected = None |
1211 | else: | 1211 | else: |
1212 | self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name) | 1212 | self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name) |
@@ -1791,6 +1791,7 @@ from . import svn | |||
1791 | from . import wget | 1791 | from . import wget |
1792 | from . import ssh | 1792 | from . import ssh |
1793 | from . import sftp | 1793 | from . import sftp |
1794 | from . import s3 | ||
1794 | from . import perforce | 1795 | from . import perforce |
1795 | from . import bzr | 1796 | from . import bzr |
1796 | from . import hg | 1797 | from . import hg |
@@ -1808,6 +1809,7 @@ methods.append(gitannex.GitANNEX()) | |||
1808 | methods.append(cvs.Cvs()) | 1809 | methods.append(cvs.Cvs()) |
1809 | methods.append(ssh.SSH()) | 1810 | methods.append(ssh.SSH()) |
1810 | methods.append(sftp.SFTP()) | 1811 | methods.append(sftp.SFTP()) |
1812 | methods.append(s3.S3()) | ||
1811 | methods.append(perforce.Perforce()) | 1813 | methods.append(perforce.Perforce()) |
1812 | methods.append(bzr.Bzr()) | 1814 | methods.append(bzr.Bzr()) |
1813 | methods.append(hg.Hg()) | 1815 | methods.append(hg.Hg()) |
diff --git a/bitbake/lib/bb/fetch2/s3.py b/bitbake/lib/bb/fetch2/s3.py new file mode 100644 index 0000000000..27993aacfe --- /dev/null +++ b/bitbake/lib/bb/fetch2/s3.py | |||
@@ -0,0 +1,96 @@ | |||
1 | # ex:ts=4:sw=4:sts=4:et | ||
2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
3 | """ | ||
4 | BitBake 'Fetch' implementation for Amazon AWS S3. | ||
5 | |||
6 | Class for fetching files from Amazon S3 using the AWS Command Line Interface. | ||
7 | The aws tool must be correctly installed and configured prior to use. | ||
8 | |||
9 | """ | ||
10 | |||
11 | # Copyright (C) 2017, Andre McCurdy <armccurdy@gmail.com> | ||
12 | # | ||
13 | # Based in part on bb.fetch2.wget: | ||
14 | # Copyright (C) 2003, 2004 Chris Larson | ||
15 | # | ||
16 | # This program is free software; you can redistribute it and/or modify | ||
17 | # it under the terms of the GNU General Public License version 2 as | ||
18 | # published by the Free Software Foundation. | ||
19 | # | ||
20 | # This program is distributed in the hope that it will be useful, | ||
21 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
22 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
23 | # GNU General Public License for more details. | ||
24 | # | ||
25 | # You should have received a copy of the GNU General Public License along | ||
26 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
27 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
28 | # | ||
29 | # Based on functions from the base bb module, Copyright 2003 Holger Schurig | ||
30 | |||
31 | import os | ||
32 | import bb | ||
33 | import urllib.request, urllib.parse, urllib.error | ||
34 | from bb.fetch2 import FetchMethod | ||
35 | from bb.fetch2 import FetchError | ||
36 | from bb.fetch2 import runfetchcmd | ||
37 | |||
class S3(FetchMethod):
    """Class to fetch urls via 'aws s3'"""

    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with s3.
        """
        return ud.type in ['s3']

    def recommends_checksum(self, urldata):
        # s3:// urls are plain file downloads, so SRC_URI checksums
        # (md5sum/sha256sum) should be provided and verified.
        return True

    def urldata_init(self, ud, d):
        # Allow the local file name to be overridden with the
        # "downloadfilename" url parameter, otherwise derive it from the
        # object key.
        if 'downloadfilename' in ud.parm:
            ud.basename = ud.parm['downloadfilename']
        else:
            ud.basename = os.path.basename(ud.path)

        ud.localfile = d.expand(urllib.parse.unquote(ud.basename))

    def download(self, ud, d):
        """
        Fetch urls
        Assumes localpath was called first
        """

        cmd = 'aws s3 cp s3://%s%s %s' % (ud.host, ud.path, ud.localpath)
        bb.fetch2.check_network_access(d, cmd, ud.url)
        runfetchcmd(cmd, d)

        # Additional sanity checks copied from the wget class (although there
        # are no known issues which mean these are required, treat the aws cli
        # tool with a little healthy suspicion).

        if not os.path.exists(ud.localpath):
            raise FetchError("The aws cp command returned success for s3://%s%s but %s doesn't exist?!" % (ud.host, ud.path, ud.localpath))

        if os.path.getsize(ud.localpath) == 0:
            # Remove the empty file so a retry is not fooled by a stale
            # zero-length download.
            os.remove(ud.localpath)
            raise FetchError("The aws cp command for s3://%s%s resulted in a zero size file?! Deleting and failing since this isn't right." % (ud.host, ud.path))

        return True

    def checkstatus(self, fetch, ud, d):
        """
        Check the status of a URL
        """

        cmd = 'aws s3 ls s3://%s%s' % (ud.host, ud.path)
        bb.fetch2.check_network_access(d, cmd, ud.url)
        output = runfetchcmd(cmd, d)

        # "aws s3 ls s3://mybucket/foo" will exit with success even if the file
        # is not found, so check output of the command to confirm success.

        if not output:
            raise FetchError("The aws ls command for s3://%s%s gave empty output" % (ud.host, ud.path))

        # "aws s3 ls" matches by key *prefix*: a request for "foo" also lists
        # "foobar", so non-empty output alone is not proof that the object
        # exists. Require a listing entry whose object name (the last
        # whitespace-separated field of an "aws s3 ls" output line) matches
        # the requested key exactly.
        expected = os.path.basename(ud.path)
        for line in output.splitlines():
            fields = line.split()
            if fields and fields[-1] == expected:
                return True

        raise FetchError("The aws ls command for s3://%s%s did not list %s" % (ud.host, ud.path, expected))