summaryrefslogtreecommitdiffstats
path: root/bitbake
diff options
context:
space:
mode:
author:    Andre McCurdy <armccurdy@gmail.com>          2017-03-01 15:56:46 -0800
committer: Richard Purdie <richard.purdie@linuxfoundation.org>  2017-03-03 00:19:37 +0000
commit:    8f1ed1758748e4c00eaa6b4b3528e79f36d62c19 (patch)
tree:      e91830fe15d27f40c92885a13e2a0f335a54220f /bitbake
parent:    da6652b526c461c77417385802306e802084d992 (diff)
download:  poky-8f1ed1758748e4c00eaa6b4b3528e79f36d62c19.tar.gz
bitbake: fetch2: add initial Amazon AWS S3 fetcher
Class for fetching files from Amazon S3 using the AWS Command Line Interface. The aws tool must be correctly installed and configured prior to use. The class supports both download() and checkstatus(), which therefore allows S3 mirrors to be used for SSTATE_MIRRORS. (Bitbake rev: 6fe07ed25457dd7952b60f4b2153d56b15d5eea6) Signed-off-by: Andre McCurdy <armccurdy@gmail.com> Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Diffstat (limited to 'bitbake')
-rw-r--r--  bitbake/lib/bb/fetch2/__init__.py |  6
-rw-r--r--  bitbake/lib/bb/fetch2/s3.py       | 96
2 files changed, 100 insertions(+), 2 deletions(-)
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
index 672f1095dc..5209f4d3dc 100644
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -1200,13 +1200,13 @@ class FetchData(object):
             self.sha256_name = "sha256sum"
         if self.md5_name in self.parm:
             self.md5_expected = self.parm[self.md5_name]
-        elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]:
+        elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3"]:
             self.md5_expected = None
         else:
             self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name)
         if self.sha256_name in self.parm:
             self.sha256_expected = self.parm[self.sha256_name]
-        elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]:
+        elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3"]:
             self.sha256_expected = None
         else:
             self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name)
@@ -1791,6 +1791,7 @@ from . import svn
 from . import wget
 from . import ssh
 from . import sftp
+from . import s3
 from . import perforce
 from . import bzr
 from . import hg
@@ -1808,6 +1809,7 @@ methods.append(gitannex.GitANNEX())
 methods.append(cvs.Cvs())
 methods.append(ssh.SSH())
 methods.append(sftp.SFTP())
+methods.append(s3.S3())
 methods.append(perforce.Perforce())
 methods.append(bzr.Bzr())
 methods.append(hg.Hg())
diff --git a/bitbake/lib/bb/fetch2/s3.py b/bitbake/lib/bb/fetch2/s3.py
new file mode 100644
index 0000000000..27993aacfe
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/s3.py
@@ -0,0 +1,96 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Fetch' implementation for Amazon AWS S3.

Class for fetching files from Amazon S3 using the AWS Command Line Interface.
The aws tool must be correctly installed and configured prior to use.

"""

# Copyright (C) 2017, Andre McCurdy <armccurdy@gmail.com>
#
# Based in part on bb.fetch2.wget:
#    Copyright (C) 2003, 2004  Chris Larson
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig

import os
import bb
import urllib.request, urllib.parse, urllib.error
from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
from bb.fetch2 import runfetchcmd
class S3(FetchMethod):
    """Class to fetch urls via 'aws s3'.

    The aws command line tool must be installed and configured (credentials,
    default region) before this fetcher is used.
    """

    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with s3.
        """
        return ud.type in ['s3']

    def recommends_checksum(self, urldata):
        # Recipes should always provide md5sum/sha256sum for s3:// URIs;
        # the transfer itself is not integrity-checked by bitbake otherwise.
        return True

    def urldata_init(self, ud, d):
        """
        Initialize per-url data: derive the local file name from the url
        path (or the 'downloadfilename' parameter, if given).
        """
        if 'downloadfilename' in ud.parm:
            ud.basename = ud.parm['downloadfilename']
        else:
            ud.basename = os.path.basename(ud.path)

        ud.localfile = d.expand(urllib.parse.unquote(ud.basename))

    def download(self, ud, d):
        """
        Fetch urls
        Assumes localpath was called first
        """

        cmd = 'aws s3 cp s3://%s%s %s' % (ud.host, ud.path, ud.localpath)
        bb.fetch2.check_network_access(d, cmd, ud.url)
        runfetchcmd(cmd, d)

        # Additional sanity checks copied from the wget class (although there
        # are no known issues which mean these are required, treat the aws cli
        # tool with a little healthy suspicion).

        if not os.path.exists(ud.localpath):
            raise FetchError("The aws cp command returned success for s3://%s%s but %s doesn't exist?!" % (ud.host, ud.path, ud.localpath))

        if os.path.getsize(ud.localpath) == 0:
            os.remove(ud.localpath)
            raise FetchError("The aws cp command for s3://%s%s resulted in a zero size file?! Deleting and failing since this isn't right." % (ud.host, ud.path))

        return True

    def checkstatus(self, fetch, ud, d):
        """
        Check the status of a URL
        """

        cmd = 'aws s3 ls s3://%s%s' % (ud.host, ud.path)
        bb.fetch2.check_network_access(d, cmd, ud.url)
        output = runfetchcmd(cmd, d)

        # "aws s3 ls s3://mybucket/foo" will exit with success even if the file
        # is not found, so check output of the command to confirm success.

        if not output:
            raise FetchError("The aws ls command for s3://%s%s gave empty output" % (ud.host, ud.path))

        # "aws s3 ls" matches by key *prefix*, so a non-empty listing may
        # consist entirely of other objects (e.g. asking for "foo" when only
        # "foobar" exists).  Each file line of the listing has the form
        # "<date> <time> <size> <name>"; require one whose name is exactly
        # the object we asked about before declaring success.
        name = os.path.basename(ud.path)
        for line in output.splitlines():
            fields = line.split(None, 3)
            if len(fields) == 4 and fields[3] == name:
                return True

        raise FetchError("The aws ls command for s3://%s%s did not list %s" % (ud.host, ud.path, name))