Diffstat (limited to 'bitbake/lib/bb/fetch2/__init__.py')
 bitbake/lib/bb/fetch2/__init__.py | 836 ++++++++++++++++++++++++++++++++++++++
 1 file changed, 836 insertions(+), 0 deletions(-)
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
new file mode 100644
index 0000000000..f7153ebce9
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -0,0 +1,836 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Fetch' implementations

Classes for obtaining upstream sources for the
BitBake build tools.
"""

# Copyright (C) 2003, 2004 Chris Larson
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig

from __future__ import absolute_import
from __future__ import print_function
import os, re
import logging
import bb
from bb import data
from bb import persist_data

logger = logging.getLogger("BitBake.Fetch")

class MalformedUrl(Exception):
    """Exception raised when encountering an invalid url"""

class FetchError(Exception):
    """Exception raised when a download fails"""

class NoMethodError(Exception):
    """Exception raised when there is no method to obtain a supplied url or set of urls"""

class MissingParameterError(Exception):
    """Exception raised when a fetch method is missing a critical parameter in the url"""

class ParameterError(Exception):
    """Exception raised when a url cannot be processed due to invalid parameters."""

class MD5SumError(Exception):
    """Exception raised when the MD5SUM of a file does not match the expected one"""

class InvalidSRCREV(Exception):
    """Exception raised when an invalid SRCREV is encountered"""

def decodeurl(url):
    """Decodes a URL into the tokens (scheme, network location, path,
    user, password, parameters).
    """

    m = re.compile('(?P<type>[^:]*)://((?P<user>.+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
    if not m:
        raise MalformedUrl(url)

    type = m.group('type')
    location = m.group('location')
    if not location:
        raise MalformedUrl(url)
    user = m.group('user')
    parm = m.group('parm')

    locidx = location.find('/')
    if locidx != -1 and type.lower() != 'file':
        host = location[:locidx]
        path = location[locidx:]
    else:
        host = ""
        path = location
    if user:
        m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user)
        if m:
            user = m.group('user')
            pswd = m.group('pswd')
    else:
        user = ''
        pswd = ''

    p = {}
    if parm:
        for s in parm.split(';'):
            s1, s2 = s.split('=')
            p[s1] = s2

    return (type, host, path, user, pswd, p)

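# Illustrative example (not part of the original module) of the token tuple
# decodeurl() produces; parameter strings after ';' become dict entries:
#
#   decodeurl("git://git.example.com/repo.git;protocol=http;name=foo")
#   -> ('git', 'git.example.com', '/repo.git', '', '',
#       {'protocol': 'http', 'name': 'foo'})
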
def encodeurl(decoded):
    """Encodes a URL from tokens (scheme, network location, path,
    user, password, parameters).
    """

    (type, host, path, user, pswd, p) = decoded

    if not type or not path:
        raise MissingParameterError("Type or path url components missing when encoding %s" % decoded)
    url = '%s://' % type
    if user:
        url += "%s" % user
        if pswd:
            url += ":%s" % pswd
        url += "@"
    if host:
        url += "%s" % host
    url += "%s" % path
    if p:
        for parm in p:
            url += ";%s=%s" % (parm, p[parm])

    return url

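# Illustrative round trip (not part of the original module):
#
#   encodeurl(('git', 'git.example.com', '/repo.git', '', '', {'protocol': 'http'}))
#   -> 'git://git.example.com/repo.git;protocol=http'
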
def uri_replace(uri, uri_find, uri_replace, d):
    if not uri or not uri_find or not uri_replace:
        logger.debug(1, "uri_replace: passed an undefined value, not replacing")
        return None
    uri_decoded = list(decodeurl(uri))
    uri_find_decoded = list(decodeurl(uri_find))
    uri_replace_decoded = list(decodeurl(uri_replace))
    result_decoded = ['', '', '', '', '', {}]
    for i in uri_find_decoded:
        loc = uri_find_decoded.index(i)
        result_decoded[loc] = uri_decoded[loc]
        if isinstance(i, basestring):
            if (re.match(i, uri_decoded[loc])):
                result_decoded[loc] = re.sub(i, uri_replace_decoded[loc], uri_decoded[loc])
                if uri_find_decoded.index(i) == 2:
                    if d:
                        localfn = bb.fetch.localpath(uri, d)
                        if localfn:
                            result_decoded[loc] = os.path.join(os.path.dirname(result_decoded[loc]), os.path.basename(bb.fetch.localpath(uri, d)))
            else:
                return uri
    return encodeurl(result_decoded)

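# Illustrative use (not part of the original module): try_mirrors() uses this
# to map an upstream URI onto a mirror via a (find, replace) regex pair from
# PREMIRRORS/MIRRORS; with a populated datastore d, roughly:
#
#   uri_replace("http://downloads.example.com/foo-1.0.tar.gz",
#               "http://.*/.*", "http://mirror.example.org/sources/.*", d)
#   -> "http://mirror.example.org/sources/foo-1.0.tar.gz"
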
methods = []
urldata_cache = {}
saved_headrevs = {}

def fetcher_init(d):
    """
    Called to initialize the fetchers once the configuration data is known.
    Calls before this must not hit the cache.
    """
    pd = persist_data.persist(d)
    # When to drop SCM head revisions controlled by user policy
    srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear"
    if srcrev_policy == "cache":
        logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
    elif srcrev_policy == "clear":
        logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
        try:
            bb.fetch.saved_headrevs = pd['BB_URI_HEADREVS'].items()
        except:
            pass
        del pd['BB_URI_HEADREVS']
    else:
        raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)

    for m in methods:
        if hasattr(m, "init"):
            m.init(d)

def fetcher_compare_revisions(d):
    """
    Compare the revisions in the persistent cache with the current values and
    return true/false on whether they've changed.
    """

    pd = persist_data.persist(d)
    data = dict(pd['BB_URI_HEADREVS'].items())
    data2 = dict(bb.fetch.saved_headrevs)

    for key in data:
        if key not in data2 or data2[key] != data[key]:
            logger.debug(1, "%s changed", key)
            return True
        else:
            logger.debug(2, "%s did not change", key)
    return False

# Function call order is usually:
#   1. init
#   2. go
#   3. localpaths
# localpath can be called at any time

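# Illustrative overall flow (not part of the original module):
#
#   urldata = init(d.getVar("SRC_URI", 1).split(), d)   # parse and set up URIs
#   go(d)                                               # download everything
#   files = localpaths(d)                               # resulting local files
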
def init(urls, d, setup = True):
    urldata = {}

    fn = bb.data.getVar('FILE', d, 1)
    if fn in urldata_cache:
        urldata = urldata_cache[fn]

    for url in urls:
        if url not in urldata:
            urldata[url] = FetchData(url, d)

    if setup:
        for url in urldata:
            if not urldata[url].setup:
                urldata[url].setup_localpath(d)

    urldata_cache[fn] = urldata
    return urldata

def mirror_from_string(data):
    return [ i.split() for i in (data or "").replace('\\n', '\n').split('\n') if i ]

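# Illustrative (not part of the original module): PREMIRRORS/MIRRORS entries
# are whitespace-separated (find, replace) pairs, one per escaped line:
#
#   mirror_from_string("http://.*/.* http://mirror.example.org/sources/ \\n"
#                      "ftp://.*/.* http://mirror.example.org/sources/")
#   -> [['http://.*/.*', 'http://mirror.example.org/sources/'],
#       ['ftp://.*/.*', 'http://mirror.example.org/sources/']]
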
def removefile(f):
    try:
        os.remove(f)
    except:
        pass

def verify_checksum(u, ud, d):
    """
    Verify the MD5 and SHA256 checksums of the downloaded source against
    the values given in the recipe, raising FetchError on a mismatch.

    If a checksum is missing from the recipe, BB_STRICT_CHECKSUM decides
    what happens: with BB_STRICT_CHECKSUM = "1" the missing checksum is an
    error, otherwise only a warning is printed.
    """

    if ud.type not in ["http", "https", "ftp", "ftps"]:
        return

    md5data = bb.utils.md5_file(ud.localpath)
    sha256data = bb.utils.sha256_file(ud.localpath)

    if ud.md5_expected is None or ud.sha256_expected is None:
        logger.warn('Missing SRC_URI checksum for %s, consider adding to the recipe:\n'
                    'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"',
                    ud.localpath, ud.md5_name, md5data,
                    ud.sha256_name, sha256data)
        if bb.data.getVar("BB_STRICT_CHECKSUM", d, True) == "1":
            raise FetchError("No checksum specified for %s." % u)
        return

    if ud.md5_expected != md5data or ud.sha256_expected != sha256data:
        logger.error('The checksums for "%s" did not match.\n'
                     '  MD5: expected "%s", got "%s"\n'
                     '  SHA256: expected "%s", got "%s"\n',
                     ud.localpath, ud.md5_expected, md5data,
                     ud.sha256_expected, sha256data)
        raise FetchError("%s checksum mismatch." % u)

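# Illustrative recipe snippet (not part of the original module): expected
# checksums are supplied as SRC_URI varflags, here the digests of "hello":
#
#   SRC_URI[md5sum] = "5d41402abc4b2a76b9719d911017c592"
#   SRC_URI[sha256sum] = "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824"
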
def go(d, urls = None):
    """
    Fetch all urls
    init must have previously been called
    """
    if not urls:
        urls = d.getVar("SRC_URI", 1).split()
    urldata = init(urls, d, True)

    for u in urls:
        ud = urldata[u]
        m = ud.method
        localpath = ""

        if not ud.localfile:
            continue

        lf = bb.utils.lockfile(ud.lockfile)

        if m.try_premirror(u, ud, d):
            # First try fetching uri, u, from PREMIRRORS
            mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', d, True))
            localpath = try_mirrors(d, u, mirrors, False, m.forcefetch(u, ud, d))
        elif os.path.exists(ud.localfile):
            localpath = ud.localfile

        # Need to re-test forcefetch() which will return true if our copy is too old
        if m.forcefetch(u, ud, d) or not localpath:
            # Next try fetching from the original uri, u
            try:
                m.go(u, ud, d)
                localpath = ud.localpath
            except FetchError:
                # Remove any incomplete file
                removefile(ud.localpath)
                # Finally, try fetching uri, u, from MIRRORS
                mirrors = mirror_from_string(bb.data.getVar('MIRRORS', d, True))
                localpath = try_mirrors(d, u, mirrors)
                if not localpath or not os.path.exists(localpath):
                    raise FetchError("Unable to fetch URL %s from any source." % u)

        ud.localpath = localpath

        if os.path.exists(ud.md5):
            # Touch the md5 file to show active use of the download
            try:
                os.utime(ud.md5, None)
            except:
                # Errors aren't fatal here
                pass
        else:
            # Only check the checksums if we've not seen this item before
            verify_checksum(u, ud, d)
            Fetch.write_md5sum(u, ud, d)

        bb.utils.unlockfile(lf)

def checkstatus(d, urls = None):
    """
    Check all urls exist upstream
    init must have previously been called
    """
    urldata = init([], d, True)

    if not urls:
        urls = urldata

    for u in urls:
        ud = urldata[u]
        m = ud.method
        logger.debug(1, "Testing URL %s", u)
        # First try checking uri, u, from PREMIRRORS
        mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', d, True))
        ret = try_mirrors(d, u, mirrors, True)
        if not ret:
            # Next try checking from the original uri, u
            try:
                ret = m.checkstatus(u, ud, d)
            except:
                # Finally, try checking uri, u, from MIRRORS
                mirrors = mirror_from_string(bb.data.getVar('MIRRORS', d, True))
                ret = try_mirrors(d, u, mirrors, True)

        if not ret:
            raise FetchError("URL %s doesn't work" % u)

def localpaths(d):
    """
    Return a list of the local filenames, assuming successful fetch
    """
    local = []
    urldata = init([], d, True)

    for u in urldata:
        ud = urldata[u]
        local.append(ud.localpath)

    return local

srcrev_internal_call = False

def get_srcrev(d):
    """
    Return the version string for the current package
    (usually to be used as PV)
    Most packages usually only have one SCM so we just pass on the call.
    In the multi SCM case, we build a value based on SRCREV_FORMAT which must
    have been set.
    """

    #
    # Ugly code alert. localpath in the fetchers will try to evaluate SRCREV which
    # could translate into a call to here. If it does, we need to catch this
    # and provide some way so it knows get_srcrev is active instead of being
    # some number etc. hence the srcrev_internal_call tracking and the magic
    # "SRCREVINACTION" return value.
    #
    # Neater solutions welcome!
    #
    if bb.fetch.srcrev_internal_call:
        return "SRCREVINACTION"

    scms = []

    # Only call setup_localpath on URIs which supports_srcrev()
    urldata = init(bb.data.getVar('SRC_URI', d, 1).split(), d, False)
    for u in urldata:
        ud = urldata[u]
        if ud.method.supports_srcrev():
            if not ud.setup:
                ud.setup_localpath(d)
            scms.append(u)

    if len(scms) == 0:
        logger.error("SRCREV was used yet no valid SCM was found in SRC_URI")
        raise ParameterError

    if bb.data.getVar('BB_SRCREV_POLICY', d, True) != "cache":
        bb.data.setVar('__BB_DONT_CACHE', '1', d)

    if len(scms) == 1:
        return urldata[scms[0]].method.sortable_revision(scms[0], urldata[scms[0]], d)

    #
    # Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
    #
    format = bb.data.getVar('SRCREV_FORMAT', d, 1)
    if not format:
        logger.error("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
        raise ParameterError

    for scm in scms:
        if 'name' in urldata[scm].parm:
            name = urldata[scm].parm["name"]
            rev = urldata[scm].method.sortable_revision(scm, urldata[scm], d)
            format = format.replace(name, rev)

    return format

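# Illustrative SRCREV_FORMAT use (not part of the original module) with two
# named SCMs in SRC_URI; each name in the format string is replaced by that
# SCM's sortable revision:
#
#   SRC_URI = "git://git.example.com/a.git;name=machine \
#              git://git.example.com/b.git;name=meta"
#   SRCREV_FORMAT = "machine_meta"    # might expand to e.g. "4+1a2b_7+3c4d"
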
def localpath(url, d, cache = True):
    """
    Called from the parser with cache=False since the cache isn't ready
    at this point. Also called from classes in OE e.g. patch.bbclass
    """
    ud = init([url], d)
    if ud[url].method:
        return ud[url].localpath
    return url

def runfetchcmd(cmd, d, quiet = False):
    """
    Run cmd returning the command output
    Raise an error if interrupted or cmd fails
    Optionally echo command output to stdout
    """

    # Need to export PATH as binary could be in metadata paths
    # rather than host provided
    # Also include some other variables.
    # FIXME: Should really include all export variables?
    exportvars = ['PATH', 'GIT_PROXY_COMMAND', 'GIT_PROXY_HOST',
                  'GIT_PROXY_PORT', 'GIT_CONFIG', 'http_proxy', 'ftp_proxy',
                  'https_proxy', 'no_proxy', 'ALL_PROXY', 'all_proxy',
                  'SSH_AUTH_SOCK', 'SSH_AGENT_PID', 'HOME']

    for var in exportvars:
        val = data.getVar(var, d, True)
        if val:
            cmd = 'export %s="%s"; %s' % (var, val, cmd)

    logger.debug(1, "Running %s", cmd)

    # redirect stderr to stdout
    stdout_handle = os.popen(cmd + " 2>&1", "r")
    output = ""

    while True:
        line = stdout_handle.readline()
        if not line:
            break
        if not quiet:
            print(line, end=' ')
        output += line

    status = stdout_handle.close() or 0
    # close() returns the status in os.wait() encoding: the low byte is the
    # signal that killed the process, the high byte is its exit code
    signal = status & 0xff
    exitstatus = status >> 8

    if signal:
        raise FetchError("Fetch command %s failed with signal %s, output:\n%s" % (cmd, signal, output))
    elif exitstatus:
        raise FetchError("Fetch command %s failed with exit code %s, output:\n%s" % (cmd, exitstatus, output))

    return output

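# Illustrative call (not part of the original module); the fetcher
# implementations build their own command lines, e.g. something like:
#
#   output = runfetchcmd("md5sum %s" % ud.localpath, d, quiet=True)
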
def try_mirrors(d, uri, mirrors, check = False, force = False):
    """
    Try to use a mirrored version of the sources.
    This method will be automatically called before the fetchers go.

    d Is a bb.data instance
    uri is the original uri we're trying to download
    mirrors is the list of mirrors we're going to try
    """
    fpath = os.path.join(data.getVar("DL_DIR", d, 1), os.path.basename(uri))
    if not check and os.access(fpath, os.R_OK) and not force:
        logger.debug(1, "%s already exists, skipping checkout.", fpath)
        return fpath

    ld = d.createCopy()
    for (find, replace) in mirrors:
        newuri = uri_replace(uri, find, replace, ld)
        if newuri != uri:
            try:
                ud = FetchData(newuri, ld)
            except bb.fetch.NoMethodError:
                logger.debug(1, "No method for %s", uri)
                continue

            ud.setup_localpath(ld)

            try:
                if check:
                    found = ud.method.checkstatus(newuri, ud, ld)
                    if found:
                        return found
                else:
                    ud.method.go(newuri, ud, ld)
                    return ud.localpath
            except (bb.fetch.MissingParameterError,
                    bb.fetch.FetchError,
                    bb.fetch.MD5SumError):
                import sys
                (type, value, traceback) = sys.exc_info()
                logger.debug(2, "Mirror fetch failure: %s", value)
                removefile(ud.localpath)
                continue
    return None


class FetchData(object):
    """
    A class which represents the fetcher state for a given URI.
    """
    def __init__(self, url, d):
        self.localfile = ""
        (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(data.expand(url, d))
        self.date = Fetch.getSRCDate(self, d)
        self.url = url
        if not self.user and "user" in self.parm:
            self.user = self.parm["user"]
        if not self.pswd and "pswd" in self.parm:
            self.pswd = self.parm["pswd"]
        self.setup = False

        if "name" in self.parm:
            self.md5_name = "%s.md5sum" % self.parm["name"]
            self.sha256_name = "%s.sha256sum" % self.parm["name"]
        else:
            self.md5_name = "md5sum"
            self.sha256_name = "sha256sum"
        self.md5_expected = bb.data.getVarFlag("SRC_URI", self.md5_name, d)
        self.sha256_expected = bb.data.getVarFlag("SRC_URI", self.sha256_name, d)

        for m in methods:
            if m.supports(url, self, d):
                self.method = m
                return
        raise NoMethodError("Missing implementation for url %s" % url)

    def setup_localpath(self, d):
        self.setup = True
        if "localpath" in self.parm:
            # if user sets localpath for file, use it instead.
            self.localpath = self.parm["localpath"]
            self.basename = os.path.basename(self.localpath)
        else:
            premirrors = bb.data.getVar('PREMIRRORS', d, True)
            local = ""
            if premirrors and self.url:
                aurl = self.url.split(";")[0]
                mirrors = mirror_from_string(premirrors)
                for (find, replace) in mirrors:
                    if replace.startswith("file://"):
                        path = aurl.split("://")[1]
                        path = path.split(";")[0]
                        local = replace.split("://")[1] + os.path.basename(path)
                        if local == aurl or not os.path.exists(local) or os.path.isdir(local):
                            local = ""
                self.localpath = local
            if not local:
                try:
                    bb.fetch.srcrev_internal_call = True
                    self.localpath = self.method.localpath(self.url, self, d)
                finally:
                    bb.fetch.srcrev_internal_call = False
                # We have to clear data's internal caches since the cached value of SRCREV is now wrong.
                # Horrible...
                bb.data.delVar("ISHOULDNEVEREXIST", d)

        if self.localpath is not None:
            # Note: These files should always be in DL_DIR whereas localpath may not be.
            basepath = bb.data.expand("${DL_DIR}/%s" % os.path.basename(self.localpath), d)
            self.md5 = basepath + '.md5'
            self.lockfile = basepath + '.lock'


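# Illustrative use of FetchData (not part of the original module):
#
#   ud = FetchData("http://example.com/foo-1.0.tar.gz", d)
#   ud.setup_localpath(d)     # fills in ud.localpath, ud.md5, ud.lockfile
#   print(ud.type, ud.host)   # -> http example.com
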
class Fetch(object):
    """Base class for 'fetch'ing data"""

    def __init__(self, urls = None):
        self.urls = urls or []

    def supports(self, url, urldata, d):
        """
        Check to see if this fetch class supports a given url.
        """
        return 0

    def localpath(self, url, urldata, d):
        """
        Return the local filename of a given url assuming a successful fetch.
        Can also setup variables in urldata for use in go (saving code duplication
        and duplicate code execution)
        """
        return url

    def _strip_leading_slashes(self, relpath):
        """
        Remove leading slash as os.path.join can't cope
        """
        while os.path.isabs(relpath):
            relpath = relpath[1:]
        return relpath

    def setUrls(self, urls):
        self.__urls = urls

    def getUrls(self):
        return self.__urls

    urls = property(getUrls, setUrls, None, "Urls property")

    def forcefetch(self, url, urldata, d):
        """
        Force a fetch, even if localpath exists?
        """
        return False

    def supports_srcrev(self):
        """
        The fetcher supports auto source revisions (SRCREV)
        """
        return False

    def go(self, url, urldata, d):
        """
        Fetch urls
        Assumes localpath was called first
        """
        raise NoMethodError("Missing implementation for url")

    def try_premirror(self, url, urldata, d):
        """
        Should premirrors be used?
        """
        if urldata.method.forcefetch(url, urldata, d):
            return True
        elif os.path.exists(urldata.md5) and os.path.exists(urldata.localfile):
            return False
        else:
            return True

    def checkstatus(self, url, urldata, d):
        """
        Check the status of a URL
        Assumes localpath was called first
        """
        logger.info("URL %s could not be checked for status since no method exists.", url)
        return True

    def getSRCDate(urldata, d):
        """
        Return the SRC Date for the component

        d the data store
        """
        if "srcdate" in urldata.parm:
            return urldata.parm['srcdate']

        pn = data.getVar("PN", d, 1)

        if pn:
            return data.getVar("SRCDATE_%s" % pn, d, 1) or data.getVar("CVSDATE_%s" % pn, d, 1) or data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)

        return data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)
    getSRCDate = staticmethod(getSRCDate)

    def srcrev_internal_helper(ud, d):
        """
        Return:
            a) a source revision if specified
            b) True if auto srcrev is in action
            c) False otherwise
        """

        if 'rev' in ud.parm:
            return ud.parm['rev']

        if 'tag' in ud.parm:
            return ud.parm['tag']

        rev = None
        if 'name' in ud.parm:
            pn = data.getVar("PN", d, 1)
            rev = data.getVar("SRCREV_%s_pn-%s" % (ud.parm['name'], pn), d, 1)
            if not rev:
                rev = data.getVar("SRCREV_pn-%s_%s" % (pn, ud.parm['name']), d, 1)
            if not rev:
                rev = data.getVar("SRCREV_%s" % (ud.parm['name']), d, 1)
        if not rev:
            rev = data.getVar("SRCREV", d, 1)
        if rev == "INVALID":
            raise InvalidSRCREV("Please set SRCREV to a valid value")
        if not rev:
            return False
        if rev == "SRCREVINACTION":
            return True
        return rev

    srcrev_internal_helper = staticmethod(srcrev_internal_helper)

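    # Illustrative precedence (not part of the original module) for a URI
    # carrying ;name=meta in recipe "foo": rev/tag URL parameters win, then
    # SRCREV_meta_pn-foo, SRCREV_pn-foo_meta, SRCREV_meta, and finally SRCREV.
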
    def localcount_internal_helper(ud, d):
        """
        Return:
            a) a locked localcount if specified
            b) None otherwise
        """

        localcount = None
        if 'name' in ud.parm:
            pn = data.getVar("PN", d, 1)
            localcount = data.getVar("LOCALCOUNT_" + ud.parm['name'], d, 1)
        if not localcount:
            localcount = data.getVar("LOCALCOUNT", d, 1)
        return localcount

    localcount_internal_helper = staticmethod(localcount_internal_helper)

    def verify_md5sum(ud, got_sum):
        """
        Verify the md5sum we wanted with the one we got
        """
        wanted_sum = ud.parm.get('md5sum')
        if not wanted_sum:
            return True

        return wanted_sum == got_sum
    verify_md5sum = staticmethod(verify_md5sum)

    def write_md5sum(url, ud, d):
        md5data = bb.utils.md5_file(ud.localpath)
        # verify the md5sum
        if not Fetch.verify_md5sum(ud, md5data):
            raise MD5SumError(url)

        md5out = open(ud.md5, 'w')
        md5out.write(md5data)
        md5out.close()
    write_md5sum = staticmethod(write_md5sum)

    def latest_revision(self, url, ud, d):
        """
        Look in the cache for the latest revision, if not present ask the SCM.
        """
        if not hasattr(self, "_latest_revision"):
            raise ParameterError

        pd = persist_data.persist(d)
        revs = pd['BB_URI_HEADREVS']
        key = self.generate_revision_key(url, ud, d)
        rev = revs[key]
        if rev is not None:
            return str(rev)

        revs[key] = rev = self._latest_revision(url, ud, d)
        return rev

    def sortable_revision(self, url, ud, d):
        """
        Return a sortable revision string of the form "<count>+<rev>", where
        count is a local, monotonically increasing build index and rev is the
        latest SCM revision, so that otherwise unordered revisions compare
        sensibly as versions.
        """
        if hasattr(self, "_sortable_revision"):
            return self._sortable_revision(url, ud, d)

        pd = persist_data.persist(d)
        localcounts = pd['BB_URI_LOCALCOUNT']
        key = self.generate_revision_key(url, ud, d)

        latest_rev = self._build_revision(url, ud, d)
        last_rev = localcounts[key + '_rev']
        uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False
        count = None
        if uselocalcount:
            count = Fetch.localcount_internal_helper(ud, d)
        if count is None:
            count = localcounts[key + '_count']

        if last_rev == latest_rev:
            return str(count + "+" + latest_rev)

        buildindex_provided = hasattr(self, "_sortable_buildindex")
        if buildindex_provided:
            count = self._sortable_buildindex(url, ud, d, latest_rev)

        if count is None:
            count = "0"
        elif uselocalcount or buildindex_provided:
            count = str(count)
        else:
            count = str(int(count) + 1)

        localcounts[key + '_rev'] = latest_rev
        localcounts[key + '_count'] = count

        return str(count + "+" + latest_rev)

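    # Illustrative (not part of the original module): for a git URI whose
    # head moved, the third fetch might yield "3+9f2c1d4e", which sorts after
    # the earlier "2+..." value even though git hashes themselves don't sort.
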
    def generate_revision_key(self, url, ud, d):
        key = self._revision_key(url, ud, d)
        return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "")

from . import cvs
from . import git
from . import local
from . import svn
from . import wget
from . import svk
from . import ssh
from . import perforce
from . import bzr
from . import hg
from . import osc
from . import repo

methods.append(local.Local())
methods.append(wget.Wget())
methods.append(svn.Svn())
methods.append(git.Git())
methods.append(cvs.Cvs())
methods.append(svk.Svk())
methods.append(ssh.SSH())
methods.append(perforce.Perforce())
methods.append(bzr.Bzr())
methods.append(hg.Hg())
methods.append(osc.Osc())
methods.append(repo.Repo())