summaryrefslogtreecommitdiffstats
path: root/bitbake/lib/bb/fetch/__init__.py
diff options
context:
space:
mode:
Diffstat (limited to 'bitbake/lib/bb/fetch/__init__.py')
-rw-r--r--bitbake/lib/bb/fetch/__init__.py832
1 files changed, 0 insertions, 832 deletions
diff --git a/bitbake/lib/bb/fetch/__init__.py b/bitbake/lib/bb/fetch/__init__.py
deleted file mode 100644
index 18988646b9..0000000000
--- a/bitbake/lib/bb/fetch/__init__.py
+++ /dev/null
@@ -1,832 +0,0 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6Classes for obtaining upstream sources for the
7BitBake build tools.
8"""
9
10# Copyright (C) 2003, 2004 Chris Larson
11#
12# This program is free software; you can redistribute it and/or modify
13# it under the terms of the GNU General Public License version 2 as
14# published by the Free Software Foundation.
15#
16# This program is distributed in the hope that it will be useful,
17# but WITHOUT ANY WARRANTY; without even the implied warranty of
18# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19# GNU General Public License for more details.
20#
21# You should have received a copy of the GNU General Public License along
22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24#
25# Based on functions from the base bb module, Copyright 2003 Holger Schurig
26
27from __future__ import absolute_import
28from __future__ import print_function
29import os, re
30import logging
31import bb
32from bb import data
33from bb import persist_data
34from bb import utils
35
# Fetcher API version.
__version__ = "1"

# Module-level logger, a child of the main "BitBake" logger.
logger = logging.getLogger("BitBake.Fetch")
39
class MalformedUrl(Exception):
    """Exception raised when encountering an invalid url"""

class FetchError(Exception):
    """Exception raised when a download fails"""

class NoMethodError(Exception):
    """Exception raised when there is no method to obtain a supplied url or set of urls"""

class MissingParameterError(Exception):
    """Exception raised when a fetch method is missing a critical parameter in the url"""

class ParameterError(Exception):
    """Exception raised when a url cannot be processed due to invalid parameters."""

class MD5SumError(Exception):
    """Exception raised when a MD5SUM of a file does not match the expected one"""

class InvalidSRCREV(Exception):
    """Exception raised when an invalid SRCREV is encountered"""
60
def decodeurl(url):
    """Decodes an URL into the tokens (scheme, network location, path,
    user, password, parameters).

    Returns a 6-tuple (type, host, path, user, pswd, parm) where parm
    is a dict of the ';key=value' url parameters.
    Raises MalformedUrl if the url does not have the expected shape.
    """

    m = re.compile('(?P<type>[^:]*)://((?P<user>.+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
    if not m:
        raise MalformedUrl(url)

    type = m.group('type')
    location = m.group('location')
    if not location:
        raise MalformedUrl(url)
    user = m.group('user')
    parm = m.group('parm')

    # Everything up to the first '/' is the host, except for file://
    # urls where the whole location is a local path.
    locidx = location.find('/')
    if locidx != -1 and type.lower() != 'file':
        host = location[:locidx]
        path = location[locidx:]
    else:
        host = ""
        path = location

    # Initialize pswd up front: previously it was left unbound (NameError
    # at the return below) when the user part failed to match the regex.
    pswd = ''
    if user:
        m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user)
        if m:
            user = m.group('user')
            pswd = m.group('pswd')
    else:
        user = ''

    p = {}
    if parm:
        for s in parm.split(';'):
            # Split on the first '=' only so parameter values may
            # themselves contain '='.  A parameter with no '=' at all
            # is a malformed url rather than an unhandled ValueError.
            if '=' not in s:
                raise MalformedUrl(url)
            s1, s2 = s.split('=', 1)
            p[s1] = s2

    return (type, host, path, user, pswd, p)
100
def encodeurl(decoded):
    """Reassemble a url string from the token tuple produced by
    decodeurl(): (scheme, network location, path, user, password,
    parameters).

    Raises MissingParameterError when the scheme or path is absent.
    """

    (type, host, path, user, pswd, p) = decoded

    if not type or not path:
        raise MissingParameterError("Type or path url components missing when encoding %s" % decoded)

    # Assemble the url piece by piece; every component except the
    # scheme and the path is optional.
    pieces = ['%s://' % type]
    if user:
        credentials = "%s" % user
        if pswd:
            credentials += ":%s" % pswd
        pieces.append(credentials + "@")
    if host:
        pieces.append("%s" % host)
    pieces.append("%s" % path)
    if p:
        for parm in p:
            pieces.append(";%s=%s" % (parm, p[parm]))

    return ''.join(pieces)
124
def uri_replace(uri, uri_find, uri_replace, d):
    # Rewrite 'uri' according to one (find, replace) mirror pattern pair.
    # find/replace are themselves urls; matching is done component by
    # component (scheme, host, path, user, password) via regexes.
    if not uri or not uri_find or not uri_replace:
        # NOTE(review): this logs but then falls through and still calls
        # decodeurl() on the undefined value -- looks like a missing
        # 'return'; confirm intended behaviour before relying on it.
        logger.debug(1, "uri_replace: passed an undefined value, not replacing")
    uri_decoded = list(decodeurl(uri))
    uri_find_decoded = list(decodeurl(uri_find))
    uri_replace_decoded = list(decodeurl(uri_replace))
    result_decoded = ['', '', '', '', '', {}]
    for i in uri_find_decoded:
        loc = uri_find_decoded.index(i)
        # Default: carry the original component through unchanged.
        result_decoded[loc] = uri_decoded[loc]
        # Only string components are regex-matched (the final element is
        # the parameter dict).  basestring is Python 2 only.
        if isinstance(i, basestring):
            if (re.match(i, uri_decoded[loc])):
                result_decoded[loc] = re.sub(i, uri_replace_decoded[loc], uri_decoded[loc])
                if uri_find_decoded.index(i) == 2:
                    # Path component: keep the replaced directory but
                    # reuse the original download's local file name.
                    if d:
                        localfn = bb.fetch.localpath(uri, d)
                        if localfn:
                            result_decoded[loc] = os.path.join(os.path.dirname(result_decoded[loc]), os.path.basename(bb.fetch.localpath(uri, d)))
            else:
                # A component failed to match: this mirror pattern does
                # not apply, hand back the original uri untouched.
                return uri
    return encodeurl(result_decoded)
146
# Registered fetcher instances; probed in order by FetchData.__init__.
methods = []
# Per-recipe-file cache of url -> FetchData mappings, filled by init().
urldata_cache = {}
# Head revisions saved by fetcher_init() before clearing the persistent
# cache; consumed by fetcher_compare_revisions().
saved_headrevs = {}
150
def fetcher_init(d):
    """
    Called to initialize the fetchers once the configuration data is known.
    Calls before this must not hit the cache.
    """
    # When to drop SCM head revisions controlled by user policy
    srcrev_policy = d.getVar('BB_SRCREV_POLICY', True) or "clear"
    if srcrev_policy == "cache":
        logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
    elif srcrev_policy == "clear":
        logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
        revs = persist_data.persist('BB_URI_HEADREVS', d)
        try:
            # Remember the old revisions so fetcher_compare_revisions()
            # can report what changed after the cache is cleared.
            bb.fetch.saved_headrevs = revs.items()
        except Exception:
            # Best effort only -- a missing/unreadable cache is not
            # fatal.  Narrowed from a bare 'except:' so that
            # KeyboardInterrupt/SystemExit still propagate.
            pass
        revs.clear()
    else:
        raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)

    # Give each registered fetcher a chance to initialize itself.
    for m in methods:
        if hasattr(m, "init"):
            m.init(d)
174
def fetcher_compare_revisions(d):
    """
    Compare the revisions in the persistant cache with current values and
    return true/false on whether they've changed.
    """

    # Snapshot both sides as plain dicts.  The original bound the
    # .items() pair-lists and then indexed them like dicts, and it also
    # shadowed the imported 'data' module.
    current = dict(persist_data.persist('BB_URI_HEADREVS', d).items())
    previous = dict(bb.fetch.saved_headrevs)

    changed = False
    for key in current:
        if key not in previous or previous[key] != current[key]:
            logger.debug(1, "%s changed", key)
            changed = True
        else:
            logger.debug(2, "%s did not change", key)
    # Report only after every key has been inspected -- the original
    # returned inside the loop, so the result depended on whichever key
    # happened to be iterated first.
    return changed
193
194# Function call order is usually:
195# 1. init
196# 2. go
197# 3. localpaths
198# localpath can be called at any time
199
def init(urls, d, setup = True):
    """
    Build (and cache, keyed on the recipe file) FetchData objects for
    the given urls.

    Returns a dict mapping url -> FetchData.  When 'setup' is true,
    setup_localpath() is also invoked for every entry not yet set up.
    """
    urldata = {}

    # The cache is keyed on the recipe file currently being parsed.
    fn = d.getVar('FILE', True)
    if fn in urldata_cache:
        urldata = urldata_cache[fn]

    for url in urls:
        if url not in urldata:
            urldata[url] = FetchData(url, d)

    if setup:
        for url in urldata:
            if not urldata[url].setup:
                urldata[url].setup_localpath(d)

    urldata_cache[fn] = urldata
    return urldata
218
def mirror_from_string(data):
    """Parse a MIRRORS/PREMIRRORS-style string into a list of
    [find, replace] pairs.

    Pairs are whitespace-separated, one pair per line; literal '\\n'
    sequences are treated as newlines and empty lines are skipped.
    """
    text = (data or "").replace('\\n', '\n')
    pairs = []
    for line in text.split('\n'):
        if line:
            pairs.append(line.split())
    return pairs
221
def verify_checksum(u, ud, d):
    """
    verify the MD5 and SHA256 checksum for downloaded src

    return value:
        - True: checksum matched
        - False: checksum unmatched

    if checksum is missing in recipes file, "BB_STRICT_CHECKSUM" decide the return value.
    if BB_STRICT_CHECKSUM = "1" then return false as unmatched, otherwise return true as
    matched
    """

    # Only remote-archive fetch types carry file checksums; SCM types
    # are validated by revision instead.
    if ud.type not in ["http", "https", "ftp", "ftps"]:
        return

    md5data = bb.utils.md5_file(ud.localpath)
    sha256data = bb.utils.sha256_file(ud.localpath)

    # 'is None' identity test (the original used '== None').
    if ud.md5_expected is None or ud.sha256_expected is None:
        logger.warn('Missing SRC_URI checksum for %s, consider adding to the recipe:\n'
                    'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"',
                    ud.localpath, ud.md5_name, md5data,
                    ud.sha256_name, sha256data)
        if d.getVar("BB_STRICT_CHECKSUM", True) == "1":
            raise FetchError("No checksum specified for %s." % u)
        return

    if ud.md5_expected != md5data or ud.sha256_expected != sha256data:
        logger.error('The checksums for "%s" did not match.\n'
                     '  MD5: expected "%s", got "%s"\n'
                     '  SHA256: expected "%s", got "%s"\n',
                     ud.localpath, ud.md5_expected, md5data,
                     ud.sha256_expected, sha256data)
        raise FetchError("%s checksum mismatch." % u)
257
def go(d, urls = None):
    """
    Fetch all urls
    init must have previously been called
    """
    if not urls:
        urls = d.getVar("SRC_URI", True).split()
    urldata = init(urls, d, True)

    for u in urls:
        ud = urldata[u]
        m = ud.method
        localpath = ""

        # Nothing to download for this url.
        if not ud.localfile:
            continue

        # Serialize downloads of the same file across bitbake processes.
        lf = bb.utils.lockfile(ud.lockfile)

        if m.try_premirror(u, ud, d):
            # First try fetching uri, u, from PREMIRRORS
            mirrors = mirror_from_string(d.getVar('PREMIRRORS', True))
            localpath = try_mirrors(d, u, mirrors, False, m.forcefetch(u, ud, d))
        elif os.path.exists(ud.localfile):
            localpath = ud.localfile

        # Need to re-test forcefetch() which will return true if our copy is too old
        if m.forcefetch(u, ud, d) or not localpath:
            # Next try fetching from the original uri, u
            try:
                m.go(u, ud, d)
                localpath = ud.localpath
            except FetchError:
                # Remove any incomplete file
                bb.utils.remove(ud.localpath)
                # Finally, try fetching uri, u, from MIRRORS
                mirrors = mirror_from_string(d.getVar('MIRRORS', True))
                localpath = try_mirrors (d, u, mirrors)
            if not localpath or not os.path.exists(localpath):
                raise FetchError("Unable to fetch URL %s from any source." % u)

        ud.localpath = localpath

        if os.path.exists(ud.md5):
            # Touch the md5 file to show active use of the download
            try:
                os.utime(ud.md5, None)
            except:
                # Errors aren't fatal here
                pass
        else:
            # Only check the checksums if we've not seen this item before
            verify_checksum(u, ud, d)
            Fetch.write_md5sum(u, ud, d)

        bb.utils.unlockfile(lf)
314
def checkstatus(d, urls = None):
    """
    Check all urls exist upstream
    init must have previously been called
    """
    urldata = init([], d, True)

    # Default to every url already known to the cache.
    if not urls:
        urls = urldata

    for u in urls:
        ud = urldata[u]
        m = ud.method
        logger.debug(1, "Testing URL %s", u)
        # First try checking uri, u, from PREMIRRORS
        mirrors = mirror_from_string(d.getVar('PREMIRRORS', True))
        ret = try_mirrors(d, u, mirrors, True)
        if not ret:
            # Next try checking from the original uri, u
            try:
                ret = m.checkstatus(u, ud, d)
            except Exception:
                # Narrowed from a bare 'except:' so KeyboardInterrupt /
                # SystemExit still propagate.
                # Finally, try checking uri, u, from MIRRORS
                mirrors = mirror_from_string(d.getVar('MIRRORS', True))
                ret = try_mirrors (d, u, mirrors, True)

        if not ret:
            raise FetchError("URL %s doesn't work" % u)
343
def localpaths(d):
    """
    Return a list of the local filenames, assuming successful fetch
    """
    urldata = init([], d, True)
    # One localpath per known url, in the cache's iteration order.
    return [urldata[u].localpath for u in urldata]
356
# Recursion guard: set while a fetcher's localpath() is evaluated so
# that a nested SRCREV expansion can be detected (see get_srcrev).
srcrev_internal_call = False

def get_autorev(d):
    # AUTOREV simply resolves to the current source revision.
    return get_srcrev(d)
361
def get_srcrev(d):
    """
    Return the version string for the current package
    (usually to be used as PV)
    Most packages usually only have one SCM so we just pass on the call.
    In the multi SCM case, we build a value based on SRCREV_FORMAT which must
    have been set.
    """

    #
    # Ugly code alert. localpath in the fetchers will try to evaluate SRCREV which
    # could translate into a call to here. If it does, we need to catch this
    # and provide some way so it knows get_srcrev is active instead of being
    # some number etc. hence the srcrev_internal_call tracking and the magic
    # "SRCREVINACTION" return value.
    #
    # Neater solutions welcome!
    #
    if bb.fetch.srcrev_internal_call:
        return "SRCREVINACTION"

    scms = []

    # Only call setup_localpath on URIs which supports_srcrev()
    urldata = init(d.getVar('SRC_URI', True).split(), d, False)
    for u in urldata:
        ud = urldata[u]
        if ud.method.supports_srcrev():
            if not ud.setup:
                ud.setup_localpath(d)
            scms.append(u)

    if len(scms) == 0:
        logger.error("SRCREV was used yet no valid SCM was found in SRC_URI")
        raise ParameterError

    # SRCREV-derived values can change between parses, so don't cache
    # this recipe unless the user policy explicitly allows it.
    if d.getVar('BB_SRCREV_POLICY', True) != "cache":
        d.setVar('__BB_DONT_CACHE', '1')

    # Single SCM: delegate directly to that fetcher.
    if len(scms) == 1:
        return urldata[scms[0]].method.sortable_revision(scms[0], urldata[scms[0]], d)

    #
    # Mutiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
    #
    format = d.getVar('SRCREV_FORMAT', True)
    if not format:
        logger.error("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
        raise ParameterError

    # Substitute each named SCM's placeholder in the format string with
    # its sortable revision.
    for scm in scms:
        if 'name' in urldata[scm].parm:
            name = urldata[scm].parm["name"]
            rev = urldata[scm].method.sortable_revision(scm, urldata[scm], d)
            format = format.replace(name, rev)

    return format
419
def localpath(url, d, cache = True):
    """
    Called from the parser with cache=False since the cache isn't ready
    at this point. Also called from classed in OE e.g. patch.bbclass
    """
    urldata = init([url], d)
    entry = urldata[url]
    # Fall back to the raw url when no fetcher claimed it.
    if entry.method:
        return entry.localpath
    return url
429
def runfetchcmd(cmd, d, quiet = False):
    """
    Run cmd returning the command output
    Raise an error if interrupted or cmd fails
    Optionally echo command output to stdout
    """

    # Need to export PATH as binary could be in metadata paths
    # rather than host provided
    # Also include some other variables.
    # FIXME: Should really include all export varaiables?
    exportvars = ['PATH', 'GIT_PROXY_COMMAND', 'GIT_PROXY_HOST',
                  'GIT_PROXY_PORT', 'GIT_CONFIG', 'http_proxy', 'ftp_proxy',
                  'https_proxy', 'no_proxy', 'ALL_PROXY', 'all_proxy',
                  'KRB5CCNAME', 'SSH_AUTH_SOCK', 'SSH_AGENT_PID', 'HOME']

    for var in exportvars:
        val = data.getVar(var, d, True)
        if val:
            cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)

    logger.debug(1, "Running %s", cmd)

    # redirect stderr to stdout
    stdout_handle = os.popen(cmd + " 2>&1", "r")
    output = ""

    while True:
        line = stdout_handle.readline()
        if not line:
            break
        if not quiet:
            print(line, end=' ')
        output += line

    status = stdout_handle.close() or 0
    # POSIX wait status encoding: the low byte carries the terminating
    # signal, the high byte carries the exit code.  (The original had
    # these two decoded the wrong way round, and reported the raw
    # status instead of the exit code in the error message.)
    signal = status & 0xff
    exitstatus = status >> 8

    if signal:
        raise FetchError("Fetch command %s failed with signal %s, output:\n%s" % (cmd, signal, output))
    elif exitstatus != 0:
        raise FetchError("Fetch command %s failed with exit code %s, output:\n%s" % (cmd, exitstatus, output))

    return output
475
def try_mirrors(d, uri, mirrors, check = False, force = False):
    """
    Try to use a mirrored version of the sources.
    This method will be automatically called before the fetchers go.

    d Is a bb.data instance
    uri is the original uri we're trying to download
    mirrors is the list of mirrors we're going to try

    When 'check' is true only the mirror's existence is probed; when
    'force' is true an existing download is ignored.
    """
    fpath = os.path.join(data.getVar("DL_DIR", d, 1), os.path.basename(uri))
    # An existing readable download satisfies the request unless we're
    # only checking status or a re-fetch was forced.
    if not check and os.access(fpath, os.R_OK) and not force:
        logger.debug(1, "%s already exists, skipping checkout.", fpath)
        return fpath

    # Work on a copy so mirror lookups can't pollute the caller's datastore.
    ld = d.createCopy()
    for (find, replace) in mirrors:
        newuri = uri_replace(uri, find, replace, ld)
        # uri_replace hands back the original uri when the pattern
        # doesn't apply; skip those.
        if newuri != uri:
            try:
                ud = FetchData(newuri, ld)
            except bb.fetch.NoMethodError:
                logger.debug(1, "No method for %s", uri)
                continue

            ud.setup_localpath(ld)

            try:
                if check:
                    found = ud.method.checkstatus(newuri, ud, ld)
                    if found:
                        return found
                else:
                    ud.method.go(newuri, ud, ld)
                    return ud.localpath
            except (bb.fetch.MissingParameterError,
                    bb.fetch.FetchError,
                    bb.fetch.MD5SumError):
                import sys
                (type, value, traceback) = sys.exc_info()
                logger.debug(2, "Mirror fetch failure: %s", value)
                # Remove any incomplete download before trying the next
                # mirror in the list.
                bb.utils.remove(ud.localpath)
                continue
    # No mirror applied or none succeeded.
    return None
519
520
class FetchData(object):
    """
    A class which represents the fetcher state for a given URI.
    """
    def __init__(self, url, d):
        """Decode 'url', pick up user/password and checksum metadata,
        and bind the first registered fetch method that supports it.

        Raises NoMethodError when no registered fetcher claims the url.
        """
        self.localfile = ""
        (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(data.expand(url, d))
        self.date = Fetch.getSRCDate(self, d)
        self.url = url
        # url parameters may carry credentials when the netloc doesn't.
        if not self.user and "user" in self.parm:
            self.user = self.parm["user"]
        if not self.pswd and "pswd" in self.parm:
            self.pswd = self.parm["pswd"]
        self.setup = False

        # Named urls get per-name checksum flags (SRC_URI[name.md5sum]).
        if "name" in self.parm:
            self.md5_name = "%s.md5sum" % self.parm["name"]
            self.sha256_name = "%s.sha256sum" % self.parm["name"]
        else:
            self.md5_name = "md5sum"
            self.sha256_name = "sha256sum"
        self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name)
        self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name)

        # First registered method that supports the url wins.
        for m in methods:
            if m.supports(url, self, d):
                self.method = m
                return
        raise NoMethodError("Missing implementation for url %s" % url)

    def setup_localpath(self, d):
        """Work out self.localpath (plus the .md5/.lock stamp paths),
        honouring an explicit 'localpath' parameter and file:// premirrors.
        """
        self.setup = True
        if "localpath" in self.parm:
            # if user sets localpath for file, use it instead.
            self.localpath = self.parm["localpath"]
            self.basename = os.path.basename(self.localpath)
        else:
            premirrors = d.getVar('PREMIRRORS', True)
            local = ""
            # A file:// premirror that already holds the file lets us
            # skip asking the fetch method for a localpath.
            if premirrors and self.url:
                aurl = self.url.split(";")[0]
                mirrors = mirror_from_string(premirrors)
                for (find, replace) in mirrors:
                    if replace.startswith("file://"):
                        path = aurl.split("://")[1]
                        path = path.split(";")[0]
                        local = replace.split("://")[1] + os.path.basename(path)
                        if local == aurl or not os.path.exists(local) or os.path.isdir(local):
                            local = ""
                self.localpath = local
            if not local:
                try:
                    # Guard against recursive SRCREV evaluation while the
                    # fetcher computes its localpath (see get_srcrev).
                    bb.fetch.srcrev_internal_call = True
                    self.localpath = self.method.localpath(self.url, self, d)
                finally:
                    bb.fetch.srcrev_internal_call = False
                # We have to clear data's internal caches since the cached value of SRCREV is now wrong.
                # Horrible...
                bb.data.delVar("ISHOULDNEVEREXIST", d)

        if self.localpath is not None:
            # Note: These files should always be in DL_DIR whereas localpath may not be.
            basepath = bb.data.expand("${DL_DIR}/%s" % os.path.basename(self.localpath), d)
            self.md5 = basepath + '.md5'
            self.lockfile = basepath + '.lock'
586
587
class Fetch(object):
    """Base class for 'fetch'ing data"""

    def __init__(self, urls = []):
        # NOTE(review): the 'urls' argument is ignored (a fresh empty
        # list is assigned through the 'urls' property below) and the
        # default is mutable -- confirm before changing, callers may
        # rely on the property setter instead.
        self.urls = []

    def supports(self, url, urldata, d):
        """
        Check to see if this fetch class supports a given url.
        """
        return 0

    def localpath(self, url, urldata, d):
        """
        Return the local filename of a given url assuming a successful fetch.
        Can also setup variables in urldata for use in go (saving code duplication
        and duplicate code execution)
        """
        return url

    def _strip_leading_slashes(self, relpath):
        """
        Remove leading slash as os.path.join can't cope
        """
        while os.path.isabs(relpath):
            relpath = relpath[1:]
        return relpath

    def setUrls(self, urls):
        self.__urls = urls

    def getUrls(self):
        return self.__urls

    # 'urls' is exposed as a property backed by the private __urls.
    urls = property(getUrls, setUrls, None, "Urls property")

    def forcefetch(self, url, urldata, d):
        """
        Force a fetch, even if localpath exists?
        """
        return False

    def supports_srcrev(self):
        """
        The fetcher supports auto source revisions (SRCREV)
        """
        return False

    def go(self, url, urldata, d):
        """
        Fetch urls
        Assumes localpath was called first
        """
        raise NoMethodError("Missing implementation for url")

    def try_premirror(self, url, urldata, d):
        """
        Should premirrors be used?
        """
        # Forced fetches always go through premirrors; a completed
        # download (md5 stamp + local file both present) never does.
        if urldata.method.forcefetch(url, urldata, d):
            return True
        elif os.path.exists(urldata.md5) and os.path.exists(urldata.localfile):
            return False
        else:
            return True

    def checkstatus(self, url, urldata, d):
        """
        Check the status of a URL
        Assumes localpath was called first
        """
        logger.info("URL %s could not be checked for status since no method exists.", url)
        return True

    def getSRCDate(urldata, d):
        """
        Return the SRC Date for the component

        d the bb.data module
        """
        if "srcdate" in urldata.parm:
            return urldata.parm['srcdate']

        pn = data.getVar("PN", d, 1)

        # Per-package SRCDATE/CVSDATE overrides win over the global ones.
        if pn:
            return data.getVar("SRCDATE_%s" % pn, d, 1) or data.getVar("CVSDATE_%s" % pn, d, 1) or data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)

        return data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)
    getSRCDate = staticmethod(getSRCDate)

    def srcrev_internal_helper(ud, d):
        """
        Return:
            a) a source revision if specified
            b) True if auto srcrev is in action
            c) False otherwise
        """

        if 'rev' in ud.parm:
            return ud.parm['rev']

        if 'tag' in ud.parm:
            return ud.parm['tag']

        rev = None
        # Look SRCREV up in decreasing order of specificity.
        if 'name' in ud.parm:
            pn = data.getVar("PN", d, 1)
            rev = data.getVar("SRCREV_%s_pn-%s" % (ud.parm['name'], pn), d, 1)
            if not rev:
                rev = data.getVar("SRCREV_pn-%s_%s" % (pn, ud.parm['name']), d, 1)
            if not rev:
                rev = data.getVar("SRCREV_%s" % (ud.parm['name']), d, 1)
        if not rev:
            rev = data.getVar("SRCREV", d, 1)
        if rev == "INVALID":
            raise InvalidSRCREV("Please set SRCREV to a valid value")
        if not rev:
            return False
        if rev == "SRCREVINACTION":
            # Magic value produced by get_srcrev() while it is itself
            # being evaluated; signals "auto srcrev" to the caller.
            return True
        return rev

    srcrev_internal_helper = staticmethod(srcrev_internal_helper)

    def localcount_internal_helper(ud, d):
        """
        Return:
            a) a locked localcount if specified
            b) None otherwise
        """

        localcount = None
        if 'name' in ud.parm:
            pn = data.getVar("PN", d, 1)
            localcount = data.getVar("LOCALCOUNT_" + ud.parm['name'], d, 1)
        if not localcount:
            localcount = data.getVar("LOCALCOUNT", d, 1)
        return localcount

    localcount_internal_helper = staticmethod(localcount_internal_helper)

    def verify_md5sum(ud, got_sum):
        """
        Verify the md5sum we wanted with the one we got
        """
        wanted_sum = ud.parm.get('md5sum')
        if not wanted_sum:
            # No md5sum requested in the url: accept anything.
            return True

        return wanted_sum == got_sum
    verify_md5sum = staticmethod(verify_md5sum)

    def write_md5sum(url, ud, d):
        # Record the md5sum of the downloaded file in its .md5 stamp,
        # failing first if a requested md5sum doesn't match.
        md5data = bb.utils.md5_file(ud.localpath)
        # verify the md5sum
        if not Fetch.verify_md5sum(ud, md5data):
            raise MD5SumError(url)

        # NOTE(review): file() is Python 2 only.
        md5out = file(ud.md5, 'w')
        md5out.write(md5data)
        md5out.close()
    write_md5sum = staticmethod(write_md5sum)

    def latest_revision(self, url, ud, d):
        """
        Look in the cache for the latest revision, if not present ask the SCM.
        """
        if not hasattr(self, "_latest_revision"):
            raise ParameterError

        revs = persist_data.persist('BB_URI_HEADREVS', d)
        key = self.generate_revision_key(url, ud, d)
        try:
            return revs[key]
        except KeyError:
            # Cache miss: ask the SCM and remember the answer.
            revs[key] = rev = self._latest_revision(url, ud, d)
            return rev

    def sortable_revision(self, url, ud, d):
        """
        Return a sortable "<count>+<revision>" string for the url,
        delegating to the fetcher's _sortable_revision when provided.
        """
        if hasattr(self, "_sortable_revision"):
            return self._sortable_revision(url, ud, d)

        localcounts = persist_data.persist('BB_URI_LOCALCOUNT', d)
        key = self.generate_revision_key(url, ud, d)

        latest_rev = self._build_revision(url, ud, d)
        last_rev = localcounts.get(key + '_rev')
        uselocalcount = d.getVar("BB_LOCALCOUNT_OVERRIDE", True) or False
        count = None
        if uselocalcount:
            count = Fetch.localcount_internal_helper(ud, d)
        if count is None:
            count = localcounts.get(key + '_count')

        if last_rev == latest_rev:
            # NOTE(review): count may still be None here, which would
            # make this concatenation fail -- confirm upstream intent.
            return str(count + "+" + latest_rev)

        buildindex_provided = hasattr(self, "_sortable_buildindex")
        if buildindex_provided:
            count = self._sortable_buildindex(url, ud, d, latest_rev)

        if count is None:
            count = "0"
        elif uselocalcount or buildindex_provided:
            count = str(count)
        else:
            # Revision changed: bump the stored counter.
            count = str(int(count) + 1)

        localcounts[key + '_rev'] = latest_rev
        localcounts[key + '_count'] = count

        return str(count + "+" + latest_rev)

    def generate_revision_key(self, url, ud, d):
        # Append PN so the same url used by two recipes gets separate
        # persistent-cache entries.
        key = self._revision_key(url, ud, d)
        return "%s-%s" % (key, d.getVar("PN", True) or "")
807
# The concrete fetcher implementations are imported at the bottom of the
# module because they subclass Fetch, which must be defined first.
from . import cvs
from . import git
from . import local
from . import svn
from . import wget
from . import svk
from . import ssh
from . import perforce
from . import bzr
from . import hg
from . import osc
from . import repo

# Register one instance of each fetcher.  FetchData.__init__ probes this
# list in order and binds the first method whose supports() returns true.
methods.append(local.Local())
methods.append(wget.Wget())
methods.append(svn.Svn())
methods.append(git.Git())
methods.append(cvs.Cvs())
methods.append(svk.Svk())
methods.append(ssh.SSH())
methods.append(perforce.Perforce())
methods.append(bzr.Bzr())
methods.append(hg.Hg())
methods.append(osc.Osc())
methods.append(repo.Repo())