diff options
Diffstat (limited to 'bitbake-dev/lib/bb/fetch/__init__.py')
-rw-r--r-- | bitbake-dev/lib/bb/fetch/__init__.py | 640 |
1 files changed, 0 insertions, 640 deletions
diff --git a/bitbake-dev/lib/bb/fetch/__init__.py b/bitbake-dev/lib/bb/fetch/__init__.py deleted file mode 100644 index ab4658bc3b..0000000000 --- a/bitbake-dev/lib/bb/fetch/__init__.py +++ /dev/null | |||
@@ -1,640 +0,0 @@ | |||
1 | # ex:ts=4:sw=4:sts=4:et | ||
2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
3 | """ | ||
4 | BitBake 'Fetch' implementations | ||
5 | |||
6 | Classes for obtaining upstream sources for the | ||
7 | BitBake build tools. | ||
8 | """ | ||
9 | |||
10 | # Copyright (C) 2003, 2004 Chris Larson | ||
11 | # | ||
12 | # This program is free software; you can redistribute it and/or modify | ||
13 | # it under the terms of the GNU General Public License version 2 as | ||
14 | # published by the Free Software Foundation. | ||
15 | # | ||
16 | # This program is distributed in the hope that it will be useful, | ||
17 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
18 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
19 | # GNU General Public License for more details. | ||
20 | # | ||
21 | # You should have received a copy of the GNU General Public License along | ||
22 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
23 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
24 | # | ||
25 | # Based on functions from the base bb module, Copyright 2003 Holger Schurig | ||
26 | |||
27 | import os, re | ||
28 | import bb | ||
29 | from bb import data | ||
30 | from bb import persist_data | ||
31 | |||
class FetchError(Exception):
    """Exception raised when a download fails"""
34 | |||
class NoMethodError(Exception):
    """Exception raised when there is no method to obtain a supplied url or set of urls"""
37 | |||
class MissingParameterError(Exception):
    """Exception raised when a fetch method is missing a critical parameter in the url"""
40 | |||
class ParameterError(Exception):
    """Exception raised when a url cannot be processed due to invalid parameters."""
43 | |||
class MD5SumError(Exception):
    """Exception raised when the MD5SUM of a file does not match the expected one"""
46 | |||
class InvalidSRCREV(Exception):
    """Exception raised when an invalid SRCREV is encountered"""
49 | |||
def uri_replace(uri, uri_find, uri_replace, d):
    """
    Rewrite uri: for each component of the decoded url, if the
    corresponding component of uri_find matches (as a regex), substitute
    the component from uri_replace.  Returns the re-encoded url, or the
    original uri unchanged when uri_find does not match.
    """
    if not uri or not uri_find or not uri_replace:
        bb.msg.debug(1, bb.msg.domain.Fetcher, "uri_replace: passed an undefined value, not replacing")
        # Nothing sensible can be decoded from an undefined value; hand the
        # caller's uri back unchanged instead of crashing in decodeurl().
        return uri
    uri_decoded = list(bb.decodeurl(uri))
    uri_find_decoded = list(bb.decodeurl(uri_find))
    uri_replace_decoded = list(bb.decodeurl(uri_replace))
    result_decoded = ['', '', '', '', '', {}]
    # Use enumerate rather than list.index(i): decoded urls routinely hold
    # duplicate components (e.g. empty user and pswd fields) and index()
    # would always report the first occurrence, processing the wrong slot.
    for loc, i in enumerate(uri_find_decoded):
        result_decoded[loc] = uri_decoded[loc]
        if isinstance(i, str):
            if re.match(i, uri_decoded[loc]):
                result_decoded[loc] = re.sub(i, uri_replace_decoded[loc], uri_decoded[loc])
                if loc == 2:
                    # Path component: keep the replacement's directory but
                    # reuse the basename of the local download location.
                    if d:
                        localfn = bb.fetch.localpath(uri, d)
                        if localfn:
                            result_decoded[loc] = os.path.dirname(result_decoded[loc]) + "/" + os.path.basename(localfn)
            else:
                # A component failed to match: no replacement at all.
                return uri
        # else: the parameters dictionary.
        #   FIXME: apply replacements against options
    return bb.encodeurl(result_decoded)
78 | |||
# Registry of fetcher implementations; populated at module import time
# (see the bottom of this file) and consulted by FetchData.__init__.
methods = []
# Per-recipe cache of FetchData objects, keyed by the recipe's FILE value.
urldata_cache = {}
# Snapshot of the persisted head revisions taken by fetcher_init() before
# it clears them; compared later by fetcher_compare_revisons().
saved_headrevs = {}
82 | |||
def fetcher_init(d):
    """
    Called to initialize the fetchers once the configuration data is known.
    Calls before this must not hit the cache.

    Applies the BB_SRCREV_POLICY ("cache" keeps the persisted head
    revisions, "clear" - the default - snapshots then drops them), runs
    each registered method's optional init() hook, and makes sure the
    persistent domains used by the fetchers exist.
    """
    pd = persist_data.PersistData(d)
    # When to drop SCM head revisions is controlled by user policy.
    srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear"
    if srcrev_policy == "cache":
        bb.msg.debug(1, bb.msg.domain.Fetcher, "Keeping SRCREV cache due to cache policy of: %s" % srcrev_policy)
    elif srcrev_policy == "clear":
        bb.msg.debug(1, bb.msg.domain.Fetcher, "Clearing SRCREV cache due to cache policy of: %s" % srcrev_policy)
        try:
            # Remember the old revisions so fetcher_compare_revisons() can
            # report what changed after the clear.  Best effort only - the
            # domain may simply not exist yet.  Narrowed from a bare
            # except, which would also have swallowed KeyboardInterrupt.
            bb.fetch.saved_headrevs = pd.getKeyValues("BB_URI_HEADREVS")
        except Exception:
            pass
        pd.delDomain("BB_URI_HEADREVS")
    else:
        bb.msg.fatal(bb.msg.domain.Fetcher, "Invalid SRCREV cache policy of: %s" % srcrev_policy)

    for m in methods:
        if hasattr(m, "init"):
            m.init(d)

    # Make sure our domains exist
    pd.addDomain("BB_URI_HEADREVS")
    pd.addDomain("BB_URI_LOCALCOUNT")
110 | |||
def fetcher_compare_revisons(d):
    """
    Compare the revisions in the persistent cache with the values saved at
    fetcher_init() time and return True if any changed, False otherwise.

    Only keys present in the current cache are inspected, so entries that
    merely disappeared do not count as a change.
    """
    pd = persist_data.PersistData(d)
    # Named "current"/"previous" rather than "data" to avoid shadowing the
    # bb.data module imported at the top of the file.
    current = pd.getKeyValues("BB_URI_HEADREVS")
    previous = bb.fetch.saved_headrevs

    for key in current:
        if key not in previous or previous[key] != current[key]:
            bb.msg.debug(1, bb.msg.domain.Fetcher, "%s changed" % key)
            # The original also kept a "changed" flag that was dead code:
            # it returned immediately on the first difference, as we do.
            return True
        else:
            bb.msg.debug(2, bb.msg.domain.Fetcher, "%s did not change" % key)
    return False
130 | |||
131 | # Function call order is usually: | ||
132 | # 1. init | ||
133 | # 2. go | ||
134 | # 3. localpaths | ||
135 | # localpath can be called at any time | ||
136 | |||
def init(urls, d, setup = True):
    """
    Build (or extend) the per-recipe cache of FetchData objects.

    Creates a FetchData entry for each url not already cached for the
    current recipe (keyed by its FILE variable).  When setup is true,
    every cached entry that has not yet computed its local path does so
    now.  Returns the url -> FetchData mapping.
    """
    fn = bb.data.getVar('FILE', d, 1)
    urldata = urldata_cache.get(fn, {})

    for url in urls:
        if url not in urldata:
            urldata[url] = FetchData(url, d)

    if setup:
        for ud in urldata.values():
            if not ud.setup:
                ud.setup_localpath(d)

    urldata_cache[fn] = urldata
    return urldata
154 | |||
def go(d, urls = None):
    """
    Fetch all urls
    init must have previously been called

    For each url with a local download target, the fetch is skipped when
    the file and its .md5 stamp already exist (unless the method forces a
    refetch); otherwise the per-file lockfile is taken, the already-fetched
    check is repeated under the lock, and the method's go() is run before
    the md5 stamp is written and the lock released.
    """
    if not urls:
        urls = d.getVar("SRC_URI", 1).split()
    urldata = init(urls, d, True)

    for u in urls:
        ud = urldata[u]
        m = ud.method
        if ud.localfile:
            if not m.forcefetch(u, ud, d) and os.path.exists(ud.md5):
                # File already present along with md5 stamp file
                # Touch md5 file to show activity
                try:
                    os.utime(ud.md5, None)
                except:
                    # Errors aren't fatal here
                    pass
                continue
            # Serialise against other bitbake processes downloading the
            # same file.
            lf = bb.utils.lockfile(ud.lockfile)
            if not m.forcefetch(u, ud, d) and os.path.exists(ud.md5):
                # If someone else fetched this before we got the lock,
                # notice and don't try again
                try:
                    os.utime(ud.md5, None)
                except:
                    # Errors aren't fatal here
                    pass
                bb.utils.unlockfile(lf)
                continue
        m.go(u, ud, d)
        if ud.localfile:
            # lf is always bound here: any path that skipped taking the
            # lock also skipped this branch via "continue" above.
            if not m.forcefetch(u, ud, d):
                Fetch.write_md5sum(u, ud, d)
            bb.utils.unlockfile(lf)
193 | |||
194 | |||
def checkstatus(d):
    """
    Check all urls exist upstream
    init must have previously been called

    Aborts via bb.msg.fatal on the first url whose fetcher reports a
    failed status check.
    """
    urldata = init([], d, True)

    for url, ud in urldata.items():
        bb.msg.note(1, bb.msg.domain.Fetcher, "Testing URL %s" % url)
        if not ud.method.checkstatus(url, ud, d):
            bb.msg.fatal(bb.msg.domain.Fetcher, "URL %s doesn't work" % url)
209 | |||
def localpaths(d):
    """
    Return a list of the local filenames, assuming successful fetch
    """
    urldata = init([], d, True)
    return [ud.localpath for ud in urldata.values()]
222 | |||
# Flag set while a fetcher's localpath() is being evaluated so that a
# re-entrant call into get_srcrev() can detect the recursion and return
# the magic "SRCREVINACTION" marker (see FetchData.setup_localpath).
srcrev_internal_call = False
224 | |||
def get_srcrev(d):
    """
    Return the version string for the current package
    (usually to be used as PV)
    Most packages usually only have one SCM so we just pass on the call.
    In the multi SCM case, we build a value based on SRCREV_FORMAT which must
    have been set.
    """

    # localpath in the fetchers can end up evaluating SRCREV, which would
    # call straight back into here.  The srcrev_internal_call flag lets us
    # detect that and hand back the magic "SRCREVINACTION" token instead
    # of recursing.  Neater solutions welcome!
    if bb.fetch.srcrev_internal_call:
        return "SRCREVINACTION"

    # Only call setup_localpath on URIs which suppports_srcrev()
    urldata = init(bb.data.getVar('SRC_URI', d, 1).split(), d, False)
    scms = []
    for u in urldata:
        ud = urldata[u]
        if ud.method.suppports_srcrev():
            if not ud.setup:
                ud.setup_localpath(d)
            scms.append(u)

    if not scms:
        bb.msg.error(bb.msg.domain.Fetcher, "SRCREV was used yet no valid SCM was found in SRC_URI")
        raise ParameterError

    bb.data.setVar('__BB_DONT_CACHE','1', d)

    if len(scms) == 1:
        only = urldata[scms[0]]
        return only.method.sortable_revision(scms[0], only, d)

    #
    # Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
    #
    format_str = bb.data.getVar('SRCREV_FORMAT', d, 1)
    if not format_str:
        bb.msg.error(bb.msg.domain.Fetcher, "The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
        raise ParameterError

    # Substitute each named SCM's sortable revision into the format string.
    for scm in scms:
        ud = urldata[scm]
        if 'name' in ud.parm:
            name = ud.parm["name"]
            rev = ud.method.sortable_revision(scm, ud, d)
            format_str = format_str.replace(name, rev)

    return format_str
281 | |||
def localpath(url, d, cache = True):
    """
    Called from the parser with cache=False since the cache isn't ready
    at this point. Also called from classes in OE e.g. patch.bbclass

    Returns the local filename for url, or url itself when no fetch
    method claims it.  (The cache parameter is currently unused here.)
    """
    urldata = init([url], d)
    ud = urldata[url]
    if ud.method:
        return ud.localpath
    return url
291 | |||
292 | def runfetchcmd(cmd, d, quiet = False): | ||
293 | """ | ||
294 | Run cmd returning the command output | ||
295 | Raise an error if interrupted or cmd fails | ||
296 | Optionally echo command output to stdout | ||
297 | """ | ||
298 | |||
299 | # Need to export PATH as binary could be in metadata paths | ||
300 | # rather than host provided | ||
301 | # Also include some other variables. | ||
302 | # FIXME: Should really include all export varaiables? | ||
303 | exportvars = ['PATH', 'GIT_PROXY_COMMAND', 'GIT_PROXY_HOST', 'GIT_PROXY_PORT', 'GIT_CONFIG', 'http_proxy', 'ftp_proxy', 'SSH_AUTH_SOCK', 'SSH_AGENT_PID', 'HOME'] | ||
304 | |||
305 | for var in exportvars: | ||
306 | val = data.getVar(var, d, True) | ||
307 | if val: | ||
308 | cmd = 'export ' + var + '=%s; %s' % (val, cmd) | ||
309 | |||
310 | bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cmd) | ||
311 | |||
312 | # redirect stderr to stdout | ||
313 | stdout_handle = os.popen(cmd + " 2>&1", "r") | ||
314 | output = "" | ||
315 | |||
316 | while 1: | ||
317 | line = stdout_handle.readline() | ||
318 | if not line: | ||
319 | break | ||
320 | if not quiet: | ||
321 | print line, | ||
322 | output += line | ||
323 | |||
324 | status = stdout_handle.close() or 0 | ||
325 | signal = status >> 8 | ||
326 | exitstatus = status & 0xff | ||
327 | |||
328 | if signal: | ||
329 | raise FetchError("Fetch command %s failed with signal %s, output:\n%s" % (cmd, signal, output)) | ||
330 | elif status != 0: | ||
331 | raise FetchError("Fetch command %s failed with exit code %s, output:\n%s" % (cmd, status, output)) | ||
332 | |||
333 | return output | ||
334 | |||
class FetchData(object):
    """
    A class which represents the fetcher state for a given URI.
    """
    def __init__(self, url, d):
        # No local download target until setup_localpath() has run.
        self.localfile = ""
        (self.type, self.host, self.path, self.user, self.pswd, self.parm) = bb.decodeurl(data.expand(url, d))
        self.date = Fetch.getSRCDate(self, d)
        self.url = url
        # URL parameters may carry credentials when the URL itself doesn't.
        if not self.user and "user" in self.parm:
            self.user = self.parm["user"]
        if not self.pswd and "pswd" in self.parm:
            self.pswd = self.parm["pswd"]
        self.setup = False
        # Pick the first registered fetcher that claims this URL.
        for m in methods:
            if m.supports(url, self, d):
                self.method = m
                return
        raise NoMethodError("Missing implementation for url %s" % url)

    def setup_localpath(self, d):
        # Compute and record the local path (plus the derived .md5 stamp
        # and .lock filenames) for this URL.
        self.setup = True
        if "localpath" in self.parm:
            # if user sets localpath for file, use it instead.
            self.localpath = self.parm["localpath"]
        else:
            try:
                # Flag the evaluation so a re-entrant get_srcrev() call
                # returns "SRCREVINACTION" instead of recursing (see
                # get_srcrev); always cleared again in the finally.
                bb.fetch.srcrev_internal_call = True
                self.localpath = self.method.localpath(self.url, self, d)
            finally:
                bb.fetch.srcrev_internal_call = False
            # We have to clear data's internal caches since the cached value of SRCREV is now wrong.
            # Horrible...
            bb.data.delVar("ISHOULDNEVEREXIST", d)
        self.md5 = self.localpath + '.md5'
        self.lockfile = self.localpath + '.lock'
371 | |||
372 | |||
class Fetch(object):
    """Base class for 'fetch'ing data.

    Concrete fetchers (cvs, git, svn, ...) subclass this and override
    supports/localpath/go and friends; the static helpers here are shared
    utility code used both by subclasses and by module-level functions.
    """

    def __init__(self, urls=None):
        """
        The urls argument is accepted for API compatibility but, as
        historically, is not retained - the property always starts empty.
        (None replaces the original mutable [] default, which is a shared
        default-argument pitfall.)
        """
        self.urls = []

    def supports(self, url, urldata, d):
        """
        Check to see if this fetch class supports a given url.
        Base implementation supports nothing; subclasses override.
        """
        return 0

    def localpath(self, url, urldata, d):
        """
        Return the local filename of a given url assuming a successful fetch.
        Can also setup variables in urldata for use in go (saving code duplication
        and duplicate code execution)
        """
        return url

    def setUrls(self, urls):
        self.__urls = urls

    def getUrls(self):
        return self.__urls

    urls = property(getUrls, setUrls, None, "Urls property")

    def forcefetch(self, url, urldata, d):
        """
        Force a fetch, even if localpath exists?
        """
        return False

    def suppports_srcrev(self):
        """
        The fetcher supports auto source revisions (SRCREV)

        NOTE: the misspelling ("suppports") is load-bearing - callers such
        as get_srcrev() and subclass overrides use this exact name, so it
        cannot be corrected in isolation.
        """
        return False

    def go(self, url, urldata, d):
        """
        Fetch urls
        Assumes localpath was called first
        """
        raise NoMethodError("Missing implementation for url")

    def checkstatus(self, url, urldata, d):
        """
        Check the status of a URL
        Assumes localpath was called first
        Base implementation optimistically reports success.
        """
        bb.msg.note(1, bb.msg.domain.Fetcher, "URL %s could not be checked for status since no method exists." % url)
        return True

    def getSRCDate(urldata, d):
        """
        Return the SRC Date for the component, from the srcdate= URL
        parameter if present, else from the (per-PN) SRCDATE/CVSDATE/DATE
        variables.

        d the bb.data module
        """
        if "srcdate" in urldata.parm:
            return urldata.parm['srcdate']

        pn = data.getVar("PN", d, 1)

        if pn:
            return data.getVar("SRCDATE_%s" % pn, d, 1) or data.getVar("CVSDATE_%s" % pn, d, 1) or data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)

        return data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)
    getSRCDate = staticmethod(getSRCDate)

    def srcrev_internal_helper(ud, d):
        """
        Return:
            a) a source revision if specified
            b) True if auto srcrev is in action
            c) False otherwise
        Raises InvalidSRCREV when SRCREV is the "INVALID" placeholder.
        """

        if 'rev' in ud.parm:
            return ud.parm['rev']

        if 'tag' in ud.parm:
            return ud.parm['tag']

        rev = None
        if 'name' in ud.parm:
            pn = data.getVar("PN", d, 1)
            rev = data.getVar("SRCREV_pn-" + pn + "_" + ud.parm['name'], d, 1)
        if not rev:
            rev = data.getVar("SRCREV", d, 1)
        if rev == "INVALID":
            raise InvalidSRCREV("Please set SRCREV to a valid value")
        if not rev:
            return False
        # Compare by value, not identity: the original used "is", which
        # only worked by accident of CPython string interning.
        if rev == "SRCREVINACTION":
            return True
        return rev

    srcrev_internal_helper = staticmethod(srcrev_internal_helper)

    def localcount_internal_helper(ud, d):
        """
        Return:
            a) a locked localcount if specified
            b) None otherwise
        """

        localcount = None
        if 'name' in ud.parm:
            localcount = data.getVar("LOCALCOUNT_" + ud.parm['name'], d, 1)
        if not localcount:
            localcount = data.getVar("LOCALCOUNT", d, 1)
        return localcount

    localcount_internal_helper = staticmethod(localcount_internal_helper)

    def try_mirror(d, tarfn):
        """
        Try to use a mirrored version of the sources. We do this
        to avoid massive loads on foreign cvs and svn servers.
        This method will be used by the different fetcher
        implementations.

        d Is a bb.data instance
        tarfn is the name of the tarball
        Returns True when the tarball is already present or was fetched
        from a stash, False otherwise.
        """
        tarpath = os.path.join(data.getVar("DL_DIR", d, 1), tarfn)
        if os.access(tarpath, os.R_OK):
            bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists, skipping checkout." % tarfn)
            return True

        pn = data.getVar('PN', d, True)
        # Default to an empty list so the loop below is a no-op when PN is
        # unset; the original defaulted to None and would have raised
        # TypeError on iteration.
        src_tarball_stash = []
        if pn:
            src_tarball_stash = (data.getVar('SRC_TARBALL_STASH_%s' % pn, d, True) or data.getVar('CVS_TARBALL_STASH_%s' % pn, d, True) or data.getVar('SRC_TARBALL_STASH', d, True) or data.getVar('CVS_TARBALL_STASH', d, True) or "").split()

        ld = d.createCopy()
        for stash in src_tarball_stash:
            url = stash + tarfn
            try:
                ud = FetchData(url, ld)
            except bb.fetch.NoMethodError:
                bb.msg.debug(1, bb.msg.domain.Fetcher, "No method for %s" % url)
                continue

            ud.setup_localpath(ld)

            try:
                ud.method.go(url, ud, ld)
                return True
            except (bb.fetch.MissingParameterError,
                    bb.fetch.FetchError,
                    bb.fetch.MD5SumError):
                import sys
                # Only the exception value is needed; avoid unpacking into
                # names that shadow the type/traceback builtins.
                value = sys.exc_info()[1]
                bb.msg.debug(2, bb.msg.domain.Fetcher, "Tarball stash fetch failure: %s" % value)
        return False
    try_mirror = staticmethod(try_mirror)

    def verify_md5sum(ud, got_sum):
        """
        Verify the md5sum we wanted with the one we got.
        Trivially true when the url carries no md5sum= parameter.
        """
        wanted_sum = ud.parm.get('md5sum')
        if not wanted_sum:
            return True

        return wanted_sum == got_sum
    verify_md5sum = staticmethod(verify_md5sum)

    def write_md5sum(url, ud, d):
        """
        Verify the downloaded file against any md5sum= URL parameter
        (raising MD5SumError on mismatch) and record its md5 in the
        stamp file used by go() to skip re-fetches.
        """
        md5data = bb.utils.md5_file(ud.localpath)
        # verify the md5sum
        if not Fetch.verify_md5sum(ud, md5data):
            raise MD5SumError(url)

        # open() rather than the Py2-only file() builtin.
        md5out = open(ud.md5, 'w')
        md5out.write(md5data)
        md5out.close()
    write_md5sum = staticmethod(write_md5sum)

    def latest_revision(self, url, ud, d):
        """
        Look in the cache for the latest revision, if not present ask the SCM.
        Requires the subclass to implement _latest_revision.
        """
        if not hasattr(self, "_latest_revision"):
            raise ParameterError

        pd = persist_data.PersistData(d)
        key = self.generate_revision_key(url, ud, d)
        rev = pd.getValue("BB_URI_HEADREVS", key)
        if rev is not None:
            return str(rev)

        rev = self._latest_revision(url, ud, d)
        pd.setValue("BB_URI_HEADREVS", key, rev)
        return rev

    def sortable_revision(self, url, ud, d):
        """
        Return a monotonically sortable revision string of the form
        "<count>+<rev>", delegating entirely to _sortable_revision when
        the subclass provides one.  The count comes from (in order)
        BB_LOCALCOUNT_OVERRIDE, a subclass _sortable_buildindex, or a
        persisted counter bumped whenever the revision changes.
        """
        if hasattr(self, "_sortable_revision"):
            return self._sortable_revision(url, ud, d)

        pd = persist_data.PersistData(d)
        key = self.generate_revision_key(url, ud, d)

        latest_rev = self._build_revision(url, ud, d)
        last_rev = pd.getValue("BB_URI_LOCALCOUNT", key + "_rev")
        uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False
        count = None
        if uselocalcount:
            count = Fetch.localcount_internal_helper(ud, d)
        if count is None:
            count = pd.getValue("BB_URI_LOCALCOUNT", key + "_count")

        if last_rev == latest_rev:
            # NOTE(review): count could still be None here if a "_rev"
            # entry exists without a matching "_count" - confirm upstream.
            return str(count + "+" + latest_rev)

        buildindex_provided = hasattr(self, "_sortable_buildindex")
        if buildindex_provided:
            count = self._sortable_buildindex(url, ud, d, latest_rev)

        if count is None:
            count = "0"
        elif uselocalcount or buildindex_provided:
            count = str(count)
        else:
            count = str(int(count) + 1)

        pd.setValue("BB_URI_LOCALCOUNT", key + "_rev", latest_rev)
        pd.setValue("BB_URI_LOCALCOUNT", key + "_count", count)

        return str(count + "+" + latest_rev)

    def generate_revision_key(self, url, ud, d):
        """Cache key: the SCM-specific revision key plus the recipe name."""
        key = self._revision_key(url, ud, d)
        return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "")
617 | |||
import cvs
import git
import local
import svn
import wget
import svk
import ssh
import perforce
import bzr
import hg
import osc

# Register the concrete fetcher implementations.  Order matters: the
# first method whose supports() accepts a URL wins (see
# FetchData.__init__), so the catch-all local/wget handlers come first.
methods.extend([
    local.Local(),
    wget.Wget(),
    svn.Svn(),
    git.Git(),
    cvs.Cvs(),
    svk.Svk(),
    ssh.SSH(),
    perforce.Perforce(),
    bzr.Bzr(),
    hg.Hg(),
    osc.Osc(),
])