author    Richard Purdie <richard@openedhand.com>  2008-09-30 15:08:33 +0000
committer Richard Purdie <richard@openedhand.com>  2008-09-30 15:08:33 +0000
commit    c30eddb243e7e65f67f656e62848a033cf6f2e5c (patch)
tree      110dd95788b76f55d31cb8d30aac2de8400b6f4a /bitbake-dev/lib/bb/fetch/__init__.py
parent    5ef0510474004eeb2ae8a99b64e2febb1920e077 (diff)
download  poky-c30eddb243e7e65f67f656e62848a033cf6f2e5c.tar.gz

Add bitbake-dev to allow ease of testing and development of bitbake trunk

git-svn-id: https://svn.o-hand.com/repos/poky/trunk@5337 311d38ba-8fff-0310-9ca6-ca027cbcb966

Diffstat (limited to 'bitbake-dev/lib/bb/fetch/__init__.py')
 bitbake-dev/lib/bb/fetch/__init__.py (new, -rw-r--r--) | 556
 1 file changed, 556 insertions(+), 0 deletions(-)
diff --git a/bitbake-dev/lib/bb/fetch/__init__.py b/bitbake-dev/lib/bb/fetch/__init__.py
new file mode 100644
index 0000000000..c3bea447c1
--- /dev/null
+++ b/bitbake-dev/lib/bb/fetch/__init__.py
@@ -0,0 +1,556 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Fetch' implementations

Classes for obtaining upstream sources for the
BitBake build tools.
"""

# Copyright (C) 2003, 2004  Chris Larson
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig

import os, re, fcntl
import bb
from bb import data
from bb import persist_data

try:
    import cPickle as pickle
except ImportError:
    import pickle

class FetchError(Exception):
    """Exception raised when a download fails"""

class NoMethodError(Exception):
    """Exception raised when there is no method to obtain a supplied url or set of urls"""

class MissingParameterError(Exception):
    """Exception raised when a fetch method is missing a critical parameter in the url"""

class ParameterError(Exception):
    """Exception raised when a url cannot be processed due to invalid parameters."""

class MD5SumError(Exception):
    """Exception raised when the MD5SUM of a file does not match the expected one"""

def uri_replace(uri, uri_find, uri_replace, d):
#   bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: operating on %s" % uri)
    if not uri or not uri_find or not uri_replace:
        bb.msg.debug(1, bb.msg.domain.Fetcher, "uri_replace: passed an undefined value, not replacing")
        return uri
    uri_decoded = list(bb.decodeurl(uri))
    uri_find_decoded = list(bb.decodeurl(uri_find))
    uri_replace_decoded = list(bb.decodeurl(uri_replace))
    result_decoded = ['','','','','',{}]
    # Use enumerate() rather than list.index() so duplicate components
    # (e.g. empty user/password fields) map to the correct position
    for loc, i in enumerate(uri_find_decoded):
        result_decoded[loc] = uri_decoded[loc]
        if isinstance(i, basestring):
            if re.match(i, uri_decoded[loc]):
                result_decoded[loc] = re.sub(i, uri_replace_decoded[loc], uri_decoded[loc])
                if loc == 2:
                    if d:
                        localfn = bb.fetch.localpath(uri, d)
                        if localfn:
                            result_decoded[loc] = os.path.dirname(result_decoded[loc]) + "/" + os.path.basename(localfn)
#                   bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: matching %s against %s and replacing with %s" % (i, uri_decoded[loc], uri_replace_decoded[loc]))
            else:
#               bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: no match")
                return uri
#       else:
#           for j in i.keys():
#               FIXME: apply replacements against options
    return bb.encodeurl(result_decoded)
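# A hedged sketch of uri_replace() in use (URIs purely illustrative): each
# component of uri_find is treated as a regex against the decoded input URI,
# so something like
#
#   uri_replace("http://example.com/sources/pkg-1.0.tar.gz",
#               "http://example.com/.*",
#               "file:///local/mirror/", d)
#
# would re-encode the input against the file:// replacement, pointing the
# fetch at a local mirror holding the same tarball.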

methods = []
urldata_cache = {}

def fetcher_init(d):
    """
    Called to initialize the fetchers once the configuration data is known.
    Calls before this must not hit the cache.
    """
    pd = persist_data.PersistData(d)
    # When to drop SCM head revisions is controlled by user policy
    srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear"
    if srcrev_policy == "cache":
        bb.msg.debug(1, bb.msg.domain.Fetcher, "Keeping SRCREV cache due to cache policy of: %s" % srcrev_policy)
    elif srcrev_policy == "clear":
        bb.msg.debug(1, bb.msg.domain.Fetcher, "Clearing SRCREV cache due to cache policy of: %s" % srcrev_policy)
        pd.delDomain("BB_URI_HEADREVS")
    else:
        bb.msg.fatal(bb.msg.domain.Fetcher, "Invalid SRCREV cache policy of: %s" % srcrev_policy)
    # Make sure our domains exist
    pd.addDomain("BB_URI_HEADREVS")
    pd.addDomain("BB_URI_LOCALCOUNT")

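# Example policy configuration (assumption: set in a conf file; "clear" is
# the default when BB_SRCREV_POLICY is unset):
#
#   BB_SRCREV_POLICY = "cache"
#
# keeps cached SCM head revisions across runs instead of clearing them here.
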
# Function call order is usually:
#   1. init
#   2. go
#   3. localpaths
# localpath can be called at any time
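#
# A minimal usage sketch of that ordering (assumes 'd' is a populated
# datastore with SRC_URI set; functions as defined in this module):
#
#   src_uri = bb.data.getVar('SRC_URI', d, 1).split()
#   bb.fetch.init(src_uri, d)
#   bb.fetch.go(d)
#   downloaded = bb.fetch.localpaths(d)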

def init(urls, d, setup = True):
    urldata = {}
    fn = bb.data.getVar('FILE', d, 1)
    if fn in urldata_cache:
        urldata = urldata_cache[fn]

    for url in urls:
        if url not in urldata:
            urldata[url] = FetchData(url, d)

    if setup:
        for url in urldata:
            if not urldata[url].setup:
                urldata[url].setup_localpath(d)

    urldata_cache[fn] = urldata
    return urldata

def go(d):
    """
    Fetch all urls.
    init must have previously been called.
    """
    urldata = init([], d, True)

    for u in urldata:
        ud = urldata[u]
        m = ud.method
        if ud.localfile:
            if not m.forcefetch(u, ud, d) and os.path.exists(ud.md5):
                # File already present along with md5 stamp file
                # Touch md5 file to show activity
                try:
                    os.utime(ud.md5, None)
                except OSError:
                    # Errors aren't fatal here
                    pass
                continue
            lf = bb.utils.lockfile(ud.lockfile)
            if not m.forcefetch(u, ud, d) and os.path.exists(ud.md5):
                # If someone else fetched this before we got the lock,
                # notice and don't try again
                try:
                    os.utime(ud.md5, None)
                except OSError:
                    # Errors aren't fatal here
                    pass
                bb.utils.unlockfile(lf)
                continue
        m.go(u, ud, d)
        if ud.localfile:
            if not m.forcefetch(u, ud, d):
                Fetch.write_md5sum(u, ud, d)
            bb.utils.unlockfile(lf)


def checkstatus(d):
    """
    Check that all urls exist upstream.
    init must have previously been called.
    """
    urldata = init([], d, True)

    for u in urldata:
        ud = urldata[u]
        m = ud.method
        bb.msg.note(1, bb.msg.domain.Fetcher, "Testing URL %s" % u)
        ret = m.checkstatus(u, ud, d)
        if not ret:
            bb.msg.fatal(bb.msg.domain.Fetcher, "URL %s doesn't work" % u)

def localpaths(d):
    """
    Return a list of the local filenames, assuming a successful fetch.
    """
    local = []
    urldata = init([], d, True)

    for u in urldata:
        ud = urldata[u]
        local.append(ud.localpath)

    return local

srcrev_internal_call = False

def get_srcrev(d):
    """
    Return the version string for the current package
    (usually to be used as PV).
    Most packages have only one SCM, so we just pass on the call.
    In the multi-SCM case, we build a value based on SRCREV_FORMAT, which
    must have been set.
    """

    #
    # Ugly code alert. localpath in the fetchers will try to evaluate SRCREV, which
    # could translate into a call to here. If it does, we need to catch this
    # and provide some way so it knows get_srcrev is active instead of being
    # some number etc. Hence the srcrev_internal_call tracking and the magic
    # "SRCREVINACTION" return value.
    #
    # Neater solutions welcome!
    #
    if bb.fetch.srcrev_internal_call:
        return "SRCREVINACTION"

    scms = []

    # Only call setup_localpath on URIs for which supports_srcrev() is true
    urldata = init(bb.data.getVar('SRC_URI', d, 1).split(), d, False)
    for u in urldata:
        ud = urldata[u]
        if ud.method.supports_srcrev():
            if not ud.setup:
                ud.setup_localpath(d)
            scms.append(u)

    if len(scms) == 0:
        bb.msg.error(bb.msg.domain.Fetcher, "SRCREV was used yet no valid SCM was found in SRC_URI")
        raise ParameterError

    if len(scms) == 1:
        return urldata[scms[0]].method.sortable_revision(scms[0], urldata[scms[0]], d)

    #
    # Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
    #
    format = bb.data.getVar('SRCREV_FORMAT', d, 1)
    if not format:
        bb.msg.error(bb.msg.domain.Fetcher, "The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
        raise ParameterError

    for scm in scms:
        if 'name' in urldata[scm].parm:
            name = urldata[scm].parm["name"]
            rev = urldata[scm].method.sortable_revision(scm, urldata[scm], d)
            format = format.replace(name, rev)

    return format
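# An illustrative multi-SCM recipe (URIs and names hypothetical):
#
#   SRC_URI = "git://host/a.git;name=machine svn://host/b;module=c;name=meta"
#   SRCREV_FORMAT = "machine_meta"
#
# get_srcrev() substitutes each name in SRCREV_FORMAT with that SCM's
# sortable revision to build the final version string.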

def localpath(url, d, cache = True):
    """
    Called from the parser with cache=False since the cache isn't ready
    at this point. Also called from classes in OE, e.g. patch.bbclass.
    """
    ud = init([url], d)
    if ud[url].method:
        return ud[url].localpath
    return url

def runfetchcmd(cmd, d, quiet = False):
    """
    Run cmd, returning the command output.
    Raise an error if interrupted or cmd fails.
    Optionally echo command output to stdout.
    """

    # Need to export PATH as the binary could be in metadata paths
    # rather than host provided.
    # Also include some other variables.
    # FIXME: Should really include all exported variables?
    exportvars = ['PATH', 'GIT_PROXY_HOST', 'GIT_PROXY_PORT', 'GIT_PROXY_COMMAND']

    for var in exportvars:
        val = data.getVar(var, d, True)
        if val:
            cmd = 'export %s="%s"; %s' % (var, val, cmd)

    bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cmd)

    # redirect stderr to stdout
    stdout_handle = os.popen(cmd + " 2>&1", "r")
    output = ""

    while 1:
        line = stdout_handle.readline()
        if not line:
            break
        if not quiet:
            print line,
        output += line

    # close() returns a wait()-style status: the signal (if any) lives in
    # the low byte, the exit code in the high byte
    status = stdout_handle.close() or 0
    signal = status & 0x7f
    exitstatus = status >> 8

    if signal:
        raise FetchError("Fetch command %s failed with signal %s, output:\n%s" % (cmd, signal, output))
    elif exitstatus != 0:
        raise FetchError("Fetch command %s failed with exit code %s, output:\n%s" % (cmd, exitstatus, output))

    return output
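# A hedged usage sketch (command and URL purely illustrative); output is
# echoed unless quiet=True, and any failure raises FetchError:
#
#   rev = runfetchcmd("git ls-remote %s HEAD" % repo_url, d, quiet=True)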

class FetchData(object):
    """
    A class which represents the fetcher state for a given URI.
    """
    def __init__(self, url, d):
        self.localfile = ""
        (self.type, self.host, self.path, self.user, self.pswd, self.parm) = bb.decodeurl(data.expand(url, d))
        self.date = Fetch.getSRCDate(self, d)
        self.url = url
        self.setup = False
        for m in methods:
            if m.supports(url, self, d):
                self.method = m
                return
        raise NoMethodError("Missing implementation for url %s" % url)

    def setup_localpath(self, d):
        self.setup = True
        if "localpath" in self.parm:
            # if user sets localpath for file, use it instead.
            self.localpath = self.parm["localpath"]
        else:
            bb.fetch.srcrev_internal_call = True
            self.localpath = self.method.localpath(self.url, self, d)
            bb.fetch.srcrev_internal_call = False
            # We have to clear data's internal caches since the cached value of SRCREV is now wrong.
            # Horrible...
            bb.data.delVar("ISHOULDNEVEREXIST", d)
        self.md5 = self.localpath + '.md5'
        self.lockfile = self.localpath + '.lock'


class Fetch(object):
    """Base class for 'fetch'ing data"""

    def __init__(self, urls = None):
        self.urls = urls or []

    def supports(self, url, urldata, d):
        """
        Check to see if this fetch class supports a given url.
        """
        return 0

    def localpath(self, url, urldata, d):
        """
        Return the local filename of a given url assuming a successful fetch.
        Can also setup variables in urldata for use in go (saving code duplication
        and duplicate code execution)
        """
        return url

    def setUrls(self, urls):
        self.__urls = urls

    def getUrls(self):
        return self.__urls

    urls = property(getUrls, setUrls, None, "Urls property")

    def forcefetch(self, url, urldata, d):
        """
        Force a fetch, even if localpath exists?
        """
        return False

    def supports_srcrev(self):
        """
        The fetcher supports auto source revisions (SRCREV)
        """
        return False

    def go(self, url, urldata, d):
        """
        Fetch urls.
        Assumes localpath was called first.
        """
        raise NoMethodError("Missing implementation for url")

    def checkstatus(self, url, urldata, d):
        """
        Check the status of a URL.
        Assumes localpath was called first.
        """
        bb.msg.note(1, bb.msg.domain.Fetcher, "URL %s could not be checked for status since no method exists." % url)
        return True

    def getSRCDate(urldata, d):
        """
        Return the SRC Date for the component

        d is the bb.data datastore instance
        """
        if "srcdate" in urldata.parm:
            return urldata.parm['srcdate']

        pn = data.getVar("PN", d, 1)

        if pn:
            return data.getVar("SRCDATE_%s" % pn, d, 1) or data.getVar("CVSDATE_%s" % pn, d, 1) or data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)

        return data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)
    getSRCDate = staticmethod(getSRCDate)

    def srcrev_internal_helper(ud, d):
        """
        Return:
        a) a source revision if specified
        b) True if auto srcrev is in action
        c) False otherwise
        """

        if 'rev' in ud.parm:
            return ud.parm['rev']

        if 'tag' in ud.parm:
            return ud.parm['tag']

        rev = None
        if 'name' in ud.parm:
            pn = data.getVar("PN", d, 1)
            rev = data.getVar("SRCREV_pn-" + pn + "_" + ud.parm['name'], d, 1)
        if not rev:
            rev = data.getVar("SRCREV", d, 1)
        if not rev:
            return False
        if rev == "SRCREVINACTION":
            return True
        return rev

    srcrev_internal_helper = staticmethod(srcrev_internal_helper)
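    # Resolution order implemented above: an explicit rev= or tag= URL
    # parameter wins, then SRCREV_pn-<PN>_<name> when the URL carries a
    # name= parameter, then plain SRCREV. e.g. (recipe and name hypothetical):
    #
    #   SRCREV_pn-mypkg_meta = "a1b2c3d4e5"
    #
    # pins the 'meta' SCM of recipe 'mypkg' to that revision.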

    def try_mirror(d, tarfn):
        """
        Try to use a mirrored version of the sources. We do this
        to avoid massive loads on foreign cvs and svn servers.
        This method will be used by the different fetcher
        implementations.

        d is a bb.data instance
        tarfn is the name of the tarball
        """
        tarpath = os.path.join(data.getVar("DL_DIR", d, 1), tarfn)
        if os.access(tarpath, os.R_OK):
            bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists, skipping checkout." % tarfn)
            return True

        pn = data.getVar('PN', d, True)
        # default to an empty list so we don't iterate over None when PN is unset
        src_tarball_stash = []
        if pn:
            src_tarball_stash = (data.getVar('SRC_TARBALL_STASH_%s' % pn, d, True) or data.getVar('CVS_TARBALL_STASH_%s' % pn, d, True) or data.getVar('SRC_TARBALL_STASH', d, True) or data.getVar('CVS_TARBALL_STASH', d, True) or "").split()

        for stash in src_tarball_stash:
            fetchcmd = data.getVar("FETCHCOMMAND_mirror", d, True) or data.getVar("FETCHCOMMAND_wget", d, True)
            uri = stash + tarfn
            bb.msg.note(1, bb.msg.domain.Fetcher, "fetch " + uri)
            fetchcmd = fetchcmd.replace("${URI}", uri)
            ret = os.system(fetchcmd)
            if ret == 0:
                bb.msg.note(1, bb.msg.domain.Fetcher, "Fetched %s from tarball stash, skipping checkout" % tarfn)
                return True
        return False
    try_mirror = staticmethod(try_mirror)

    def verify_md5sum(ud, got_sum):
        """
        Verify the md5sum we wanted against the one we got.
        """
        wanted_sum = None
        if 'md5sum' in ud.parm:
            wanted_sum = ud.parm['md5sum']
        if not wanted_sum:
            return True

        return wanted_sum == got_sum
    verify_md5sum = staticmethod(verify_md5sum)

    def write_md5sum(url, ud, d):
        md5data = bb.utils.md5_file(ud.localpath)
        # verify the md5sum
        if not Fetch.verify_md5sum(ud, md5data):
            raise MD5SumError(url)

        md5out = open(ud.md5, 'w')
        md5out.write(md5data)
        md5out.close()
    write_md5sum = staticmethod(write_md5sum)
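    # The expected checksum comes from the URL itself, e.g. (URL and value
    # purely illustrative):
    #
    #   SRC_URI = "http://example.com/foo-1.0.tar.gz;md5sum=<expected md5>"
    #
    # When no md5sum parameter is present, verify_md5sum() accepts any file.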

    def latest_revision(self, url, ud, d):
        """
        Look in the cache for the latest revision; if not present, ask the SCM.
        """
        if not hasattr(self, "_latest_revision"):
            raise ParameterError

        pd = persist_data.PersistData(d)
        key = self._revision_key(url, ud, d)
        rev = pd.getValue("BB_URI_HEADREVS", key)
        if rev is not None:
            return str(rev)

        rev = self._latest_revision(url, ud, d)
        pd.setValue("BB_URI_HEADREVS", key, rev)
        return rev

    def sortable_revision(self, url, ud, d):
        """
        Return a sortable revision of the form "<count>+<rev>", where the
        locally persisted count increments each time the upstream head
        revision changes.
        """
        if hasattr(self, "_sortable_revision"):
            return self._sortable_revision(url, ud, d)

        pd = persist_data.PersistData(d)
        key = self._revision_key(url, ud, d)
        latest_rev = self._build_revision(url, ud, d)
        last_rev = pd.getValue("BB_URI_LOCALCOUNT", key + "_rev")
        count = pd.getValue("BB_URI_LOCALCOUNT", key + "_count")

        if last_rev == latest_rev:
            return str(count) + "+" + latest_rev

        if count is None:
            count = "0"
        else:
            count = str(int(count) + 1)

        pd.setValue("BB_URI_LOCALCOUNT", key + "_rev", latest_rev)
        pd.setValue("BB_URI_LOCALCOUNT", key + "_count", count)

        return count + "+" + latest_rev
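    # e.g. the first head revision seen yields "0+<rev>"; when upstream
    # moves, the next value is "1+<newrev>", so the embedded counter keeps
    # increasing even though raw SCM hashes don't sort on their own.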


import cvs
import git
import local
import svn
import wget
import svk
import ssh
import perforce
import bzr
import hg

methods.append(local.Local())
methods.append(wget.Wget())
methods.append(svn.Svn())
methods.append(git.Git())
methods.append(cvs.Cvs())
methods.append(svk.Svk())
methods.append(ssh.SSH())
methods.append(perforce.Perforce())
methods.append(bzr.Bzr())
methods.append(hg.Hg())
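
# A hedged sketch of what a new fetcher looks like under the Fetch protocol
# above; the "example" scheme, path, and command are hypothetical, and a
# real implementation would live in its own module and be registered with
# methods.append() like those above:
#
#   class Example(Fetch):
#       def supports(self, url, ud, d):
#           return ud.type in ['example']
#
#       def localpath(self, url, ud, d):
#           ud.localfile = data.expand(os.path.basename(ud.path), d)
#           return os.path.join(data.getVar("DL_DIR", d, 1), ud.localfile)
#
#       def go(self, url, ud, d):
#           # copy from a local stash; runfetchcmd raises FetchError on failure
#           runfetchcmd("cp /stash%s %s" % (ud.path, ud.localpath), d)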