author     Yu Ke <ke.yu@intel.com>  2011-01-10 14:23:36 +0000
committer  Richard Purdie <richard.purdie@linuxfoundation.org>  2011-01-10 14:23:36 +0000
commit     4dccd92439f3b21056c53f2317249228067defe6 (patch)
tree       b5507598213381b870f53b3f816102b0b2899b86 /bitbake/lib/bb/fetch2
parent     f46cdcbbae0ac3cbce423d262358e670add6065e (diff)
download   poky-4dccd92439f3b21056c53f2317249228067defe6.tar.gz
bitbake: copy bb.fetch to bb.fetch2 as initial code base for fetcher overhaul
Signed-off-by: Yu Ke <ke.yu@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Diffstat (limited to 'bitbake/lib/bb/fetch2')
-rw-r--r--  bitbake/lib/bb/fetch2/__init__.py    836
-rw-r--r--  bitbake/lib/bb/fetch2/bzr.py         148
-rw-r--r--  bitbake/lib/bb/fetch2/cvs.py         172
-rw-r--r--  bitbake/lib/bb/fetch2/git.py         261
-rw-r--r--  bitbake/lib/bb/fetch2/hg.py          180
-rw-r--r--  bitbake/lib/bb/fetch2/local.py        73
-rw-r--r--  bitbake/lib/bb/fetch2/osc.py         143
-rw-r--r--  bitbake/lib/bb/fetch2/perforce.py    206
-rw-r--r--  bitbake/lib/bb/fetch2/repo.py         98
-rw-r--r--  bitbake/lib/bb/fetch2/ssh.py         118
-rw-r--r--  bitbake/lib/bb/fetch2/svk.py         104
-rw-r--r--  bitbake/lib/bb/fetch2/svn.py         204
-rw-r--r--  bitbake/lib/bb/fetch2/wget.py         93
13 files changed, 2636 insertions, 0 deletions
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
new file mode 100644
index 0000000000..f7153ebce9
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -0,0 +1,836 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6Classes for obtaining upstream sources for the
7BitBake build tools.
8"""
9
10# Copyright (C) 2003, 2004 Chris Larson
11#
12# This program is free software; you can redistribute it and/or modify
13# it under the terms of the GNU General Public License version 2 as
14# published by the Free Software Foundation.
15#
16# This program is distributed in the hope that it will be useful,
17# but WITHOUT ANY WARRANTY; without even the implied warranty of
18# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19# GNU General Public License for more details.
20#
21# You should have received a copy of the GNU General Public License along
22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24#
25# Based on functions from the base bb module, Copyright 2003 Holger Schurig
26
27from __future__ import absolute_import
28from __future__ import print_function
29import os, re
30import logging
31import bb
32from bb import data
33from bb import persist_data
34
35logger = logging.getLogger("BitBake.Fetch")
36
37class MalformedUrl(Exception):
38 """Exception raised when encountering an invalid url"""
39
40class FetchError(Exception):
41 """Exception raised when a download fails"""
42
43class NoMethodError(Exception):
44 """Exception raised when there is no method to obtain a supplied url or set of urls"""
45
46class MissingParameterError(Exception):
47 """Exception raised when a fetch method is missing a critical parameter in the url"""
48
49class ParameterError(Exception):
50    """Exception raised when a url cannot be processed due to invalid parameters."""
51
52class MD5SumError(Exception):
53 """Exception raised when a MD5SUM of a file does not match the expected one"""
54
55class InvalidSRCREV(Exception):
56 """Exception raised when an invalid SRCREV is encountered"""
57
58def decodeurl(url):
59    """Decodes a URL into the tokens (scheme, network location, path,
60 user, password, parameters).
61 """
62
63 m = re.compile('(?P<type>[^:]*)://((?P<user>.+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
64 if not m:
65 raise MalformedUrl(url)
66
67 type = m.group('type')
68 location = m.group('location')
69 if not location:
70 raise MalformedUrl(url)
71 user = m.group('user')
72 parm = m.group('parm')
73
74 locidx = location.find('/')
75 if locidx != -1 and type.lower() != 'file':
76 host = location[:locidx]
77 path = location[locidx:]
78 else:
79 host = ""
80 path = location
81 if user:
82 m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user)
83 if m:
84 user = m.group('user')
85 pswd = m.group('pswd')
86 else:
87 user = ''
88 pswd = ''
89
90 p = {}
91 if parm:
92 for s in parm.split(';'):
93 s1, s2 = s.split('=')
94 p[s1] = s2
95
96 return (type, host, path, user, pswd, p)
97
98def encodeurl(decoded):
99 """Encodes a URL from tokens (scheme, network location, path,
100 user, password, parameters).
101 """
102
103 (type, host, path, user, pswd, p) = decoded
104
105 if not type or not path:
106 raise MissingParameterError("Type or path url components missing when encoding %s" % decoded)
107 url = '%s://' % type
108 if user:
109 url += "%s" % user
110 if pswd:
111 url += ":%s" % pswd
112 url += "@"
113 if host:
114 url += "%s" % host
115 url += "%s" % path
116 if p:
117 for parm in p:
118 url += ";%s=%s" % (parm, p[parm])
119
120 return url
121
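# A round-trip sketch for decodeurl()/encodeurl(); the URL below is purely
# illustrative and not taken from any recipe or test:
example_tokens = decodeurl("git://git.example.com/repo.git;protocol=http")
# -> ('git', 'git.example.com', '/repo.git', '', '', {'protocol': 'http'})
assert encodeurl(example_tokens) == "git://git.example.com/repo.git;protocol=http"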
122def uri_replace(uri, uri_find, uri_replace, d):
123 if not uri or not uri_find or not uri_replace:
124 logger.debug(1, "uri_replace: passed an undefined value, not replacing")
125 uri_decoded = list(decodeurl(uri))
126 uri_find_decoded = list(decodeurl(uri_find))
127 uri_replace_decoded = list(decodeurl(uri_replace))
128 result_decoded = ['', '', '', '', '', {}]
129 for i in uri_find_decoded:
130 loc = uri_find_decoded.index(i)
131 result_decoded[loc] = uri_decoded[loc]
132 if isinstance(i, basestring):
133 if (re.match(i, uri_decoded[loc])):
134 result_decoded[loc] = re.sub(i, uri_replace_decoded[loc], uri_decoded[loc])
135 if uri_find_decoded.index(i) == 2:
136 if d:
137 localfn = bb.fetch.localpath(uri, d)
138 if localfn:
139 result_decoded[loc] = os.path.join(os.path.dirname(result_decoded[loc]), os.path.basename(bb.fetch.localpath(uri, d)))
140 else:
141 return uri
142 return encodeurl(result_decoded)
143
144methods = []
145urldata_cache = {}
146saved_headrevs = {}
147
148def fetcher_init(d):
149 """
150 Called to initialize the fetchers once the configuration data is known.
151 Calls before this must not hit the cache.
152 """
153 pd = persist_data.persist(d)
154 # When to drop SCM head revisions controlled by user policy
155 srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear"
156 if srcrev_policy == "cache":
157 logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
158 elif srcrev_policy == "clear":
159 logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
160 try:
161 bb.fetch.saved_headrevs = pd['BB_URI_HEADREVS'].items()
162 except:
163 pass
164 del pd['BB_URI_HEADREVS']
165 else:
166 raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
167
168 for m in methods:
169 if hasattr(m, "init"):
170 m.init(d)
171
172def fetcher_compare_revisions(d):
173 """
174    Compare the revisions in the persistent cache with current values and
175 return true/false on whether they've changed.
176 """
177
178 pd = persist_data.persist(d)
179 data = pd['BB_URI_HEADREVS'].items()
180 data2 = bb.fetch.saved_headrevs
181
182 changed = False
183 for key in data:
184 if key not in data2 or data2[key] != data[key]:
185 logger.debug(1, "%s changed", key)
186 changed = True
187 return True
188 else:
189 logger.debug(2, "%s did not change", key)
190 return False
191
192# Function call order is usually:
193# 1. init
194# 2. go
195# 3. localpaths
196# localpath can be called at any time
197
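# A minimal sketch of the call order described above; "_fetch_all" is a
# hypothetical helper and "d" is assumed to be an already populated bb.data
# datastore for a recipe (not constructed here):
def _fetch_all(d):
    urls = d.getVar("SRC_URI", 1).split()
    init(urls, d)           # 1. build FetchData objects and set up local paths
    go(d, urls)             # 2. download, trying PREMIRRORS, upstream, then MIRRORS
    return localpaths(d)    # 3. the local filenames, ready for unpacking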
198def init(urls, d, setup = True):
199 urldata = {}
200
201 fn = bb.data.getVar('FILE', d, 1)
202 if fn in urldata_cache:
203 urldata = urldata_cache[fn]
204
205 for url in urls:
206 if url not in urldata:
207 urldata[url] = FetchData(url, d)
208
209 if setup:
210 for url in urldata:
211 if not urldata[url].setup:
212 urldata[url].setup_localpath(d)
213
214 urldata_cache[fn] = urldata
215 return urldata
216
217def mirror_from_string(data):
218 return [ i.split() for i in (data or "").replace('\\n','\n').split('\n') if i ]
219
220def removefile(f):
221 try:
222 os.remove(f)
223 except:
224 pass
225
226def verify_checksum(u, ud, d):
227 """
228 verify the MD5 and SHA256 checksum for downloaded src
229
230 return value:
231 - True: checksum matched
232 - False: checksum unmatched
233
234    If the checksum is missing from the recipe file, "BB_STRICT_CHECKSUM" decides the
235    result: if BB_STRICT_CHECKSUM = "1" the missing checksum is treated as a failure,
236    otherwise it is treated as matched.
237 """
238
239 if not ud.type in ["http", "https", "ftp", "ftps"]:
240 return
241
242 md5data = bb.utils.md5_file(ud.localpath)
243 sha256data = bb.utils.sha256_file(ud.localpath)
244
245 if (ud.md5_expected == None or ud.sha256_expected == None):
246 logger.warn('Missing SRC_URI checksum for %s, consider adding to the recipe:\n'
247 'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"',
248 ud.localpath, ud.md5_name, md5data,
249 ud.sha256_name, sha256data)
250 if bb.data.getVar("BB_STRICT_CHECKSUM", d, True) == "1":
251 raise FetchError("No checksum specified for %s." % u)
252 return
253
254 if (ud.md5_expected != md5data or ud.sha256_expected != sha256data):
255 logger.error('The checksums for "%s" did not match.\n'
256 ' MD5: expected "%s", got "%s"\n'
257 ' SHA256: expected "%s", got "%s"\n',
258 ud.localpath, ud.md5_expected, md5data,
259 ud.sha256_expected, sha256data)
260 raise FetchError("%s checksum mismatch." % u)
261
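# The expected values compared above come from SRC_URI varflags in the recipe
# (e.g. SRC_URI[md5sum], SRC_URI[sha256sum]); bb.utils.md5_file()/sha256_file()
# should yield the same hex digests as this rough hashlib sketch (the helper
# name is hypothetical):
import hashlib

def _hexdigest(path, algo):
    h = hashlib.new(algo)
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(8192), ''):
            h.update(chunk)
    return h.hexdigest()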
262def go(d, urls = None):
263 """
264 Fetch all urls
265 init must have previously been called
266 """
267 if not urls:
268 urls = d.getVar("SRC_URI", 1).split()
269 urldata = init(urls, d, True)
270
271 for u in urls:
272 ud = urldata[u]
273 m = ud.method
274 localpath = ""
275
276 if not ud.localfile:
277 continue
278
279 lf = bb.utils.lockfile(ud.lockfile)
280
281 if m.try_premirror(u, ud, d):
282 # First try fetching uri, u, from PREMIRRORS
283 mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', d, True))
284 localpath = try_mirrors(d, u, mirrors, False, m.forcefetch(u, ud, d))
285 elif os.path.exists(ud.localfile):
286 localpath = ud.localfile
287
288 # Need to re-test forcefetch() which will return true if our copy is too old
289 if m.forcefetch(u, ud, d) or not localpath:
290 # Next try fetching from the original uri, u
291 try:
292 m.go(u, ud, d)
293 localpath = ud.localpath
294 except FetchError:
295 # Remove any incomplete file
296 removefile(ud.localpath)
297 # Finally, try fetching uri, u, from MIRRORS
298 mirrors = mirror_from_string(bb.data.getVar('MIRRORS', d, True))
299 localpath = try_mirrors (d, u, mirrors)
300 if not localpath or not os.path.exists(localpath):
301 raise FetchError("Unable to fetch URL %s from any source." % u)
302
303 ud.localpath = localpath
304
305 if os.path.exists(ud.md5):
306 # Touch the md5 file to show active use of the download
307 try:
308 os.utime(ud.md5, None)
309 except:
310 # Errors aren't fatal here
311 pass
312 else:
313 # Only check the checksums if we've not seen this item before
314 verify_checksum(u, ud, d)
315 Fetch.write_md5sum(u, ud, d)
316
317 bb.utils.unlockfile(lf)
318
319def checkstatus(d, urls = None):
320 """
321 Check all urls exist upstream
322 init must have previously been called
323 """
324 urldata = init([], d, True)
325
326 if not urls:
327 urls = urldata
328
329 for u in urls:
330 ud = urldata[u]
331 m = ud.method
332 logger.debug(1, "Testing URL %s", u)
333 # First try checking uri, u, from PREMIRRORS
334 mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', d, True))
335 ret = try_mirrors(d, u, mirrors, True)
336 if not ret:
337 # Next try checking from the original uri, u
338 try:
339 ret = m.checkstatus(u, ud, d)
340 except:
341 # Finally, try checking uri, u, from MIRRORS
342 mirrors = mirror_from_string(bb.data.getVar('MIRRORS', d, True))
343 ret = try_mirrors (d, u, mirrors, True)
344
345 if not ret:
346 raise FetchError("URL %s doesn't work" % u)
347
348def localpaths(d):
349 """
350 Return a list of the local filenames, assuming successful fetch
351 """
352 local = []
353 urldata = init([], d, True)
354
355 for u in urldata:
356 ud = urldata[u]
357 local.append(ud.localpath)
358
359 return local
360
361srcrev_internal_call = False
362
363def get_srcrev(d):
364 """
365 Return the version string for the current package
366 (usually to be used as PV)
367 Most packages usually only have one SCM so we just pass on the call.
368 In the multi SCM case, we build a value based on SRCREV_FORMAT which must
369 have been set.
370 """
371
372 #
373 # Ugly code alert. localpath in the fetchers will try to evaluate SRCREV which
374 # could translate into a call to here. If it does, we need to catch this
375 # and provide some way so it knows get_srcrev is active instead of being
376 # some number etc. hence the srcrev_internal_call tracking and the magic
377 # "SRCREVINACTION" return value.
378 #
379 # Neater solutions welcome!
380 #
381 if bb.fetch.srcrev_internal_call:
382 return "SRCREVINACTION"
383
384 scms = []
385
386 # Only call setup_localpath on URIs which supports_srcrev()
387 urldata = init(bb.data.getVar('SRC_URI', d, 1).split(), d, False)
388 for u in urldata:
389 ud = urldata[u]
390 if ud.method.supports_srcrev():
391 if not ud.setup:
392 ud.setup_localpath(d)
393 scms.append(u)
394
395 if len(scms) == 0:
396 logger.error("SRCREV was used yet no valid SCM was found in SRC_URI")
397 raise ParameterError
398
399 if bb.data.getVar('BB_SRCREV_POLICY', d, True) != "cache":
400 bb.data.setVar('__BB_DONT_CACHE', '1', d)
401
402 if len(scms) == 1:
403 return urldata[scms[0]].method.sortable_revision(scms[0], urldata[scms[0]], d)
404
405 #
406    # Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
407 #
408 format = bb.data.getVar('SRCREV_FORMAT', d, 1)
409 if not format:
410 logger.error("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
411 raise ParameterError
412
413 for scm in scms:
414 if 'name' in urldata[scm].parm:
415 name = urldata[scm].parm["name"]
416 rev = urldata[scm].method.sortable_revision(scm, urldata[scm], d)
417 format = format.replace(name, rev)
418
419 return format
420
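# Hypothetical illustration of the SRCREV_FORMAT substitution above: with two
# named SCMs in SRC_URI (name=machine, name=meta) and SRCREV_FORMAT = "machine_meta",
# each name is replaced by that SCM's sortable revision (all values made up):
example_revs = {"machine": "123+a1b2c3d", "meta": "45+0e9f8d7"}
example_format = "machine_meta"
for example_name, example_rev in example_revs.items():
    example_format = example_format.replace(example_name, example_rev)
# example_format is now "123+a1b2c3d_45+0e9f8d7"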
421def localpath(url, d, cache = True):
422 """
423 Called from the parser with cache=False since the cache isn't ready
424    at this point. Also called from classes in OE, e.g. patch.bbclass
425 """
426 ud = init([url], d)
427 if ud[url].method:
428 return ud[url].localpath
429 return url
430
431def runfetchcmd(cmd, d, quiet = False):
432 """
433 Run cmd returning the command output
434 Raise an error if interrupted or cmd fails
435 Optionally echo command output to stdout
436 """
437
438 # Need to export PATH as binary could be in metadata paths
439 # rather than host provided
440 # Also include some other variables.
441    # FIXME: Should really include all export variables?
442 exportvars = ['PATH', 'GIT_PROXY_COMMAND', 'GIT_PROXY_HOST',
443 'GIT_PROXY_PORT', 'GIT_CONFIG', 'http_proxy', 'ftp_proxy',
444 'https_proxy', 'no_proxy', 'ALL_PROXY', 'all_proxy',
445 'SSH_AUTH_SOCK', 'SSH_AGENT_PID', 'HOME']
446
447 for var in exportvars:
448 val = data.getVar(var, d, True)
449 if val:
450 cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)
451
452 logger.debug(1, "Running %s", cmd)
453
454 # redirect stderr to stdout
455 stdout_handle = os.popen(cmd + " 2>&1", "r")
456 output = ""
457
458 while True:
459 line = stdout_handle.readline()
460 if not line:
461 break
462 if not quiet:
463 print(line, end=' ')
464 output += line
465
466 status = stdout_handle.close() or 0
467 signal = status >> 8
468 exitstatus = status & 0xff
469
470 if signal:
471 raise FetchError("Fetch command %s failed with signal %s, output:\n%s" % (cmd, signal, output))
472 elif status != 0:
473 raise FetchError("Fetch command %s failed with exit code %s, output:\n%s" % (cmd, status, output))
474
475 return output
476
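# Illustration of the export prefix built above, with a hypothetical command
# and proxy value; the resulting shell snippet is what actually gets run:
example_cmd = "git ls-remote git://git.example.com/repo.git"
for example_var, example_val in [("http_proxy", "http://proxy.example.com:8080/")]:
    example_cmd = 'export ' + example_var + '="%s"; %s' % (example_val, example_cmd)
# example_cmd == 'export http_proxy="http://proxy.example.com:8080/"; '
#                'git ls-remote git://git.example.com/repo.git'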
477def try_mirrors(d, uri, mirrors, check = False, force = False):
478 """
479 Try to use a mirrored version of the sources.
480 This method will be automatically called before the fetchers go.
481
482    d is a bb.data instance
483 uri is the original uri we're trying to download
484 mirrors is the list of mirrors we're going to try
485 """
486 fpath = os.path.join(data.getVar("DL_DIR", d, 1), os.path.basename(uri))
487 if not check and os.access(fpath, os.R_OK) and not force:
488 logger.debug(1, "%s already exists, skipping checkout.", fpath)
489 return fpath
490
491 ld = d.createCopy()
492 for (find, replace) in mirrors:
493 newuri = uri_replace(uri, find, replace, ld)
494 if newuri != uri:
495 try:
496 ud = FetchData(newuri, ld)
497 except bb.fetch.NoMethodError:
498 logger.debug(1, "No method for %s", uri)
499 continue
500
501 ud.setup_localpath(ld)
502
503 try:
504 if check:
505 found = ud.method.checkstatus(newuri, ud, ld)
506 if found:
507 return found
508 else:
509 ud.method.go(newuri, ud, ld)
510 return ud.localpath
511 except (bb.fetch.MissingParameterError,
512 bb.fetch.FetchError,
513 bb.fetch.MD5SumError):
514 import sys
515 (type, value, traceback) = sys.exc_info()
516 logger.debug(2, "Mirror fetch failure: %s", value)
517 removefile(ud.localpath)
518 continue
519 return None
520
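# Illustrative only: the "mirrors" argument above is the list of
# [find, replace] pairs that mirror_from_string() builds from a
# MIRRORS/PREMIRRORS-style value (URLs below are hypothetical):
example_pairs = mirror_from_string(
    "git://.*/.* http://downloads.example.com/sources/ \\n"
    "ftp://.*/.* http://downloads.example.com/sources/ \\n")
# example_pairs == [['git://.*/.*', 'http://downloads.example.com/sources/'],
#                   ['ftp://.*/.*', 'http://downloads.example.com/sources/']]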
521
522class FetchData(object):
523 """
524 A class which represents the fetcher state for a given URI.
525 """
526 def __init__(self, url, d):
527 self.localfile = ""
528 (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(data.expand(url, d))
529 self.date = Fetch.getSRCDate(self, d)
530 self.url = url
531 if not self.user and "user" in self.parm:
532 self.user = self.parm["user"]
533 if not self.pswd and "pswd" in self.parm:
534 self.pswd = self.parm["pswd"]
535 self.setup = False
536
537 if "name" in self.parm:
538 self.md5_name = "%s.md5sum" % self.parm["name"]
539 self.sha256_name = "%s.sha256sum" % self.parm["name"]
540 else:
541 self.md5_name = "md5sum"
542 self.sha256_name = "sha256sum"
543 self.md5_expected = bb.data.getVarFlag("SRC_URI", self.md5_name, d)
544 self.sha256_expected = bb.data.getVarFlag("SRC_URI", self.sha256_name, d)
545
546 for m in methods:
547 if m.supports(url, self, d):
548 self.method = m
549 return
550 raise NoMethodError("Missing implementation for url %s" % url)
551
552 def setup_localpath(self, d):
553 self.setup = True
554 if "localpath" in self.parm:
555 # if user sets localpath for file, use it instead.
556 self.localpath = self.parm["localpath"]
557 self.basename = os.path.basename(self.localpath)
558 else:
559 premirrors = bb.data.getVar('PREMIRRORS', d, True)
560 local = ""
561 if premirrors and self.url:
562 aurl = self.url.split(";")[0]
563 mirrors = mirror_from_string(premirrors)
564 for (find, replace) in mirrors:
565 if replace.startswith("file://"):
566 path = aurl.split("://")[1]
567 path = path.split(";")[0]
568 local = replace.split("://")[1] + os.path.basename(path)
569 if local == aurl or not os.path.exists(local) or os.path.isdir(local):
570 local = ""
571 self.localpath = local
572 if not local:
573 try:
574 bb.fetch.srcrev_internal_call = True
575 self.localpath = self.method.localpath(self.url, self, d)
576 finally:
577 bb.fetch.srcrev_internal_call = False
578 # We have to clear data's internal caches since the cached value of SRCREV is now wrong.
579 # Horrible...
580 bb.data.delVar("ISHOULDNEVEREXIST", d)
581
582 if self.localpath is not None:
583 # Note: These files should always be in DL_DIR whereas localpath may not be.
584 basepath = bb.data.expand("${DL_DIR}/%s" % os.path.basename(self.localpath), d)
585 self.md5 = basepath + '.md5'
586 self.lockfile = basepath + '.lock'
587
588
589class Fetch(object):
590 """Base class for 'fetch'ing data"""
591
592 def __init__(self, urls = []):
593 self.urls = []
594
595 def supports(self, url, urldata, d):
596 """
597 Check to see if this fetch class supports a given url.
598 """
599 return 0
600
601 def localpath(self, url, urldata, d):
602 """
603 Return the local filename of a given url assuming a successful fetch.
604 Can also setup variables in urldata for use in go (saving code duplication
605 and duplicate code execution)
606 """
607 return url
608 def _strip_leading_slashes(self, relpath):
609 """
610 Remove leading slash as os.path.join can't cope
611 """
612 while os.path.isabs(relpath):
613 relpath = relpath[1:]
614 return relpath
615
616 def setUrls(self, urls):
617 self.__urls = urls
618
619 def getUrls(self):
620 return self.__urls
621
622 urls = property(getUrls, setUrls, None, "Urls property")
623
624 def forcefetch(self, url, urldata, d):
625 """
626 Force a fetch, even if localpath exists?
627 """
628 return False
629
630 def supports_srcrev(self):
631 """
632 The fetcher supports auto source revisions (SRCREV)
633 """
634 return False
635
636 def go(self, url, urldata, d):
637 """
638 Fetch urls
639 Assumes localpath was called first
640 """
641 raise NoMethodError("Missing implementation for url")
642
643 def try_premirror(self, url, urldata, d):
644 """
645 Should premirrors be used?
646 """
647 if urldata.method.forcefetch(url, urldata, d):
648 return True
649 elif os.path.exists(urldata.md5) and os.path.exists(urldata.localfile):
650 return False
651 else:
652 return True
653
654 def checkstatus(self, url, urldata, d):
655 """
656 Check the status of a URL
657 Assumes localpath was called first
658 """
659 logger.info("URL %s could not be checked for status since no method exists.", url)
660 return True
661
662 def getSRCDate(urldata, d):
663 """
664 Return the SRC Date for the component
665
666 d the bb.data module
667 """
668 if "srcdate" in urldata.parm:
669 return urldata.parm['srcdate']
670
671 pn = data.getVar("PN", d, 1)
672
673 if pn:
674 return data.getVar("SRCDATE_%s" % pn, d, 1) or data.getVar("CVSDATE_%s" % pn, d, 1) or data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)
675
676 return data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)
677 getSRCDate = staticmethod(getSRCDate)
678
679 def srcrev_internal_helper(ud, d):
680 """
681 Return:
682 a) a source revision if specified
683 b) True if auto srcrev is in action
684 c) False otherwise
685 """
686
687 if 'rev' in ud.parm:
688 return ud.parm['rev']
689
690 if 'tag' in ud.parm:
691 return ud.parm['tag']
692
693 rev = None
694 if 'name' in ud.parm:
695 pn = data.getVar("PN", d, 1)
696 rev = data.getVar("SRCREV_%s_pn-%s" % (ud.parm['name'], pn), d, 1)
697 if not rev:
698 rev = data.getVar("SRCREV_pn-%s_%s" % (pn, ud.parm['name']), d, 1)
699 if not rev:
700 rev = data.getVar("SRCREV_%s" % (ud.parm['name']), d, 1)
701 if not rev:
702 rev = data.getVar("SRCREV", d, 1)
703 if rev == "INVALID":
704 raise InvalidSRCREV("Please set SRCREV to a valid value")
705 if not rev:
706 return False
707 if rev is "SRCREVINACTION":
708 return True
709 return rev
710
711 srcrev_internal_helper = staticmethod(srcrev_internal_helper)
712
713 def localcount_internal_helper(ud, d):
714 """
715 Return:
716 a) a locked localcount if specified
717 b) None otherwise
718 """
719
720 localcount = None
721 if 'name' in ud.parm:
722 pn = data.getVar("PN", d, 1)
723 localcount = data.getVar("LOCALCOUNT_" + ud.parm['name'], d, 1)
724 if not localcount:
725 localcount = data.getVar("LOCALCOUNT", d, 1)
726 return localcount
727
728 localcount_internal_helper = staticmethod(localcount_internal_helper)
729
730 def verify_md5sum(ud, got_sum):
731 """
732 Verify the md5sum we wanted with the one we got
733 """
734 wanted_sum = ud.parm.get('md5sum')
735 if not wanted_sum:
736 return True
737
738 return wanted_sum == got_sum
739 verify_md5sum = staticmethod(verify_md5sum)
740
741 def write_md5sum(url, ud, d):
742 md5data = bb.utils.md5_file(ud.localpath)
743 # verify the md5sum
744 if not Fetch.verify_md5sum(ud, md5data):
745 raise MD5SumError(url)
746
747 md5out = file(ud.md5, 'w')
748 md5out.write(md5data)
749 md5out.close()
750 write_md5sum = staticmethod(write_md5sum)
751
752 def latest_revision(self, url, ud, d):
753 """
754 Look in the cache for the latest revision, if not present ask the SCM.
755 """
756 if not hasattr(self, "_latest_revision"):
757 raise ParameterError
758
759 pd = persist_data.persist(d)
760 revs = pd['BB_URI_HEADREVS']
761 key = self.generate_revision_key(url, ud, d)
762 rev = revs[key]
763 if rev != None:
764 return str(rev)
765
766 revs[key] = rev = self._latest_revision(url, ud, d)
767 return rev
768
769 def sortable_revision(self, url, ud, d):
770 """
771
772 """
773 if hasattr(self, "_sortable_revision"):
774 return self._sortable_revision(url, ud, d)
775
776 pd = persist_data.persist(d)
777 localcounts = pd['BB_URI_LOCALCOUNT']
778 key = self.generate_revision_key(url, ud, d)
779
780 latest_rev = self._build_revision(url, ud, d)
781 last_rev = localcounts[key + '_rev']
782 uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False
783 count = None
784 if uselocalcount:
785 count = Fetch.localcount_internal_helper(ud, d)
786 if count is None:
787 count = localcounts[key + '_count']
788
789 if last_rev == latest_rev:
790 return str(count + "+" + latest_rev)
791
792 buildindex_provided = hasattr(self, "_sortable_buildindex")
793 if buildindex_provided:
794 count = self._sortable_buildindex(url, ud, d, latest_rev)
795
796 if count is None:
797 count = "0"
798 elif uselocalcount or buildindex_provided:
799 count = str(count)
800 else:
801 count = str(int(count) + 1)
802
803 localcounts[key + '_rev'] = latest_rev
804 localcounts[key + '_count'] = count
805
806 return str(count + "+" + latest_rev)
807
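# Toy illustration of the "<count>+<revision>" scheme above: the local count
# only increases when the upstream revision changes, keeping the result
# sortable even though SCM revisions themselves are not (values made up):
def _bump(count, last_rev, latest_rev):
    if last_rev != latest_rev:
        count = str(int(count) + 1)
    return count + "+" + latest_rev
# _bump("3", "a1b2c3d", "a1b2c3d") -> "3+a1b2c3d"   (no upstream change)
# _bump("3", "a1b2c3d", "e4f5a6b") -> "4+e4f5a6b"   (upstream moved on)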
808 def generate_revision_key(self, url, ud, d):
809 key = self._revision_key(url, ud, d)
810 return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "")
811
812from . import cvs
813from . import git
814from . import local
815from . import svn
816from . import wget
817from . import svk
818from . import ssh
819from . import perforce
820from . import bzr
821from . import hg
822from . import osc
823from . import repo
824
825methods.append(local.Local())
826methods.append(wget.Wget())
827methods.append(svn.Svn())
828methods.append(git.Git())
829methods.append(cvs.Cvs())
830methods.append(svk.Svk())
831methods.append(ssh.SSH())
832methods.append(perforce.Perforce())
833methods.append(bzr.Bzr())
834methods.append(hg.Hg())
835methods.append(osc.Osc())
836methods.append(repo.Repo())
diff --git a/bitbake/lib/bb/fetch2/bzr.py b/bitbake/lib/bb/fetch2/bzr.py
new file mode 100644
index 0000000000..afaf799900
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/bzr.py
@@ -0,0 +1,148 @@
1"""
2BitBake 'Fetch' implementation for bzr.
3
4"""
5
6# Copyright (C) 2007 Ross Burton
7# Copyright (C) 2007 Richard Purdie
8#
9# Classes for obtaining upstream sources for the
10# BitBake build tools.
11# Copyright (C) 2003, 2004 Chris Larson
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25
26import os
27import sys
28import logging
29import bb
30from bb import data
31from bb.fetch import Fetch, FetchError, runfetchcmd, logger
32
33class Bzr(Fetch):
34 def supports(self, url, ud, d):
35 return ud.type in ['bzr']
36
37 def localpath (self, url, ud, d):
38
39 # Create paths to bzr checkouts
40 relpath = self._strip_leading_slashes(ud.path)
41 ud.pkgdir = os.path.join(data.expand('${BZRDIR}', d), ud.host, relpath)
42
43 revision = Fetch.srcrev_internal_helper(ud, d)
44 if revision is True:
45 ud.revision = self.latest_revision(url, ud, d)
46 elif revision:
47 ud.revision = revision
48
49 if not ud.revision:
50 ud.revision = self.latest_revision(url, ud, d)
51
52 ud.localfile = data.expand('bzr_%s_%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.revision), d)
53
54 return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
55
56 def _buildbzrcommand(self, ud, d, command):
57 """
58        Build up a bzr commandline based on ud
59 command is "fetch", "update", "revno"
60 """
61
62 basecmd = data.expand('${FETCHCMD_bzr}', d)
63
64 proto = ud.parm.get('proto', 'http')
65
66 bzrroot = ud.host + ud.path
67
68 options = []
69
70 if command is "revno":
71 bzrcmd = "%s revno %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
72 else:
73 if ud.revision:
74 options.append("-r %s" % ud.revision)
75
76 if command is "fetch":
77 bzrcmd = "%s co %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
78 elif command is "update":
79 bzrcmd = "%s pull %s --overwrite" % (basecmd, " ".join(options))
80 else:
81 raise FetchError("Invalid bzr command %s" % command)
82
83 return bzrcmd
84
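# A sketch of the kind of checkout command _buildbzrcommand() produces for
# command == "fetch", assuming ${FETCHCMD_bzr} expands to plain "bzr" and
# using made-up host/path/revision values:
example_basecmd, example_proto = "bzr", "http"
example_root, example_rev = "bzr.example.com/trunk", "1234"
example_cmd = "%s co %s %s://%s" % (example_basecmd, "-r %s" % example_rev,
                                    example_proto, example_root)
# example_cmd == "bzr co -r 1234 http://bzr.example.com/trunk"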
85 def go(self, loc, ud, d):
86 """Fetch url"""
87
88 if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK):
89 bzrcmd = self._buildbzrcommand(ud, d, "update")
90 logger.debug(1, "BZR Update %s", loc)
91 os.chdir(os.path.join (ud.pkgdir, os.path.basename(ud.path)))
92 runfetchcmd(bzrcmd, d)
93 else:
94 bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True)
95 bzrcmd = self._buildbzrcommand(ud, d, "fetch")
96 logger.debug(1, "BZR Checkout %s", loc)
97 bb.mkdirhier(ud.pkgdir)
98 os.chdir(ud.pkgdir)
99 logger.debug(1, "Running %s", bzrcmd)
100 runfetchcmd(bzrcmd, d)
101
102 os.chdir(ud.pkgdir)
103
104 scmdata = ud.parm.get("scmdata", "")
105 if scmdata == "keep":
106 tar_flags = ""
107 else:
108 tar_flags = "--exclude '.bzr' --exclude '.bzrtags'"
109
110 # tar them up to a defined filename
111 try:
112 runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)), d)
113 except:
114 t, v, tb = sys.exc_info()
115 try:
116 os.unlink(ud.localpath)
117 except OSError:
118 pass
119 raise t, v, tb
120
121 def supports_srcrev(self):
122 return True
123
124 def _revision_key(self, url, ud, d):
125 """
126 Return a unique key for the url
127 """
128 return "bzr:" + ud.pkgdir
129
130 def _latest_revision(self, url, ud, d):
131 """
132 Return the latest upstream revision number
133 """
134 logger.debug(2, "BZR fetcher hitting network for %s", url)
135
136 output = runfetchcmd(self._buildbzrcommand(ud, d, "revno"), d, True)
137
138 return output.strip()
139
140 def _sortable_revision(self, url, ud, d):
141 """
142 Return a sortable revision number which in our case is the revision number
143 """
144
145 return self._build_revision(url, ud, d)
146
147 def _build_revision(self, url, ud, d):
148 return ud.revision
diff --git a/bitbake/lib/bb/fetch2/cvs.py b/bitbake/lib/bb/fetch2/cvs.py
new file mode 100644
index 0000000000..0edb794b04
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/cvs.py
@@ -0,0 +1,172 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6Classes for obtaining upstream sources for the
7BitBake build tools.
8
9"""
10
11# Copyright (C) 2003, 2004 Chris Larson
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26#Based on functions from the base bb module, Copyright 2003 Holger Schurig
27#
28
29import os
30import logging
31import bb
32from bb import data
33from bb.fetch import Fetch, FetchError, MissingParameterError, logger
34
35class Cvs(Fetch):
36 """
37 Class to fetch a module or modules from cvs repositories
38 """
39 def supports(self, url, ud, d):
40 """
41 Check to see if a given url can be fetched with cvs.
42 """
43 return ud.type in ['cvs']
44
45 def localpath(self, url, ud, d):
46 if not "module" in ud.parm:
47 raise MissingParameterError("cvs method needs a 'module' parameter")
48 ud.module = ud.parm["module"]
49
50 ud.tag = ud.parm.get('tag', "")
51
52 # Override the default date in certain cases
53 if 'date' in ud.parm:
54 ud.date = ud.parm['date']
55 elif ud.tag:
56 ud.date = ""
57
58 norecurse = ''
59 if 'norecurse' in ud.parm:
60 norecurse = '_norecurse'
61
62 fullpath = ''
63 if 'fullpath' in ud.parm:
64 fullpath = '_fullpath'
65
66 ud.localfile = data.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath), d)
67
68 return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
69
70 def forcefetch(self, url, ud, d):
71 if (ud.date == "now"):
72 return True
73 return False
74
75 def go(self, loc, ud, d):
76
77 method = ud.parm.get('method', 'pserver')
78 localdir = ud.parm.get('localdir', ud.module)
79 cvs_port = ud.parm.get('port', '')
80
81 cvs_rsh = None
82 if method == "ext":
83 if "rsh" in ud.parm:
84 cvs_rsh = ud.parm["rsh"]
85
86 if method == "dir":
87 cvsroot = ud.path
88 else:
89 cvsroot = ":" + method
90 cvsproxyhost = data.getVar('CVS_PROXY_HOST', d, True)
91 if cvsproxyhost:
92 cvsroot += ";proxy=" + cvsproxyhost
93 cvsproxyport = data.getVar('CVS_PROXY_PORT', d, True)
94 if cvsproxyport:
95 cvsroot += ";proxyport=" + cvsproxyport
96 cvsroot += ":" + ud.user
97 if ud.pswd:
98 cvsroot += ":" + ud.pswd
99 cvsroot += "@" + ud.host + ":" + cvs_port + ud.path
100
101 options = []
102 if 'norecurse' in ud.parm:
103 options.append("-l")
104 if ud.date:
105 # treat YYYYMMDDHHMM specially for CVS
106 if len(ud.date) == 12:
107 options.append("-D \"%s %s:%s UTC\"" % (ud.date[0:8], ud.date[8:10], ud.date[10:12]))
108 else:
109 options.append("-D \"%s UTC\"" % ud.date)
110 if ud.tag:
111 options.append("-r %s" % ud.tag)
112
113 localdata = data.createCopy(d)
114 data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata)
115 data.update_data(localdata)
116
117 data.setVar('CVSROOT', cvsroot, localdata)
118 data.setVar('CVSCOOPTS', " ".join(options), localdata)
119 data.setVar('CVSMODULE', ud.module, localdata)
120 cvscmd = data.getVar('FETCHCOMMAND', localdata, 1)
121 cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, 1)
122
123 if cvs_rsh:
124 cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
125 cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)
126
127 # create module directory
128 logger.debug(2, "Fetch: checking for module directory")
129 pkg = data.expand('${PN}', d)
130 pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg)
131 moddir = os.path.join(pkgdir, localdir)
132 if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
133 logger.info("Update " + loc)
134 # update sources there
135 os.chdir(moddir)
136 myret = os.system(cvsupdatecmd)
137 else:
138 logger.info("Fetch " + loc)
139 # check out sources there
140 bb.mkdirhier(pkgdir)
141 os.chdir(pkgdir)
142 logger.debug(1, "Running %s", cvscmd)
143 myret = os.system(cvscmd)
144
145 if myret != 0 or not os.access(moddir, os.R_OK):
146 try:
147 os.rmdir(moddir)
148 except OSError:
149 pass
150 raise FetchError(ud.module)
151
152 scmdata = ud.parm.get("scmdata", "")
153 if scmdata == "keep":
154 tar_flags = ""
155 else:
156 tar_flags = "--exclude 'CVS'"
157
158 # tar them up to a defined filename
159 if 'fullpath' in ud.parm:
160 os.chdir(pkgdir)
161 myret = os.system("tar %s -czf %s %s" % (tar_flags, ud.localpath, localdir))
162 else:
163 os.chdir(moddir)
164 os.chdir('..')
165 myret = os.system("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(moddir)))
166
167 if myret != 0:
168 try:
169 os.unlink(ud.localpath)
170 except OSError:
171 pass
172 raise FetchError(ud.module)
diff --git a/bitbake/lib/bb/fetch2/git.py b/bitbake/lib/bb/fetch2/git.py
new file mode 100644
index 0000000000..de415ec309
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/git.py
@@ -0,0 +1,261 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' git implementation
5
6"""
7
8#Copyright (C) 2005 Richard Purdie
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23import os
24import bb
25from bb import data
26from bb.fetch import Fetch
27from bb.fetch import runfetchcmd
28from bb.fetch import logger
29
30class Git(Fetch):
31 """Class to fetch a module or modules from git repositories"""
32 def init(self, d):
33 #
34 # Only enable _sortable revision if the key is set
35 #
36 if bb.data.getVar("BB_GIT_CLONE_FOR_SRCREV", d, True):
37 self._sortable_buildindex = self._sortable_buildindex_disabled
38 def supports(self, url, ud, d):
39 """
40 Check to see if a given url can be fetched with git.
41 """
42 return ud.type in ['git']
43
44 def localpath(self, url, ud, d):
45
46 if 'protocol' in ud.parm:
47 ud.proto = ud.parm['protocol']
48 elif not ud.host:
49 ud.proto = 'file'
50 else:
51 ud.proto = "rsync"
52
53 ud.branch = ud.parm.get("branch", "master")
54
55 gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.'))
56 ud.mirrortarball = 'git_%s.tar.gz' % (gitsrcname)
57 ud.clonedir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)
58
59 tag = Fetch.srcrev_internal_helper(ud, d)
60 if tag is True:
61 ud.tag = self.latest_revision(url, ud, d)
62 elif tag:
63 ud.tag = tag
64
65 if not ud.tag or ud.tag == "master":
66 ud.tag = self.latest_revision(url, ud, d)
67
68 subdir = ud.parm.get("subpath", "")
69 if subdir != "":
70 if subdir.endswith("/"):
71 subdir = subdir[:-1]
72 subdirpath = os.path.join(ud.path, subdir);
73 else:
74 subdirpath = ud.path;
75
76 if 'fullclone' in ud.parm:
77 ud.localfile = ud.mirrortarball
78 else:
79 ud.localfile = data.expand('git_%s%s_%s.tar.gz' % (ud.host, subdirpath.replace('/', '.'), ud.tag), d)
80
81 ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
82
83 if 'noclone' in ud.parm:
84 ud.localfile = None
85 return None
86
87 return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
88
89 def forcefetch(self, url, ud, d):
90 if 'fullclone' in ud.parm:
91 return True
92 if 'noclone' in ud.parm:
93 return False
94 if os.path.exists(ud.localpath):
95 return False
96 if not self._contains_ref(ud.tag, d):
97 return True
98 return False
99
100 def try_premirror(self, u, ud, d):
101 if 'noclone' in ud.parm:
102 return False
103 if os.path.exists(ud.clonedir):
104 return False
105 if os.path.exists(ud.localpath):
106 return False
107
108 return True
109
110 def go(self, loc, ud, d):
111 """Fetch url"""
112
113 if ud.user:
114 username = ud.user + '@'
115 else:
116 username = ""
117
118 repofile = os.path.join(data.getVar("DL_DIR", d, 1), ud.mirrortarball)
119
120 coname = '%s' % (ud.tag)
121 codir = os.path.join(ud.clonedir, coname)
122
123 # If we have no existing clone and no mirror tarball, try and obtain one
124 if not os.path.exists(ud.clonedir) and not os.path.exists(repofile):
125 try:
126 Fetch.try_mirrors(ud.mirrortarball)
127 except:
128 pass
129
130 # If the checkout doesn't exist and the mirror tarball does, extract it
131 if not os.path.exists(ud.clonedir) and os.path.exists(repofile):
132 bb.mkdirhier(ud.clonedir)
133 os.chdir(ud.clonedir)
134 runfetchcmd("tar -xzf %s" % (repofile), d)
135
136 # If the repo still doesn't exist, fallback to cloning it
137 if not os.path.exists(ud.clonedir):
138 runfetchcmd("%s clone -n %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.clonedir), d)
139
140 os.chdir(ud.clonedir)
141 # Update the checkout if needed
142 if not self._contains_ref(ud.tag, d) or 'fullclone' in ud.parm:
143 # Remove all but the .git directory
144 runfetchcmd("rm * -Rf", d)
145 if 'fullclone' in ud.parm:
146 runfetchcmd("%s fetch --all" % (ud.basecmd), d)
147 else:
148 runfetchcmd("%s fetch %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.branch), d)
149 runfetchcmd("%s fetch --tags %s://%s%s%s" % (ud.basecmd, ud.proto, username, ud.host, ud.path), d)
150 runfetchcmd("%s prune-packed" % ud.basecmd, d)
151 runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)
152
153 # Generate a mirror tarball if needed
154 os.chdir(ud.clonedir)
155 mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True)
156 if mirror_tarballs != "0" or 'fullclone' in ud.parm:
157 logger.info("Creating tarball of git repository")
158 runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ), d)
159
160 if 'fullclone' in ud.parm:
161 return
162
163 if os.path.exists(codir):
164 bb.utils.prunedir(codir)
165
166 subdir = ud.parm.get("subpath", "")
167 if subdir != "":
168 if subdir.endswith("/"):
169 subdirbase = os.path.basename(subdir[:-1])
170 else:
171 subdirbase = os.path.basename(subdir)
172 else:
173 subdirbase = ""
174
175 if subdir != "":
176 readpathspec = ":%s" % (subdir)
177 codir = os.path.join(codir, "git")
178 coprefix = os.path.join(codir, subdirbase, "")
179 else:
180 readpathspec = ""
181 coprefix = os.path.join(codir, "git", "")
182
183 scmdata = ud.parm.get("scmdata", "")
184 if scmdata == "keep":
185 runfetchcmd("%s clone -n %s %s" % (ud.basecmd, ud.clonedir, coprefix), d)
186 os.chdir(coprefix)
187 runfetchcmd("%s checkout -q -f %s%s" % (ud.basecmd, ud.tag, readpathspec), d)
188 else:
189 bb.mkdirhier(codir)
190 os.chdir(ud.clonedir)
191 runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.tag, readpathspec), d)
192 runfetchcmd("%s checkout-index -q -f --prefix=%s -a" % (ud.basecmd, coprefix), d)
193
194 os.chdir(codir)
195 logger.info("Creating tarball of git checkout")
196 runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*") ), d)
197
198 os.chdir(ud.clonedir)
199 bb.utils.prunedir(codir)
200
201 def supports_srcrev(self):
202 return True
203
204 def _contains_ref(self, tag, d):
205 basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
206 output = runfetchcmd("%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (basecmd, tag), d, quiet=True)
207 return output.split()[0] != "0"
208
209 def _revision_key(self, url, ud, d):
210 """
211 Return a unique key for the url
212 """
213 return "git:" + ud.host + ud.path.replace('/', '.') + ud.branch
214
215 def _latest_revision(self, url, ud, d):
216 """
217 Compute the HEAD revision for the url
218 """
219 if ud.user:
220 username = ud.user + '@'
221 else:
222 username = ""
223
224 basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
225 cmd = "%s ls-remote %s://%s%s%s %s" % (basecmd, ud.proto, username, ud.host, ud.path, ud.branch)
226 output = runfetchcmd(cmd, d, True)
227 if not output:
228 raise bb.fetch.FetchError("Fetch command %s gave empty output\n" % (cmd))
229 return output.split()[0]
230
231 def _build_revision(self, url, ud, d):
232 return ud.tag
233
234 def _sortable_buildindex_disabled(self, url, ud, d, rev):
235 """
236 Return a suitable buildindex for the revision specified. This is done by counting revisions
237 using "git rev-list" which may or may not work in different circumstances.
238 """
239
240 cwd = os.getcwd()
241
242 # Check if we have the rev already
243
244 if not os.path.exists(ud.clonedir):
245 print("no repo")
246 self.go(None, ud, d)
247 if not os.path.exists(ud.clonedir):
248 logger.error("GIT repository for %s doesn't exist in %s, cannot get sortable buildnumber, using old value", url, ud.clonedir)
249 return None
250
251
252 os.chdir(ud.clonedir)
253 if not self._contains_ref(rev, d):
254 self.go(None, ud, d)
255
256 output = runfetchcmd("%s rev-list %s -- 2> /dev/null | wc -l" % (ud.basecmd, rev), d, quiet=True)
257 os.chdir(cwd)
258
259 buildindex = "%s" % output.split()[0]
260 logger.debug(1, "GIT repository for %s in %s is returning %s revisions in rev-list before %s", url, ud.clonedir, buildindex, rev)
261 return buildindex
diff --git a/bitbake/lib/bb/fetch2/hg.py b/bitbake/lib/bb/fetch2/hg.py
new file mode 100644
index 0000000000..3c649a6ad0
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/hg.py
@@ -0,0 +1,180 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementation for mercurial DRCS (hg).
5
6"""
7
8# Copyright (C) 2003, 2004 Chris Larson
9# Copyright (C) 2004 Marcin Juszkiewicz
10# Copyright (C) 2007 Robert Schuster
11#
12# This program is free software; you can redistribute it and/or modify
13# it under the terms of the GNU General Public License version 2 as
14# published by the Free Software Foundation.
15#
16# This program is distributed in the hope that it will be useful,
17# but WITHOUT ANY WARRANTY; without even the implied warranty of
18# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19# GNU General Public License for more details.
20#
21# You should have received a copy of the GNU General Public License along
22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24#
25# Based on functions from the base bb module, Copyright 2003 Holger Schurig
26
27import os
28import sys
29import logging
30import bb
31from bb import data
32from bb.fetch import Fetch
33from bb.fetch import FetchError
34from bb.fetch import MissingParameterError
35from bb.fetch import runfetchcmd
36from bb.fetch import logger
37
38class Hg(Fetch):
39 """Class to fetch from mercurial repositories"""
40 def supports(self, url, ud, d):
41 """
42 Check to see if a given url can be fetched with mercurial.
43 """
44 return ud.type in ['hg']
45
46 def forcefetch(self, url, ud, d):
47 revTag = ud.parm.get('rev', 'tip')
48 return revTag == "tip"
49
50 def localpath(self, url, ud, d):
51 if not "module" in ud.parm:
52 raise MissingParameterError("hg method needs a 'module' parameter")
53
54 ud.module = ud.parm["module"]
55
56 # Create paths to mercurial checkouts
57 relpath = self._strip_leading_slashes(ud.path)
58 ud.pkgdir = os.path.join(data.expand('${HGDIR}', d), ud.host, relpath)
59 ud.moddir = os.path.join(ud.pkgdir, ud.module)
60
61 if 'rev' in ud.parm:
62 ud.revision = ud.parm['rev']
63 else:
64 tag = Fetch.srcrev_internal_helper(ud, d)
65 if tag is True:
66 ud.revision = self.latest_revision(url, ud, d)
67 elif tag:
68 ud.revision = tag
69 else:
70 ud.revision = self.latest_revision(url, ud, d)
71
72 ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d)
73
74 return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
75
76 def _buildhgcommand(self, ud, d, command):
77 """
78 Build up an hg commandline based on ud
79 command is "fetch", "update", "info"
80 """
81
82 basecmd = data.expand('${FETCHCMD_hg}', d)
83
84 proto = ud.parm.get('proto', 'http')
85
86 host = ud.host
87 if proto == "file":
88 host = "/"
89 ud.host = "localhost"
90
91 if not ud.user:
92 hgroot = host + ud.path
93 else:
94 hgroot = ud.user + "@" + host + ud.path
95
96 if command is "info":
97 return "%s identify -i %s://%s/%s" % (basecmd, proto, hgroot, ud.module)
98
99 options = [];
100 if ud.revision:
101 options.append("-r %s" % ud.revision)
102
103 if command is "fetch":
104 cmd = "%s clone %s %s://%s/%s %s" % (basecmd, " ".join(options), proto, hgroot, ud.module, ud.module)
105 elif command is "pull":
106 # do not pass options list; limiting pull to rev causes the local
107 # repo not to contain it and immediately following "update" command
108 # will crash
109 cmd = "%s pull" % (basecmd)
110 elif command is "update":
111 cmd = "%s update -C %s" % (basecmd, " ".join(options))
112 else:
113 raise FetchError("Invalid hg command %s" % command)
114
115 return cmd
116
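# A sketch of the commands _buildhgcommand() produces, assuming ${FETCHCMD_hg}
# expands to plain "hg" and using made-up host/path/module/revision values:
#   fetch:  hg clone -r abc123 http://hg.example.com/repos/project project
#   update: hg update -C -r abc123
example_info_cmd = "%s identify -i %s://%s/%s" % ("hg", "http",
                                                  "hg.example.com/repos", "project")
# example_info_cmd == "hg identify -i http://hg.example.com/repos/project"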
117 def go(self, loc, ud, d):
118 """Fetch url"""
119
120 logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
121
122 if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
123 updatecmd = self._buildhgcommand(ud, d, "pull")
124 logger.info("Update " + loc)
125 # update sources there
126 os.chdir(ud.moddir)
127 logger.debug(1, "Running %s", updatecmd)
128 runfetchcmd(updatecmd, d)
129
130 else:
131 fetchcmd = self._buildhgcommand(ud, d, "fetch")
132 logger.info("Fetch " + loc)
133 # check out sources there
134 bb.mkdirhier(ud.pkgdir)
135 os.chdir(ud.pkgdir)
136 logger.debug(1, "Running %s", fetchcmd)
137 runfetchcmd(fetchcmd, d)
138
139 # Even when we clone (fetch), we still need to update as hg's clone
140        # won't check out the specified revision if it's on a branch
141 updatecmd = self._buildhgcommand(ud, d, "update")
142 os.chdir(ud.moddir)
143 logger.debug(1, "Running %s", updatecmd)
144 runfetchcmd(updatecmd, d)
145
146 scmdata = ud.parm.get("scmdata", "")
147 if scmdata == "keep":
148 tar_flags = ""
149 else:
150            tar_flags = "--exclude '.hg' --exclude '.hgtags'"
151
152 os.chdir(ud.pkgdir)
153 try:
154 runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d)
155 except:
156 t, v, tb = sys.exc_info()
157 try:
158 os.unlink(ud.localpath)
159 except OSError:
160 pass
161 raise t, v, tb
162
163 def supports_srcrev(self):
164 return True
165
166 def _latest_revision(self, url, ud, d):
167 """
168 Compute tip revision for the url
169 """
170 output = runfetchcmd(self._buildhgcommand(ud, d, "info"), d)
171 return output.strip()
172
173 def _build_revision(self, url, ud, d):
174 return ud.revision
175
176 def _revision_key(self, url, ud, d):
177 """
178 Return a unique key for the url
179 """
180 return "hg:" + ud.moddir
diff --git a/bitbake/lib/bb/fetch2/local.py b/bitbake/lib/bb/fetch2/local.py
new file mode 100644
index 0000000000..6aa9e45768
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/local.py
@@ -0,0 +1,73 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6Classes for obtaining upstream sources for the
7BitBake build tools.
8
9"""
10
11# Copyright (C) 2003, 2004 Chris Larson
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26# Based on functions from the base bb module, Copyright 2003 Holger Schurig
27
28import os
29import bb
30import bb.utils
31from bb import data
32from bb.fetch import Fetch, logger
33
34class Local(Fetch):
35 def supports(self, url, urldata, d):
36 """
37 Check to see if a given url represents a local fetch.
38 """
39 return urldata.type in ['file']
40
41 def localpath(self, url, urldata, d):
42 """
43 Return the local filename of a given url assuming a successful fetch.
44 """
45 path = url.split("://")[1]
46 path = path.split(";")[0]
47 newpath = path
48 if path[0] != "/":
49 filespath = data.getVar('FILESPATH', d, 1)
50 if filespath:
51 newpath = bb.utils.which(filespath, path)
52 if not newpath:
53 filesdir = data.getVar('FILESDIR', d, 1)
54 if filesdir:
55 newpath = os.path.join(filesdir, path)
56 # We don't set localfile as for this fetcher the file is already local!
57 return newpath
58
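# Rough standalone equivalent of the FILESPATH search used above; bb.utils.which
# is assumed to behave like this (helper name and directories are hypothetical):
import os

def _search_filespath(filespath, relpath):
    for directory in filespath.split(":"):
        candidate = os.path.join(directory, relpath)
        if os.path.exists(candidate):
            return candidate
    return ""
# _search_filespath("/a/files:/b/files", "defconfig") returns the first existing
# "<dir>/defconfig"; absolute file:// paths bypass this search entirely.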
59 def go(self, url, urldata, d):
60 """Fetch urls (no-op for Local method)"""
61 # no need to fetch local files, we'll deal with them in place.
62 return 1
63
64 def checkstatus(self, url, urldata, d):
65 """
66 Check the status of the url
67 """
68 if urldata.localpath.find("*") != -1:
69 logger.info("URL %s looks like a glob and was therefore not checked.", url)
70 return True
71 if os.path.exists(urldata.localpath):
72 return True
73 return False
diff --git a/bitbake/lib/bb/fetch2/osc.py b/bitbake/lib/bb/fetch2/osc.py
new file mode 100644
index 0000000000..26820967a3
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/osc.py
@@ -0,0 +1,143 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4Bitbake "Fetch" implementation for osc (Opensuse build service client).
5Based on the svn "Fetch" implementation.
6
7"""
8
9import os
10import sys
11import logging
12import bb
13from bb import data
14from bb.fetch import Fetch
15from bb.fetch import FetchError
16from bb.fetch import MissingParameterError
17from bb.fetch import runfetchcmd, logger
18
19class Osc(Fetch):
20 """Class to fetch a module or modules from Opensuse build server
21 repositories."""
22
23 def supports(self, url, ud, d):
24 """
25 Check to see if a given url can be fetched with osc.
26 """
27 return ud.type in ['osc']
28
29 def localpath(self, url, ud, d):
30 if not "module" in ud.parm:
31 raise MissingParameterError("osc method needs a 'module' parameter.")
32
33 ud.module = ud.parm["module"]
34
35 # Create paths to osc checkouts
36 relpath = self._strip_leading_slashes(ud.path)
37 ud.pkgdir = os.path.join(data.expand('${OSCDIR}', d), ud.host)
38 ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module)
39
40 if 'rev' in ud.parm:
41 ud.revision = ud.parm['rev']
42 else:
43 pv = data.getVar("PV", d, 0)
44 rev = Fetch.srcrev_internal_helper(ud, d)
45 if rev and rev != True:
46 ud.revision = rev
47 else:
48 ud.revision = ""
49
50 ud.localfile = data.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.path.replace('/', '.'), ud.revision), d)
51
52 return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
53
54 def _buildosccommand(self, ud, d, command):
55 """
56        Build up an osc commandline based on ud
57 command is "fetch", "update", "info"
58 """
59
60 basecmd = data.expand('${FETCHCMD_osc}', d)
61
62 proto = ud.parm.get('proto', 'ocs')
63
64 options = []
65
66 config = "-c %s" % self.generate_config(ud, d)
67
68 if ud.revision:
69 options.append("-r %s" % ud.revision)
70
71 coroot = self._strip_leading_slashes(ud.path)
72
73 if command is "fetch":
74 osccmd = "%s %s co %s/%s %s" % (basecmd, config, coroot, ud.module, " ".join(options))
75 elif command is "update":
76 osccmd = "%s %s up %s" % (basecmd, config, " ".join(options))
77 else:
78 raise FetchError("Invalid osc command %s" % command)
79
80 return osccmd
81
82 def go(self, loc, ud, d):
83 """
84 Fetch url
85 """
86
87 logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
88
89 if os.access(os.path.join(data.expand('${OSCDIR}', d), ud.path, ud.module), os.R_OK):
90 oscupdatecmd = self._buildosccommand(ud, d, "update")
91 logger.info("Update "+ loc)
92 # update sources there
93 os.chdir(ud.moddir)
94 logger.debug(1, "Running %s", oscupdatecmd)
95 runfetchcmd(oscupdatecmd, d)
96 else:
97 oscfetchcmd = self._buildosccommand(ud, d, "fetch")
98 logger.info("Fetch " + loc)
99 # check out sources there
100 bb.mkdirhier(ud.pkgdir)
101 os.chdir(ud.pkgdir)
102 logger.debug(1, "Running %s", oscfetchcmd)
103 runfetchcmd(oscfetchcmd, d)
104
105 os.chdir(os.path.join(ud.pkgdir + ud.path))
106 # tar them up to a defined filename
107 try:
108 runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d)
109 except:
110 t, v, tb = sys.exc_info()
111 try:
112 os.unlink(ud.localpath)
113 except OSError:
114 pass
115 raise t, v, tb
116
117 def supports_srcrev(self):
118 return False
119
120 def generate_config(self, ud, d):
121 """
122 Generate a .oscrc to be used for this run.
123 """
124
125 config_path = os.path.join(data.expand('${OSCDIR}', d), "oscrc")
126 if (os.path.exists(config_path)):
127 os.remove(config_path)
128
129 f = open(config_path, 'w')
130 f.write("[general]\n")
131 f.write("apisrv = %s\n" % ud.host)
132 f.write("scheme = http\n")
133 f.write("su-wrapper = su -c\n")
134 f.write("build-root = %s\n" % data.expand('${WORKDIR}', d))
135 f.write("urllist = http://moblin-obs.jf.intel.com:8888/build/%(project)s/%(repository)s/%(buildarch)s/:full/%(name)s.rpm\n")
136 f.write("extra-pkgs = gzip\n")
137 f.write("\n")
138 f.write("[%s]\n" % ud.host)
139 f.write("user = %s\n" % ud.parm["user"])
140 f.write("pass = %s\n" % ud.parm["pswd"])
141 f.close()
142
143 return config_path
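For orientation, here is a small, self-contained sketch of how this class names its download tarball. The URL, module and path below are invented for illustration and are not taken from the patch; only the '%s_%s_%s.tar.gz' format string mirrors localpath() above.

    # Hypothetical osc URL: osc://api.example.org/some:project;module=hello;rev=12
    module, path, revision = "hello", "/some:project", "12"
    localfile = '%s_%s_%s.tar.gz' % (module.replace('/', '.'),
                                     path.replace('/', '.'), revision)
    print(localfile)   # hello_.some:project_12.tar.gz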
diff --git a/bitbake/lib/bb/fetch2/perforce.py b/bitbake/lib/bb/fetch2/perforce.py
new file mode 100644
index 0000000000..222ed7eaaa
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/perforce.py
@@ -0,0 +1,206 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6Classes for obtaining upstream sources for the
7BitBake build tools.
8
9"""
10
11# Copyright (C) 2003, 2004 Chris Larson
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26# Based on functions from the base bb module, Copyright 2003 Holger Schurig
27
28from future_builtins import zip
29import os
30import logging
31import bb
32from bb import data
33from bb.fetch import Fetch
34from bb.fetch import FetchError
35from bb.fetch import logger
36
37class Perforce(Fetch):
38 def supports(self, url, ud, d):
39 return ud.type in ['p4']
40
41 def doparse(url, d):
42 parm = {}
43 path = url.split("://")[1]
44 delim = path.find("@")
45 if delim != -1:
46 (user, pswd, host, port) = path.split('@')[0].split(":")
47 path = path.split('@')[1]
48 else:
49 (host, port) = data.getVar('P4PORT', d).split(':')
50 user = ""
51 pswd = ""
52
53 if path.find(";") != -1:
54 keys=[]
55 values=[]
56 plist = path.split(';')
57 for item in plist:
58 if item.count('='):
59 (key, value) = item.split('=')
60 keys.append(key)
61 values.append(value)
62
63 parm = dict(zip(keys, values))
64 path = "//" + path.split(';')[0]
65 host += ":%s" % (port)
66 parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm)
67
68 return host, path, user, pswd, parm
69 doparse = staticmethod(doparse)
70
71 def getcset(d, depot, host, user, pswd, parm):
72 p4opt = ""
73 if "cset" in parm:
74 return parm["cset"]
75 if user:
76 p4opt += " -u %s" % (user)
77 if pswd:
78 p4opt += " -P %s" % (pswd)
79 if host:
80 p4opt += " -p %s" % (host)
81
82 p4date = data.getVar("P4DATE", d, 1)
83 if "revision" in parm:
84 depot += "#%s" % (parm["revision"])
85 elif "label" in parm:
86 depot += "@%s" % (parm["label"])
87 elif p4date:
88 depot += "@%s" % (p4date)
89
90 p4cmd = data.getVar('FETCHCOMMAND_p4', d, 1)
91 logger.debug(1, "Running %s%s changes -m 1 %s", p4cmd, p4opt, depot)
92 p4file = os.popen("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
93 cset = p4file.readline().strip()
94 logger.debug(1, "READ %s", cset)
95 if not cset:
96 return -1
97
98 return cset.split(' ')[1]
99 getcset = staticmethod(getcset)
100
101 def localpath(self, url, ud, d):
102
103 (host, path, user, pswd, parm) = Perforce.doparse(url, d)
104
105 # If a label is specified, we use that as our filename
106
107 if "label" in parm:
108 ud.localfile = "%s.tar.gz" % (parm["label"])
109 return os.path.join(data.getVar("DL_DIR", d, 1), ud.localfile)
110
111 base = path
112 which = path.find('/...')
113 if which != -1:
114 base = path[:which]
115
116 base = self._strip_leading_slashes(base)
117
118 cset = Perforce.getcset(d, path, host, user, pswd, parm)
119
120 ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base.replace('/', '.'), cset), d)
121
122 return os.path.join(data.getVar("DL_DIR", d, 1), ud.localfile)
123
124 def go(self, loc, ud, d):
125 """
126 Fetch urls
127 """
128
129 (host, depot, user, pswd, parm) = Perforce.doparse(loc, d)
130
131 if depot.find('/...') != -1:
132 path = depot[:depot.find('/...')]
133 else:
134 path = depot
135
136 module = parm.get('module', os.path.basename(path))
137
138 localdata = data.createCopy(d)
139 data.setVar('OVERRIDES', "p4:%s" % data.getVar('OVERRIDES', localdata), localdata)
140 data.update_data(localdata)
141
142 # Get the p4 command
143 p4opt = ""
144 if user:
145 p4opt += " -u %s" % (user)
146
147 if pswd:
148 p4opt += " -P %s" % (pswd)
149
150 if host:
151 p4opt += " -p %s" % (host)
152
153 p4cmd = data.getVar('FETCHCOMMAND', localdata, 1)
154
155 # create temp directory
156 logger.debug(2, "Fetch: creating temporary directory")
157 bb.mkdirhier(data.expand('${WORKDIR}', localdata))
158 data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata)
159 tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
160 tmpfile = tmppipe.readline().strip()
161 if not tmpfile:
162 logger.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
163 raise FetchError(module)
164
165 if "label" in parm:
166 depot = "%s@%s" % (depot, parm["label"])
167 else:
168 cset = Perforce.getcset(d, depot, host, user, pswd, parm)
169 depot = "%s@%s" % (depot, cset)
170
171 os.chdir(tmpfile)
172 logger.info("Fetch " + loc)
173 logger.info("%s%s files %s", p4cmd, p4opt, depot)
174 p4file = os.popen("%s%s files %s" % (p4cmd, p4opt, depot))
175
176 if not p4file:
177 logger.error("Fetch: unable to get the P4 files from %s", depot)
178 raise FetchError(module)
179
180 count = 0
181
182 for file in p4file:
183 list = file.split()
184
185 if list[2] == "delete":
186 continue
187
188 dest = list[0][len(path)+1:]
189 where = dest.find("#")
190
191 os.system("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], list[0]))
192 count = count + 1
193
194 if count == 0:
195 logger.error("Fetch: No files gathered from the P4 fetch")
196 raise FetchError(module)
197
198 myret = os.system("tar -czf %s %s" % (ud.localpath, module))
199 if myret != 0:
200 try:
201 os.unlink(ud.localpath)
202 except OSError:
203 pass
204 raise FetchError(module)
205 # cleanup
206 bb.utils.prunedir(tmpfile)
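As a rough illustration of what doparse() extracts, the snippet below walks an invented p4 URL through the same string splits; none of the names are from the patch, and the real method additionally resolves a changeset via getcset().

    url = "p4://jdoe:secret:p4.example.com:1666@depot/project/...;label=release_1"
    path = url.split("://")[1]
    user = pswd = ""
    if path.find("@") != -1:
        (user, pswd, host, port) = path.split('@')[0].split(":")
        path = path.split('@')[1]
    parm = dict(p.split('=') for p in path.split(';')[1:] if '=' in p)
    path = "//" + path.split(';')[0]
    host += ":%s" % port
    print("%s %s %s %s" % (host, path, user, parm))
    # p4.example.com:1666 //depot/project/... jdoe {'label': 'release_1'}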
diff --git a/bitbake/lib/bb/fetch2/repo.py b/bitbake/lib/bb/fetch2/repo.py
new file mode 100644
index 0000000000..03642e7a0d
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/repo.py
@@ -0,0 +1,98 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake "Fetch" repo (git) implementation
5
6"""
7
8# Copyright (C) 2009 Tom Rini <trini@embeddedalley.com>
9#
10# Based on git.py which is:
11#Copyright (C) 2005 Richard Purdie
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25
26import os
27import bb
28from bb import data
29from bb.fetch import Fetch
30 from bb.fetch import runfetchcmd, logger
31
32class Repo(Fetch):
33 """Class to fetch a module or modules from repo (git) repositories"""
34 def supports(self, url, ud, d):
35 """
36 Check to see if a given url can be fetched with repo.
37 """
38 return ud.type in ["repo"]
39
40 def localpath(self, url, ud, d):
41 """
42 We don't care about the git rev of the manifests repository, but
43 we do care about the manifest to use. The default is "default".
44 We also care about the branch or tag to be used. The default is
45 "master".
46 """
47
48 ud.proto = ud.parm.get('protocol', 'git')
49 ud.branch = ud.parm.get('branch', 'master')
50 ud.manifest = ud.parm.get('manifest', 'default.xml')
51 if not ud.manifest.endswith('.xml'):
52 ud.manifest += '.xml'
53
54 ud.localfile = data.expand("repo_%s%s_%s_%s.tar.gz" % (ud.host, ud.path.replace("/", "."), ud.manifest, ud.branch), d)
55
56 return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
57
58 def go(self, loc, ud, d):
59 """Fetch url"""
60
61 if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK):
62 logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
63 return
64
65 gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", "."))
66 repodir = data.getVar("REPODIR", d, True) or os.path.join(data.getVar("DL_DIR", d, True), "repo")
67 codir = os.path.join(repodir, gitsrcname, ud.manifest)
68
69 if ud.user:
70 username = ud.user + "@"
71 else:
72 username = ""
73
74 bb.mkdirhier(os.path.join(codir, "repo"))
75 os.chdir(os.path.join(codir, "repo"))
76 if not os.path.exists(os.path.join(codir, "repo", ".repo")):
77 runfetchcmd("repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d)
78
79 runfetchcmd("repo sync", d)
80 os.chdir(codir)
81
82 scmdata = ud.parm.get("scmdata", "")
83 if scmdata == "keep":
84 tar_flags = ""
85 else:
86 tar_flags = "--exclude '.repo' --exclude '.git'"
87
88 # Create a cache
89 runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.join(".", "*") ), d)
90
91 def supports_srcrev(self):
92 return False
93
94 def _build_revision(self, url, ud, d):
95 return ud.manifest
96
97 def _want_sortable_revision(self, url, ud, d):
98 return False
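A minimal sketch of the command string go() builds for "repo init", using made-up host, branch and manifest values; the format string itself matches the runfetchcmd call above.

    host, path = "git.example.com", "/platform/manifest"
    proto, branch, manifest, username = "git", "release", "default.xml", ""
    cmd = "repo init -m %s -b %s -u %s://%s%s%s" % (manifest, branch, proto,
                                                    username, host, path)
    print(cmd)   # repo init -m default.xml -b release -u git://git.example.com/platform/manifest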
diff --git a/bitbake/lib/bb/fetch2/ssh.py b/bitbake/lib/bb/fetch2/ssh.py
new file mode 100644
index 0000000000..86c76f4e44
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/ssh.py
@@ -0,0 +1,118 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3'''
4BitBake 'Fetch' implementations
5
6This implementation is for Secure Shell (SSH), and attempts to comply with the
7IETF secsh internet draft:
8 http://tools.ietf.org/wg/secsh/draft-ietf-secsh-scp-sftp-ssh-uri/
9
10 Currently does not support the sftp parameters, as this uses scp
11 Also does not support the 'fingerprint' connection parameter.
12
13'''
14
15# Copyright (C) 2006 OpenedHand Ltd.
16#
17#
18# Based in part on svk.py:
19# Copyright (C) 2006 Holger Hans Peter Freyther
20# Based on svn.py:
21# Copyright (C) 2003, 2004 Chris Larson
22# Based on functions from the base bb module:
23# Copyright 2003 Holger Schurig
24#
25#
26# This program is free software; you can redistribute it and/or modify
27# it under the terms of the GNU General Public License version 2 as
28# published by the Free Software Foundation.
29#
30# This program is distributed in the hope that it will be useful,
31# but WITHOUT ANY WARRANTY; without even the implied warranty of
32# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
33# GNU General Public License for more details.
34#
35# You should have received a copy of the GNU General Public License along
36# with this program; if not, write to the Free Software Foundation, Inc.,
37# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
38
39import re, os
40from bb import data
41from bb.fetch import Fetch
42from bb.fetch import FetchError
43
44
45__pattern__ = re.compile(r'''
46 \s* # Skip leading whitespace
47 ssh:// # scheme
48 ( # Optional username/password block
49 (?P<user>\S+) # username
50 (:(?P<pass>\S+))? # colon followed by the password (optional)
51 )?
52 (?P<cparam>(;[^;]+)*)? # connection parameters block (optional)
53 @
54 (?P<host>\S+?) # non-greedy match of the host
55 (:(?P<port>[0-9]+))? # colon followed by the port (optional)
56 /
57 (?P<path>[^;]+) # path on the remote system, may be absolute or relative,
58 # and may include the use of '~' to reference the remote home
59 # directory
60 (?P<sparam>(;[^;]+)*)? # parameters block (optional)
61 $
62''', re.VERBOSE)
63
64class SSH(Fetch):
65 '''Class to fetch a module or modules via Secure Shell'''
66
67 def supports(self, url, urldata, d):
68 return __pattern__.match(url) is not None
69
70 def localpath(self, url, urldata, d):
71 m = __pattern__.match(url)
72 path = m.group('path')
73 host = m.group('host')
74 lpath = os.path.join(data.getVar('DL_DIR', d, True), host, os.path.basename(path))
75 return lpath
76
77 def go(self, url, urldata, d):
78 dldir = data.getVar('DL_DIR', d, 1)
79
80 m = __pattern__.match(url)
81 path = m.group('path')
82 host = m.group('host')
83 port = m.group('port')
84 user = m.group('user')
85 password = m.group('pass')
86
87 ldir = os.path.join(dldir, host)
88 lpath = os.path.join(ldir, os.path.basename(path))
89
90 if not os.path.exists(ldir):
91 os.makedirs(ldir)
92
93 if port:
94 port = '-P %s' % port
95 else:
96 port = ''
97
98 if user:
99 fr = user
100 if password:
101 fr += ':%s' % password
102 fr += '@%s' % host
103 else:
104 fr = host
105 fr += ':%s' % path
106
107
108 import commands
109 cmd = 'scp -B -r %s %s %s/' % (
110 port,
111 commands.mkarg(fr),
112 commands.mkarg(ldir)
113 )
114
115 (exitstatus, output) = commands.getstatusoutput(cmd)
116 if exitstatus != 0:
117 print(output)
118 raise FetchError('Unable to fetch %s' % url)
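To make the scp invocation concrete, here is a sketch with invented values; the real go() additionally shell-quotes the arguments with commands.mkarg() before running them.

    user, host, port, path = "builder", "files.example.com", "2222", "tarballs/foo-1.0.tar.gz"
    ldir = "/downloads/" + host                    # stands in for DL_DIR/<host>
    fr = ("%s@%s" % (user, host)) if user else host
    fr += ":%s" % path
    portopt = ("-P %s" % port) if port else ""
    print("scp -B -r %s %s %s/" % (portopt, fr, ldir))
    # scp -B -r -P 2222 builder@files.example.com:tarballs/foo-1.0.tar.gz /downloads/files.example.com/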
diff --git a/bitbake/lib/bb/fetch2/svk.py b/bitbake/lib/bb/fetch2/svk.py
new file mode 100644
index 0000000000..595a9da255
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/svk.py
@@ -0,0 +1,104 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6This implementation is for svk. It is based on the svn implementation
7
8"""
9
10# Copyright (C) 2006 Holger Hans Peter Freyther
11# Copyright (C) 2003, 2004 Chris Larson
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26# Based on functions from the base bb module, Copyright 2003 Holger Schurig
27
28import os
29import logging
30import bb
31from bb import data
32from bb.fetch import Fetch
33from bb.fetch import FetchError
34from bb.fetch import MissingParameterError
35from bb.fetch import logger
36
37class Svk(Fetch):
38 """Class to fetch a module or modules from svk repositories"""
39 def supports(self, url, ud, d):
40 """
41 Check to see if a given url can be fetched with svk.
42 """
43 return ud.type in ['svk']
44
45 def localpath(self, url, ud, d):
46 if "module" not in ud.parm:
47 raise MissingParameterError("svk method needs a 'module' parameter")
48 else:
49 ud.module = ud.parm["module"]
50
51 ud.revision = ud.parm.get('rev', "")
52
53 ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)
54
55 return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
56
57 def forcefetch(self, url, ud, d):
58 return ud.date == "now"
59
60 def go(self, loc, ud, d):
61 """Fetch urls"""
62
63 svkroot = ud.host + ud.path
64
65 svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module)
66
67 if ud.revision:
68 svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module)
69
70 # create temp directory
71 localdata = data.createCopy(d)
72 data.update_data(localdata)
73 logger.debug(2, "Fetch: creating temporary directory")
74 bb.mkdirhier(data.expand('${WORKDIR}', localdata))
75 data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
76 tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
77 tmpfile = tmppipe.readline().strip()
78 if not tmpfile:
79 logger.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
80 raise FetchError(ud.module)
81
82 # check out sources there
83 os.chdir(tmpfile)
84 logger.info("Fetch " + loc)
85 logger.debug(1, "Running %s", svkcmd)
86 myret = os.system(svkcmd)
87 if myret != 0:
88 try:
89 os.rmdir(tmpfile)
90 except OSError:
91 pass
92 raise FetchError(ud.module)
93
94 os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module)))
95 # tar them up to a defined filename
96 myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)))
97 if myret != 0:
98 try:
99 os.unlink(ud.localpath)
100 except OSError:
101 pass
102 raise FetchError(ud.module)
103 # cleanup
104 bb.utils.prunedir(tmpfile)
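A short sketch of the checkout command the svk fetcher assembles; host, path and module are invented, and the date form is used unless a 'rev' parameter was given.

    host, path, module = "svk.example.com", "/depot", "trunk/hello"
    date, revision = "20110110", ""
    svkroot = host + path
    svkcmd = "svk co -r {%s} %s/%s" % (date, svkroot, module)
    if revision:
        svkcmd = "svk co -r %s %s/%s" % (revision, svkroot, module)
    print(svkcmd)   # svk co -r {20110110} svk.example.com/depot/trunk/hello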
diff --git a/bitbake/lib/bb/fetch2/svn.py b/bitbake/lib/bb/fetch2/svn.py
new file mode 100644
index 0000000000..8f053abf74
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/svn.py
@@ -0,0 +1,204 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementation for svn.
5
6"""
7
8# Copyright (C) 2003, 2004 Chris Larson
9# Copyright (C) 2004 Marcin Juszkiewicz
10#
11# This program is free software; you can redistribute it and/or modify
12# it under the terms of the GNU General Public License version 2 as
13# published by the Free Software Foundation.
14#
15# This program is distributed in the hope that it will be useful,
16# but WITHOUT ANY WARRANTY; without even the implied warranty of
17# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18# GNU General Public License for more details.
19#
20# You should have received a copy of the GNU General Public License along
21# with this program; if not, write to the Free Software Foundation, Inc.,
22# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
23#
24# Based on functions from the base bb module, Copyright 2003 Holger Schurig
25
26import os
27import sys
28import logging
29import bb
30from bb import data
31from bb.fetch import Fetch
32from bb.fetch import FetchError
33from bb.fetch import MissingParameterError
34from bb.fetch import runfetchcmd
35from bb.fetch import logger
36
37class Svn(Fetch):
38 """Class to fetch a module or modules from svn repositories"""
39 def supports(self, url, ud, d):
40 """
41 Check to see if a given url can be fetched with svn.
42 """
43 return ud.type in ['svn']
44
45 def localpath(self, url, ud, d):
46 if "module" not in ud.parm:
47 raise MissingParameterError("svn method needs a 'module' parameter")
48
49 ud.module = ud.parm["module"]
50
51 # Create paths to svn checkouts
52 relpath = self._strip_leading_slashes(ud.path)
53 ud.pkgdir = os.path.join(data.expand('${SVNDIR}', d), ud.host, relpath)
54 ud.moddir = os.path.join(ud.pkgdir, ud.module)
55
56 if 'rev' in ud.parm:
57 ud.date = ""
58 ud.revision = ud.parm['rev']
59 elif 'date' in ud.parm:
60 ud.date = ud.parm['date']
61 ud.revision = ""
62 else:
63 #
64 # ***Nasty hack***
65 # If DATE in unexpanded PV, use ud.date (which is set from SRCDATE)
66 # Should warn people to switch to SRCREV here
67 #
68 pv = data.getVar("PV", d, 0)
69 if "DATE" in pv:
70 ud.revision = ""
71 else:
72 rev = Fetch.srcrev_internal_helper(ud, d)
73 if rev is True:
74 ud.revision = self.latest_revision(url, ud, d)
75 ud.date = ""
76 elif rev:
77 ud.revision = rev
78 ud.date = ""
79 else:
80 ud.revision = ""
81
82 ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)
83
84 return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
85
86 def _buildsvncommand(self, ud, d, command):
87 """
88 Build up an svn commandline based on ud
89 command is "fetch", "update", "info"
90 """
91
92 basecmd = data.expand('${FETCHCMD_svn}', d)
93
94 proto = ud.parm.get('proto', 'svn')
95
96 svn_rsh = None
97 if proto == "svn+ssh" and "rsh" in ud.parm:
98 svn_rsh = ud.parm["rsh"]
99
100 svnroot = ud.host + ud.path
101
102 # either use the revision, or SRCDATE in braces,
103 options = []
104
105 if ud.user:
106 options.append("--username %s" % ud.user)
107
108 if ud.pswd:
109 options.append("--password %s" % ud.pswd)
110
111 if command == "info":
112 svncmd = "%s info %s %s://%s/%s/" % (basecmd, " ".join(options), proto, svnroot, ud.module)
113 else:
114 suffix = ""
115 if ud.revision:
116 options.append("-r %s" % ud.revision)
117 suffix = "@%s" % (ud.revision)
118 elif ud.date:
119 options.append("-r {%s}" % ud.date)
120
121 if command == "fetch":
122 svncmd = "%s co %s %s://%s/%s%s %s" % (basecmd, " ".join(options), proto, svnroot, ud.module, suffix, ud.module)
123 elif command == "update":
124 svncmd = "%s update %s" % (basecmd, " ".join(options))
125 else:
126 raise FetchError("Invalid svn command %s" % command)
127
128 if svn_rsh:
129 svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd)
130
131 return svncmd
132
133 def go(self, loc, ud, d):
134 """Fetch url"""
135
136 logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
137
138 if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
139 svnupdatecmd = self._buildsvncommand(ud, d, "update")
140 logger.info("Update " + loc)
141 # update sources there
142 os.chdir(ud.moddir)
143 logger.debug(1, "Running %s", svnupdatecmd)
144 runfetchcmd(svnupdatecmd, d)
145 else:
146 svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
147 logger.info("Fetch " + loc)
148 # check out sources there
149 bb.mkdirhier(ud.pkgdir)
150 os.chdir(ud.pkgdir)
151 logger.debug(1, "Running %s", svnfetchcmd)
152 runfetchcmd(svnfetchcmd, d)
153
154 scmdata = ud.parm.get("scmdata", "")
155 if scmdata == "keep":
156 tar_flags = ""
157 else:
158 tar_flags = "--exclude '.svn'"
159
160 os.chdir(ud.pkgdir)
161 # tar them up to a defined filename
162 try:
163 runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d)
164 except:
165 t, v, tb = sys.exc_info()
166 try:
167 os.unlink(ud.localpath)
168 except OSError:
169 pass
170 raise t, v, tb
171
172 def supports_srcrev(self):
173 return True
174
175 def _revision_key(self, url, ud, d):
176 """
177 Return a unique key for the url
178 """
179 return "svn:" + ud.moddir
180
181 def _latest_revision(self, url, ud, d):
182 """
183 Return the latest upstream revision number
184 """
185 logger.debug(2, "SVN fetcher hitting network for %s", url)
186
187 output = runfetchcmd("LANG=C LC_ALL=C " + self._buildsvncommand(ud, d, "info"), d, True)
188
189 revision = None
190 for line in output.splitlines():
191 if "Last Changed Rev" in line:
192 revision = line.split(":")[1].strip()
193
194 return revision
195
196 def _sortable_revision(self, url, ud, d):
197 """
198 Return a sortable revision number which in our case is the revision number
199 """
200
201 return self._build_revision(url, ud, d)
202
203 def _build_revision(self, url, ud, d):
204 return ud.revision
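For reference, the string building in _buildsvncommand() for a "fetch" with a fixed revision comes out roughly as below; the values, including the assumed expansion of FETCHCMD_svn, are illustrative and not taken from the patch.

    basecmd = "/usr/bin/env svn"          # assumed FETCHCMD_svn expansion
    proto, host, path, module, rev = "svn", "svn.example.org", "/repos", "trunk", "1234"
    svnroot = host + path
    options, suffix = ["-r %s" % rev], "@%s" % rev
    svncmd = "%s co %s %s://%s/%s%s %s" % (basecmd, " ".join(options), proto,
                                           svnroot, module, suffix, module)
    print(svncmd)
    # /usr/bin/env svn co -r 1234 svn://svn.example.org/repos/trunk@1234 trunk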
diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py
new file mode 100644
index 0000000000..4d4bdfd493
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/wget.py
@@ -0,0 +1,93 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6Classes for obtaining upstream sources for the
7BitBake build tools.
8
9"""
10
11# Copyright (C) 2003, 2004 Chris Larson
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26# Based on functions from the base bb module, Copyright 2003 Holger Schurig
27
28import os
29import logging
30import bb
31import urllib
32from bb import data
33from bb.fetch import Fetch, FetchError, encodeurl, decodeurl, logger, runfetchcmd
34
35class Wget(Fetch):
36 """Class to fetch urls via 'wget'"""
37 def supports(self, url, ud, d):
38 """
39 Check to see if a given url can be fetched with wget.
40 """
41 return ud.type in ['http', 'https', 'ftp']
42
43 def localpath(self, url, ud, d):
44
45 url = encodeurl([ud.type, ud.host, ud.path, ud.user, ud.pswd, {}])
46 ud.basename = os.path.basename(ud.path)
47 ud.localfile = data.expand(urllib.unquote(ud.basename), d)
48
49 return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
50
51 def go(self, uri, ud, d, checkonly = False):
52 """Fetch urls"""
53
54 def fetch_uri(uri, ud, d):
55 if checkonly:
56 fetchcmd = data.getVar("CHECKCOMMAND", d, 1)
57 elif os.path.exists(ud.localpath):
58 # file exists, but we didn't complete it, so try to resume it
59 fetchcmd = data.getVar("RESUMECOMMAND", d, 1)
60 else:
61 fetchcmd = data.getVar("FETCHCOMMAND", d, 1)
62
63 uri = uri.split(";")[0]
64 uri_decoded = list(decodeurl(uri))
65 uri_type = uri_decoded[0]
66 uri_host = uri_decoded[1]
67
68 fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
69 fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
70 logger.info("fetch " + uri)
71 logger.debug(2, "executing " + fetchcmd)
72 runfetchcmd(fetchcmd, d)
73
74 # Sanity check since wget can pretend it succeeded when it didn't
75 # Also, this used to happen if sourceforge sent us to the mirror page
76 if not os.path.exists(ud.localpath) and not checkonly:
77 logger.debug(2, "The fetch command for %s returned success but %s doesn't exist?...", uri, ud.localpath)
78 return False
79
80 return True
81
82 localdata = data.createCopy(d)
83 data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata)
84 data.update_data(localdata)
85
86 if fetch_uri(uri, ud, localdata):
87 return True
88
89 raise FetchError(uri)
90
91
92 def checkstatus(self, uri, ud, d):
93 return self.go(uri, ud, d, True)
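Finally, a sketch of the ${URI}/${FILE} substitution performed inside fetch_uri(); the command template is an assumption standing in for whatever FETCHCOMMAND is configured to, and the URL is invented.

    fetchcmd = "wget -t 5 -P ${DL_DIR} ${URI}"    # assumed FETCHCOMMAND value, not from the patch
    uri = "http://downloads.example.com/foo-1.0.tar.gz;name=foo"
    basename = "foo-1.0.tar.gz"
    fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
    fetchcmd = fetchcmd.replace("${FILE}", basename)   # no-op here; used by RESUME/CHECK-style templates
    print(fetchcmd)   # wget -t 5 -P ${DL_DIR} http://downloads.example.com/foo-1.0.tar.gz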