commit bfc70eb24e3ded25007811b1531673fa70b02401
author:    Richard Purdie <richard@openedhand.com>  2007-08-03 13:40:52 +0000
committer: Richard Purdie <richard@openedhand.com>  2007-08-03 13:40:52 +0000
tree:      fa4a29290d3178937fa085c147e8a51f815c6fdc /bitbake/lib
parent:    034bbb805be0002fe6d689abde19662868b57b2c
download:  poky-bfc70eb24e3ded25007811b1531673fa70b02401.tar.gz

bitbake: Update along 1.8 branch

git-svn-id: https://svn.o-hand.com/repos/poky/trunk@2345 311d38ba-8fff-0310-9ca6-ca027cbcb966
Diffstat (limited to 'bitbake/lib')

 -rw-r--r--  bitbake/lib/bb/__init__.py                   |   2
 -rw-r--r--  bitbake/lib/bb/build.py                      |   4
 -rw-r--r--  bitbake/lib/bb/cooker.py                     |   4
 -rw-r--r--  bitbake/lib/bb/data.py                       |  40
 -rw-r--r--  bitbake/lib/bb/event.py                      |   2
 -rw-r--r--  bitbake/lib/bb/fetch/__init__.py             | 301
 -rw-r--r--  bitbake/lib/bb/fetch/git.py                  |  70
 -rw-r--r--  bitbake/lib/bb/fetch/perforce.py             |   2
 -rw-r--r--  bitbake/lib/bb/fetch/svn.py                  | 177
 -rw-r--r--  bitbake/lib/bb/msg.py                        |   1
 -rw-r--r--  bitbake/lib/bb/parse/parse_py/BBHandler.py   |   6
 -rw-r--r--  bitbake/lib/bb/parse/parse_py/ConfHandler.py |  11
 -rw-r--r--  bitbake/lib/bb/persist_data.py               |  94

 13 files changed, 529 insertions, 185 deletions
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py
index c12eda25f3..e601eda469 100644
--- a/bitbake/lib/bb/__init__.py
+++ b/bitbake/lib/bb/__init__.py
@@ -21,7 +21,7 @@
21# with this program; if not, write to the Free Software Foundation, Inc., 21# with this program; if not, write to the Free Software Foundation, Inc.,
22# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 22# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
23 23
24__version__ = "1.8.5" 24__version__ = "1.8.7"
25 25
26__all__ = [ 26__all__ = [
27 27
diff --git a/bitbake/lib/bb/build.py b/bitbake/lib/bb/build.py
index 4aaa6b262c..bcbc55eea5 100644
--- a/bitbake/lib/bb/build.py
+++ b/bitbake/lib/bb/build.py
@@ -150,7 +150,7 @@ def exec_func_shell(func, d):
150 if bb.msg.debug_level['default'] > 0: f.write("set -x\n") 150 if bb.msg.debug_level['default'] > 0: f.write("set -x\n")
151 data.emit_env(f, d) 151 data.emit_env(f, d)
152 152
153 f.write("cd '%s'\n" % os.getcwd()) 153 f.write("cd %s\n" % os.getcwd())
154 if func: f.write("%s\n" % func) 154 if func: f.write("%s\n" % func)
155 f.close() 155 f.close()
156 os.chmod(runfile, 0775) 156 os.chmod(runfile, 0775)
@@ -189,7 +189,7 @@ def exec_func_shell(func, d):
189 else: 189 else:
190 maybe_fakeroot = '' 190 maybe_fakeroot = ''
191 lang_environment = "LC_ALL=C " 191 lang_environment = "LC_ALL=C "
192 ret = os.system('%s%ssh -e "%s"' % (lang_environment, maybe_fakeroot, runfile)) 192 ret = os.system('%s%ssh -e %s' % (lang_environment, maybe_fakeroot, runfile))
193 try: 193 try:
194 os.chdir(prevdir) 194 os.chdir(prevdir)
195 except: 195 except:
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
index 77f71a7169..7db3529bb4 100644
--- a/bitbake/lib/bb/cooker.py
+++ b/bitbake/lib/bb/cooker.py
@@ -336,6 +336,10 @@ class BBCooker:
336 if bb.data.getVarFlag(var, 'handler', data): 336 if bb.data.getVarFlag(var, 'handler', data):
337 bb.event.register(var,bb.data.getVar(var, data)) 337 bb.event.register(var,bb.data.getVar(var, data))
338 338
339 bb.fetch.fetcher_init(self.configuration.data)
340
341 bb.event.fire(bb.event.ConfigParsed(self.configuration.data))
342
339 except IOError: 343 except IOError:
340 bb.msg.fatal(bb.msg.domain.Parsing, "Unable to open %s" % afile ) 344 bb.msg.fatal(bb.msg.domain.Parsing, "Unable to open %s" % afile )
341 except bb.parse.ParseError, details: 345 except bb.parse.ParseError, details:
diff --git a/bitbake/lib/bb/data.py b/bitbake/lib/bb/data.py
index 14f1d896d7..9782c9f546 100644
--- a/bitbake/lib/bb/data.py
+++ b/bitbake/lib/bb/data.py
@@ -337,6 +337,12 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False):
337 if getVarFlag(var, "python", d): 337 if getVarFlag(var, "python", d):
338 return 0 338 return 0
339 339
340 export = getVarFlag(var, "export", d)
341 unexport = getVarFlag(var, "unexport", d)
342 func = getVarFlag(var, "func", d)
343 if not all and not export and not unexport and not func:
344 return 0
345
340 try: 346 try:
341 if all: 347 if all:
342 oval = getVar(var, d, 0) 348 oval = getVar(var, d, 0)
@@ -362,28 +368,28 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False):
362 if (var.find("-") != -1 or var.find(".") != -1 or var.find('{') != -1 or var.find('}') != -1 or var.find('+') != -1) and not all: 368 if (var.find("-") != -1 or var.find(".") != -1 or var.find('{') != -1 or var.find('}') != -1 or var.find('+') != -1) and not all:
363 return 0 369 return 0
364 370
371 varExpanded = expand(var, d)
372
373 if unexport:
374 o.write('unset %s\n' % varExpanded)
375 return 1
376
365 val.rstrip() 377 val.rstrip()
366 if not val: 378 if not val:
367 return 0 379 return 0
368
369 varExpanded = expand(var, d)
370 380
371 if getVarFlag(var, "func", d): 381 if func:
372# NOTE: should probably check for unbalanced {} within the var 382 # NOTE: should probably check for unbalanced {} within the var
373 o.write("%s() {\n%s\n}\n" % (varExpanded, val)) 383 o.write("%s() {\n%s\n}\n" % (varExpanded, val))
374 else: 384 return 1
375 if getVarFlag(var, "unexport", d): 385
376 o.write('unset %s\n' % varExpanded) 386 if export:
377 return 1 387 o.write('export ')
378 if getVarFlag(var, "export", d): 388
379 o.write('export ') 389 # if we're going to output this within doublequotes,
380 else: 390 # to a shell, we need to escape the quotes in the var
381 if not all: 391 alter = re.sub('"', '\\"', val.strip())
382 return 0 392 o.write('%s="%s"\n' % (varExpanded, alter))
383# if we're going to output this within doublequotes,
384# to a shell, we need to escape the quotes in the var
385 alter = re.sub('"', '\\"', val.strip())
386 o.write('%s="%s"\n' % (varExpanded, alter))
387 return 1 393 return 1
388 394
389 395
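
As a rough illustration of the reworked emit_var() above (a minimal sketch, not part of the commit; the variable name and value are assumptions): a variable flagged 'export' is now written as a quoted shell assignment, 'unexport' becomes an unset, 'func' becomes a shell function definition, and plain unflagged variables are skipped early unless all=True.

    import sys
    from bb import data

    d = data.init()
    data.setVar('CFLAGS', '-O2 -g', d)
    data.setVarFlag('CFLAGS', 'export', 1, d)
    # The new early return skips variables with no export/unexport/func flag
    # when all is False; this one is flagged for export, so it is emitted as:
    data.emit_var('CFLAGS', sys.stdout, d)    # -> export CFLAGS="-O2 -g"
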
diff --git a/bitbake/lib/bb/event.py b/bitbake/lib/bb/event.py
index cfbda3e9fc..7148a2b7d6 100644
--- a/bitbake/lib/bb/event.py
+++ b/bitbake/lib/bb/event.py
@@ -124,6 +124,8 @@ def getName(e):
124 else: 124 else:
125 return e.__name__ 125 return e.__name__
126 126
127class ConfigParsed(Event):
128 """Configuration Parsing Complete"""
127 129
128class PkgBase(Event): 130class PkgBase(Event):
129 """Base class for package events""" 131 """Base class for package events"""
diff --git a/bitbake/lib/bb/fetch/__init__.py b/bitbake/lib/bb/fetch/__init__.py
index 31a4adccb1..6ebf5a34ad 100644
--- a/bitbake/lib/bb/fetch/__init__.py
+++ b/bitbake/lib/bb/fetch/__init__.py
@@ -27,6 +27,12 @@ BitBake build tools.
27import os, re 27import os, re
28import bb 28import bb
29from bb import data 29from bb import data
30from bb import persist_data
31
32try:
33 import cPickle as pickle
34except ImportError:
35 import pickle
30 36
31class FetchError(Exception): 37class FetchError(Exception):
32 """Exception raised when a download fails""" 38 """Exception raised when a download fails"""
@@ -74,78 +80,193 @@ def uri_replace(uri, uri_find, uri_replace, d):
74 return bb.encodeurl(result_decoded) 80 return bb.encodeurl(result_decoded)
75 81
76methods = [] 82methods = []
77urldata = {}
78
79def init(urls = [], d = None):
80 if d == None:
81 bb.msg.debug(2, bb.msg.domain.Fetcher, "BUG init called with None as data object!!!")
82 return
83
84 for m in methods:
85 m.urls = []
86 83
87 for u in urls: 84def fetcher_init(d):
88 ud = initdata(u, d) 85 """
89 if ud.method: 86 Called to initilize the fetchers once the configuration data is known
90 ud.method.urls.append(u) 87 Calls before this must not hit the cache.
91 88 """
92def initdata(url, d): 89 pd = persist_data.PersistData(d)
93 fn = bb.data.getVar('FILE', d, 1) 90 # Clear any cached url data
94 if fn not in urldata: 91 pd.delDomain("BB_URLDATA")
95 urldata[fn] = {} 92 # When to drop SCM head revisions should be controled by user policy
96 if url not in urldata[fn]: 93 pd.delDomain("BB_URI_HEADREVS")
97 ud = FetchData() 94 # Make sure our domains exist
98 (ud.type, ud.host, ud.path, ud.user, ud.pswd, ud.parm) = bb.decodeurl(data.expand(url, d)) 95 pd.addDomain("BB_URLDATA")
99 ud.date = Fetch.getSRCDate(ud, d) 96 pd.addDomain("BB_URI_HEADREVS")
100 for m in methods: 97 pd.addDomain("BB_URI_LOCALCOUNT")
101 if m.supports(url, ud, d): 98
102 ud.localpath = m.localpath(url, ud, d) 99# Function call order is usually:
103 ud.md5 = ud.localpath + '.md5' 100# 1. init
104 # if user sets localpath for file, use it instead. 101# 2. go
105 if "localpath" in ud.parm: 102# 3. localpaths
106 ud.localpath = ud.parm["localpath"] 103# localpath can be called at any time
107 ud.method = m 104
108 break 105def init(urls, d, cache = True):
109 urldata[fn][url] = ud 106 urldata = {}
110 return urldata[fn][url] 107
111 108 if cache:
112def go(d): 109 urldata, pd, fn = getdata(d)
113 """Fetch all urls""" 110
111 for url in urls:
112 if url not in urldata:
113 ud = FetchData(url, d)
114 for m in methods:
115 if m.supports(url, ud, d):
116 ud.init(m, d)
117 ud.setup_localpath(d)
118 break
119 urldata[url] = ud
120
121 if cache:
122 pd.setValue("BB_URLDATA", fn, pickle.dumps(urldata, 0))
123
124 return urldata
125
126def getdata(d):
127 urldata = {}
114 fn = bb.data.getVar('FILE', d, 1) 128 fn = bb.data.getVar('FILE', d, 1)
115 for m in methods: 129 pd = persist_data.PersistData(d)
116 for u in m.urls: 130 encdata = pd.getValue("BB_URLDATA", fn)
117 ud = urldata[fn][u] 131 if encdata:
118 if ud.localfile and not m.forcefetch(u, ud, d) and os.path.exists(urldata[fn][u].md5): 132 urldata = pickle.loads(str(encdata))
119 # File already present along with md5 stamp file 133
120 # Touch md5 file to show activity 134 return urldata, pd, fn
121 os.utime(ud.md5, None) 135
122 continue 136def go(d, urldata = None):
123 # RP - is olddir needed? 137 """
124 # olddir = os.path.abspath(os.getcwd()) 138 Fetch all urls
125 m.go(u, ud , d) 139 """
126 # os.chdir(olddir) 140 if not urldata:
127 if ud.localfile and not m.forcefetch(u, ud, d): 141 urldata, pd, fn = getdata(d)
128 Fetch.write_md5sum(u, ud, d) 142
129 143 for u in urldata:
130def localpaths(d): 144 ud = urldata[u]
131 """Return a list of the local filenames, assuming successful fetch""" 145 m = ud.method
146 if ud.localfile and not m.forcefetch(u, ud, d) and os.path.exists(ud.md5):
147 # File already present along with md5 stamp file
148 # Touch md5 file to show activity
149 os.utime(ud.md5, None)
150 continue
151 m.go(u, ud, d)
152 if ud.localfile and not m.forcefetch(u, ud, d):
153 Fetch.write_md5sum(u, ud, d)
154
155def localpaths(d, urldata = None):
156 """
157 Return a list of the local filenames, assuming successful fetch
158 """
132 local = [] 159 local = []
133 fn = bb.data.getVar('FILE', d, 1) 160 if not urldata:
134 for m in methods: 161 urldata, pd, fn = getdata(d)
135 for u in m.urls: 162
136 local.append(urldata[fn][u].localpath) 163 for u in urldata:
164 ud = urldata[u]
165 local.append(ud.localpath)
166
137 return local 167 return local
138 168
139def localpath(url, d): 169def get_srcrev(d):
140 ud = initdata(url, d) 170 """
141 if ud.method: 171 Return the version string for the current package
142 return ud.localpath 172 (usually to be used as PV)
173 Most packages usually only have one SCM so we just pass on the call.
174 In the multi SCM case, we build a value based on SRCREV_FORMAT which must
175 have been set.
176 """
177 scms = []
178 urldata, pd, fn = getdata(d)
179 if len(urldata) == 0:
180 src_uri = bb.data.getVar('SRC_URI', d, 1).split()
181 for url in src_uri:
182 if url not in urldata:
183 ud = FetchData(url, d)
184 for m in methods:
185 if m.supports(url, ud, d):
186 ud.init(m, d)
187 break
188 urldata[url] = ud
189 if ud.method.suppports_srcrev():
190 scms.append(url)
191 ud.setup_localpath(d)
192 else:
193 for u in urldata:
194 ud = urldata[u]
195 if ud.method.suppports_srcrev():
196 scms.append(u)
197
198 if len(scms) == 0:
199 bb.msg.error(bb.msg.domain.Fetcher, "SRCREV was used yet no valid SCM was found in SRC_URI")
200 raise ParameterError
201
202 if len(scms) == 1:
203 return urldata[scms[0]].method.sortable_revision(scms[0], urldata[scms[0]], d)
204
205 bb.msg.error(bb.msg.domain.Fetcher, "Sorry, support for SRCREV_FORMAT still needs to be written")
206 raise ParameterError
207
208def localpath(url, d, cache = True):
209 """
210 Called from the parser with cache=False since the cache isn't ready
211 at this point. Also called from classed in OE e.g. patch.bbclass
212 """
213 ud = init([url], d, cache)
214 if ud[url].method:
215 return ud[url].localpath
143 return url 216 return url
144 217
218def runfetchcmd(cmd, d, quiet = False):
219 """
220 Run cmd returning the command output
221 Raise an error if interrupted or cmd fails
222 Optionally echo command output to stdout
223 """
224 bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cmd)
225
226 # Need to export PATH as binary could be in metadata paths
227 # rather than host provided
228 pathcmd = 'export PATH=%s; %s' % (data.expand('${PATH}', d), cmd)
229
230 stdout_handle = os.popen(pathcmd, "r")
231 output = ""
232
233 while 1:
234 line = stdout_handle.readline()
235 if not line:
236 break
237 if not quiet:
238 print line
239 output += line
240
241 status = stdout_handle.close() or 0
242 signal = status >> 8
243 exitstatus = status & 0xff
244
245 if signal:
246 raise FetchError("Fetch command %s failed with signal %s, output:\n%s" % (pathcmd, signal, output))
247 elif status != 0:
248 raise FetchError("Fetch command %s failed with exit code %s, output:\n%s" % (pathcmd, status, output))
249
250 return output
251
145class FetchData(object): 252class FetchData(object):
146 """Class for fetcher variable store""" 253 """Class for fetcher variable store"""
147 def __init__(self): 254 def __init__(self, url, d):
148 self.localfile = "" 255 self.localfile = ""
256 (self.type, self.host, self.path, self.user, self.pswd, self.parm) = bb.decodeurl(data.expand(url, d))
257 self.date = Fetch.getSRCDate(self, d)
258 self.url = url
259
260 def init(self, method, d):
261 self.method = method
262
263 def setup_localpath(self, d):
264 if "localpath" in self.parm:
265 self.localpath = self.parm["localpath"]
266 else:
267 self.localpath = self.method.localpath(self.url, self, d)
268 self.md5 = self.localpath + '.md5'
269 # if user sets localpath for file, use it instead.
149 270
150 271
151class Fetch(object): 272class Fetch(object):
@@ -182,6 +303,12 @@ class Fetch(object):
182 """ 303 """
183 return False 304 return False
184 305
306 def suppports_srcrev(self):
307 """
308 The fetcher supports auto source revisions (SRCREV)
309 """
310 return False
311
185 def go(self, url, urldata, d): 312 def go(self, url, urldata, d):
186 """ 313 """
187 Fetch urls 314 Fetch urls
@@ -269,6 +396,50 @@ class Fetch(object):
269 md5out.close() 396 md5out.close()
270 write_md5sum = staticmethod(write_md5sum) 397 write_md5sum = staticmethod(write_md5sum)
271 398
399 def latest_revision(self, url, ud, d):
400 """
401 Look in the cache for the latest revision, if not present ask the SCM.
402 """
403 if not hasattr(self, "_latest_revision"):
404 raise ParameterError
405
406 pd = persist_data.PersistData(d)
407 key = self._revision_key(url, ud, d)
408 rev = pd.getValue("BB_URI_HEADREVS", key)
409 if rev != None:
410 return str(rev)
411
412 rev = self._latest_revision(url, ud, d)
413 pd.setValue("BB_URI_HEADREVS", key, rev)
414 return rev
415
416 def sortable_revision(self, url, ud, d):
417 """
418
419 """
420 if hasattr(self, "_sortable_revision"):
421 return self._sortable_revision(url, ud, d)
422
423 pd = persist_data.PersistData(d)
424 key = self._revision_key(url, ud, d)
425 latest_rev = self.latest_revision(url, ud, d)
426 last_rev = pd.getValue("BB_URI_LOCALCOUNT", key + "_rev")
427 count = pd.getValue("BB_URI_LOCALCOUNT", key + "_count")
428
429 if last_rev == latest_rev:
430 return str(count + "+" + latest_rev)
431
432 if count is None:
433 count = "0"
434 else:
435 count = str(int(count) + 1)
436
437 pd.setValue("BB_URI_LOCALCOUNT", key + "_rev", latest_rev)
438 pd.setValue("BB_URI_LOCALCOUNT", key + "_count", count)
439
440 return str(count + "+" + latest_rev)
441
442
272import cvs 443import cvs
273import git 444import git
274import local 445import local
@@ -278,11 +449,11 @@ import svk
278import ssh 449import ssh
279import perforce 450import perforce
280 451
281methods.append(cvs.Cvs())
282methods.append(git.Git())
283methods.append(local.Local()) 452methods.append(local.Local())
284methods.append(svn.Svn())
285methods.append(wget.Wget()) 453methods.append(wget.Wget())
454methods.append(svn.Svn())
455methods.append(git.Git())
456methods.append(cvs.Cvs())
286methods.append(svk.Svk()) 457methods.append(svk.Svk())
287methods.append(ssh.SSH()) 458methods.append(ssh.SSH())
288methods.append(perforce.Perforce()) 459methods.append(perforce.Perforce())
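
The reworked module above is intended to be driven in the "init, go, localpaths" order noted in the comment it adds; a minimal sketch under that assumption (d is an already-parsed BitBake data store and is not defined by this commit):

    import bb
    from bb import fetch

    # d: assumed to be configuration/recipe data that has already been parsed,
    # since fetcher_init() clears the persistent url and head-revision caches.
    fetch.fetcher_init(d)

    src_uri = bb.data.getVar('SRC_URI', d, 1).split()
    urldata = fetch.init(src_uri, d)            # 1. resolve methods and local paths
    fetch.go(d, urldata)                        # 2. download anything not already stashed
    local_files = fetch.localpaths(d, urldata)  # 3. local filenames after a successful fetch
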
diff --git a/bitbake/lib/bb/fetch/git.py b/bitbake/lib/bb/fetch/git.py
index c0cd27df09..7d55ee9138 100644
--- a/bitbake/lib/bb/fetch/git.py
+++ b/bitbake/lib/bb/fetch/git.py
@@ -25,6 +25,7 @@ import bb
25from bb import data 25from bb import data
26from bb.fetch import Fetch 26from bb.fetch import Fetch
27from bb.fetch import FetchError 27from bb.fetch import FetchError
28from bb.fetch import runfetchcmd
28 29
29def prunedir(topdir): 30def prunedir(topdir):
30 # Delete everything reachable from the directory named in 'topdir'. 31 # Delete everything reachable from the directory named in 'topdir'.
@@ -35,19 +36,6 @@ def prunedir(topdir):
35 for name in dirs: 36 for name in dirs:
36 os.rmdir(os.path.join(root, name)) 37 os.rmdir(os.path.join(root, name))
37 38
38def rungitcmd(cmd,d):
39
40 bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cmd)
41
42 # Need to export PATH as git is likely to be in metadata paths
43 # rather than host provided
44 pathcmd = 'export PATH=%s; %s' % (data.expand('${PATH}', d), cmd)
45
46 myret = os.system(pathcmd)
47
48 if myret != 0:
49 raise FetchError("Git: %s failed" % pathcmd)
50
51class Git(Fetch): 39class Git(Fetch):
52 """Class to fetch a module or modules from git repositories""" 40 """Class to fetch a module or modules from git repositories"""
53 def supports(self, url, ud, d): 41 def supports(self, url, ud, d):
@@ -62,24 +50,22 @@ class Git(Fetch):
62 if 'protocol' in ud.parm: 50 if 'protocol' in ud.parm:
63 ud.proto = ud.parm['protocol'] 51 ud.proto = ud.parm['protocol']
64 52
65 ud.tag = "master" 53 tag = data.getVar("SRCREV", d, 0)
66 if 'tag' in ud.parm: 54 if 'tag' in ud.parm:
67 ud.tag = ud.parm['tag'] 55 ud.tag = ud.parm['tag']
56 elif tag and "get_srcrev" not in tag and len(tag) == 40:
57 ud.tag = tag
58 else:
59 ud.tag = self.latest_revision(url, ud, d)
68 60
69 ud.localfile = data.expand('git_%s%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.tag), d) 61 ud.localfile = data.expand('git_%s%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.tag), d)
70 62
71 return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) 63 return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
72 64
73 def forcefetch(self, url, ud, d):
74 # tag=="master" must always update
75 if (ud.tag == "master"):
76 return True
77 return False
78
79 def go(self, loc, ud, d): 65 def go(self, loc, ud, d):
80 """Fetch url""" 66 """Fetch url"""
81 67
82 if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile): 68 if Fetch.try_mirror(d, ud.localfile):
83 bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists (or was stashed). Skipping git checkout." % ud.localpath) 69 bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists (or was stashed). Skipping git checkout." % ud.localpath)
84 return 70 return
85 71
@@ -96,32 +82,50 @@ class Git(Fetch):
96 if Fetch.try_mirror(d, repofilename): 82 if Fetch.try_mirror(d, repofilename):
97 bb.mkdirhier(repodir) 83 bb.mkdirhier(repodir)
98 os.chdir(repodir) 84 os.chdir(repodir)
99 rungitcmd("tar -xzf %s" % (repofile),d) 85 runfetchcmd("tar -xzf %s" % (repofile), d)
100 else: 86 else:
101 rungitcmd("git clone -n %s://%s%s %s" % (ud.proto, ud.host, ud.path, repodir),d) 87 runfetchcmd("git clone -n %s://%s%s %s" % (ud.proto, ud.host, ud.path, repodir), d)
102 88
103 os.chdir(repodir) 89 os.chdir(repodir)
104 rungitcmd("git pull %s://%s%s" % (ud.proto, ud.host, ud.path),d)
105 rungitcmd("git pull --tags %s://%s%s" % (ud.proto, ud.host, ud.path),d)
106 rungitcmd("git prune-packed", d)
107 rungitcmd("git pack-redundant --all | xargs -r rm", d)
108 # Remove all but the .git directory 90 # Remove all but the .git directory
109 rungitcmd("rm * -Rf", d) 91 runfetchcmd("rm * -Rf", d)
92 runfetchcmd("git pull %s://%s%s" % (ud.proto, ud.host, ud.path), d)
93 runfetchcmd("git pull --tags %s://%s%s" % (ud.proto, ud.host, ud.path), d)
94 runfetchcmd("git prune-packed", d)
95 runfetchcmd("git pack-redundant --all | xargs -r rm", d)
110 # old method of downloading tags 96 # old method of downloading tags
111 #rungitcmd("rsync -a --verbose --stats --progress rsync://%s%s/ %s" % (ud.host, ud.path, os.path.join(repodir, ".git", "")),d) 97 #runfetchcmd("rsync -a --verbose --stats --progress rsync://%s%s/ %s" % (ud.host, ud.path, os.path.join(repodir, ".git", "")), d)
112 98
113 os.chdir(repodir) 99 os.chdir(repodir)
114 bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git repository") 100 bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git repository")
115 rungitcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ),d) 101 runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ), d)
116 102
117 if os.path.exists(codir): 103 if os.path.exists(codir):
118 prunedir(codir) 104 prunedir(codir)
119 105
120 bb.mkdirhier(codir) 106 bb.mkdirhier(codir)
121 os.chdir(repodir) 107 os.chdir(repodir)
122 rungitcmd("git read-tree %s" % (ud.tag),d) 108 runfetchcmd("git read-tree %s" % (ud.tag), d)
123 rungitcmd("git checkout-index -q -f --prefix=%s -a" % (os.path.join(codir, "git", "")),d) 109 runfetchcmd("git checkout-index -q -f --prefix=%s -a" % (os.path.join(codir, "git", "")), d)
124 110
125 os.chdir(codir) 111 os.chdir(codir)
126 bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git checkout") 112 bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git checkout")
127 rungitcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*") ),d) 113 runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*") ), d)
114
115 os.chdir(repodir)
116 prunedir(codir)
117
118 def suppports_srcrev(self):
119 return True
120
121 def _revision_key(self, url, ud, d):
122 """
123 Return a unique key for the url
124 """
125 return "git:" + ud.host + ud.path.replace('/', '.')
126
127 def _latest_revision(self, url, ud, d):
128
129 output = runfetchcmd("git ls-remote %s://%s%s" % (ud.proto, ud.host, ud.path), d, True)
130 return output.split()[0]
131
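
When neither a tag parameter nor a 40-character SRCREV is supplied, the git fetcher above falls back to latest_revision(), which ultimately runs git ls-remote; a rough sketch of that lookup (the repository URL and the data store d are assumptions):

    from bb.fetch import runfetchcmd

    # Illustrative URL only; quiet=True suppresses echoing of the command
    # output, which is returned either way.
    output = runfetchcmd("git ls-remote git://git.example.com/project", d, True)
    # As in Git._latest_revision(), take the first whitespace-separated field,
    # i.e. the SHA of the first ref that ls-remote reports.
    tag = output.split()[0]
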
diff --git a/bitbake/lib/bb/fetch/perforce.py b/bitbake/lib/bb/fetch/perforce.py
index 125eb99aa6..97b618228b 100644
--- a/bitbake/lib/bb/fetch/perforce.py
+++ b/bitbake/lib/bb/fetch/perforce.py
@@ -125,7 +125,7 @@ class Perforce(Fetch):
125 """ 125 """
126 126
127 # try to use the tarball stash 127 # try to use the tarball stash
128 if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile): 128 if Fetch.try_mirror(d, ud.localfile):
129 bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping perforce checkout." % ud.localpath) 129 bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping perforce checkout." % ud.localpath)
130 return 130 return
131 131
diff --git a/bitbake/lib/bb/fetch/svn.py b/bitbake/lib/bb/fetch/svn.py
index 120f4f8539..ca12efe158 100644
--- a/bitbake/lib/bb/fetch/svn.py
+++ b/bitbake/lib/bb/fetch/svn.py
@@ -1,17 +1,12 @@
1# ex:ts=4:sw=4:sts=4:et 1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- 2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3""" 3"""
4BitBake 'Fetch' implementations 4BitBake 'Fetch' implementation for svn.
5
6This implementation is for svn. It is based on the cvs implementation.
7 5
8""" 6"""
9 7
10# Copyright (C) 2004 Marcin Juszkiewicz 8# Copyright (C) 2003, 2004 Chris Larson
11# 9# Copyright (C) 2004 Marcin Juszkiewicz
12# Classes for obtaining upstream sources for the
13# BitBake build tools.
14# Copyright (C) 2003, 2004 Chris Larson
15# 10#
16# This program is free software; you can redistribute it and/or modify 11# This program is free software; you can redistribute it and/or modify
17# it under the terms of the GNU General Public License version 2 as 12# it under the terms of the GNU General Public License version 2 as
@@ -35,6 +30,7 @@ from bb import data
35from bb.fetch import Fetch 30from bb.fetch import Fetch
36from bb.fetch import FetchError 31from bb.fetch import FetchError
37from bb.fetch import MissingParameterError 32from bb.fetch import MissingParameterError
33from bb.fetch import runfetchcmd
38 34
39class Svn(Fetch): 35class Svn(Fetch):
40 """Class to fetch a module or modules from svn repositories""" 36 """Class to fetch a module or modules from svn repositories"""
@@ -47,32 +43,54 @@ class Svn(Fetch):
47 def localpath(self, url, ud, d): 43 def localpath(self, url, ud, d):
48 if not "module" in ud.parm: 44 if not "module" in ud.parm:
49 raise MissingParameterError("svn method needs a 'module' parameter") 45 raise MissingParameterError("svn method needs a 'module' parameter")
50 else:
51 ud.module = ud.parm["module"]
52 46
53 ud.revision = "" 47 ud.module = ud.parm["module"]
54 if 'rev' in ud.parm: 48
55 ud.revision = ud.parm['rev'] 49 # Create paths to svn checkouts
50 relpath = ud.path
51 if relpath.startswith('/'):
52 # Remove leading slash as os.path.join can't cope
53 relpath = relpath[1:]
54 ud.pkgdir = os.path.join(data.expand('${SVNDIR}', d), ud.host, relpath)
55 ud.moddir = os.path.join(ud.pkgdir, ud.module)
56 56
57 if ud.revision: 57 if 'rev' in ud.parm:
58 ud.date = "" 58 ud.date = ""
59 ud.revision = ud.parm['rev']
60 elif 'date' in ud.date:
61 ud.date = ud.parm['date']
62 ud.revision = ""
63 else:
64 #
65 # ***Nasty hacks***
66 # If DATE in unexpanded PV, use ud.date (which is set from SRCDATE)
67 # Will warn people to switch to SRCREV here
68 #
69 # How can we tell when a user has overriden SRCDATE?
70 # check for "get_srcdate" in unexpanded SRCREV - ugly
71 #
72 pv = data.getVar("PV", d, 0)
73 if "DATE" in pv:
74 ud.revision = ""
75 else:
76 rev = data.getVar("SRCREV", d, 0)
77 if "get_srcrev" in rev:
78 ud.revision = self.latest_revision(url, ud, d)
79 else:
80 ud.revision = rev
81 ud.date = ""
59 82
60 ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d) 83 ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)
61 84
62 return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) 85 return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
63 86
64 def forcefetch(self, url, ud, d): 87 def _buildsvncommand(self, ud, d, command):
65 if (ud.date == "now"): 88 """
66 return True 89 Build up an svn commandline based on ud
67 return False 90 command is "fetch", "update", "info"
68 91 """
69 def go(self, loc, ud, d):
70 """Fetch url"""
71 92
72 # try to use the tarball stash 93 basecmd = data.expand('${FETCHCMD_svn}', d)
73 if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile):
74 bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping svn checkout." % ud.localpath)
75 return
76 94
77 proto = "svn" 95 proto = "svn"
78 if "proto" in ud.parm: 96 if "proto" in ud.parm:
@@ -84,12 +102,8 @@ class Svn(Fetch):
84 102
85 svnroot = ud.host + ud.path 103 svnroot = ud.host + ud.path
86 104
87 # either use the revision, or SRCDATE in braces, or nothing for SRCDATE = "now" 105 # either use the revision, or SRCDATE in braces,
88 options = [] 106 options = []
89 if ud.revision:
90 options.append("-r %s" % ud.revision)
91 elif ud.date != "now":
92 options.append("-r {%s}" % ud.date)
93 107
94 if ud.user: 108 if ud.user:
95 options.append("--username %s" % ud.user) 109 options.append("--username %s" % ud.user)
@@ -97,48 +111,93 @@ class Svn(Fetch):
97 if ud.pswd: 111 if ud.pswd:
98 options.append("--password %s" % ud.pswd) 112 options.append("--password %s" % ud.pswd)
99 113
100 localdata = data.createCopy(d) 114 if command is "info":
101 data.setVar('OVERRIDES', "svn:%s" % data.getVar('OVERRIDES', localdata), localdata) 115 svncmd = "%s info %s %s://%s/%s/" % (basecmd, " ".join(options), proto, svnroot, ud.module)
102 data.update_data(localdata) 116 else:
103 117 if ud.revision:
104 data.setVar('SVNROOT', "%s://%s/%s" % (proto, svnroot, ud.module), localdata) 118 options.append("-r %s" % ud.revision)
105 data.setVar('SVNCOOPTS', " ".join(options), localdata) 119 elif ud.date:
106 data.setVar('SVNMODULE', ud.module, localdata) 120 options.append("-r {%s}" % ud.date)
107 svncmd = data.getVar('FETCHCOMMAND', localdata, 1) 121
108 svnupcmd = data.getVar('UPDATECOMMAND', localdata, 1) 122 if command is "fetch":
123 svncmd = "%s co %s %s://%s/%s %s" % (basecmd, " ".join(options), proto, svnroot, ud.module, ud.module)
124 elif command is "update":
125 svncmd = "%s update %s" % (basecmd, " ".join(options))
126 else:
127 raise FetchError("Invalid svn command %s" % command)
109 128
110 if svn_rsh: 129 if svn_rsh:
111 svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd) 130 svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd)
112 svnupcmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svnupcmd)
113 131
114 pkg = data.expand('${PN}', d) 132 return svncmd
115 pkgdir = os.path.join(data.expand('${SVNDIR}', localdata), pkg) 133
116 moddir = os.path.join(pkgdir, ud.module) 134 def go(self, loc, ud, d):
117 bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + moddir + "'") 135 """Fetch url"""
118 136
119 if os.access(os.path.join(moddir, '.svn'), os.R_OK): 137 # try to use the tarball stash
138 if Fetch.try_mirror(d, ud.localfile):
139 bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping svn checkout." % ud.localpath)
140 return
141
142 bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + ud.moddir + "'")
143
144 if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
145 svnupdatecmd = self._buildsvncommand(ud, d, "update")
120 bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc) 146 bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc)
121 # update sources there 147 # update sources there
122 os.chdir(moddir) 148 os.chdir(ud.moddir)
123 bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svnupcmd) 149 bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svnupdatecmd)
124 myret = os.system(svnupcmd) 150 runfetchcmd(svnupdatecmd, d)
125 else: 151 else:
152 svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
126 bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc) 153 bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
127 # check out sources there 154 # check out sources there
128 bb.mkdirhier(pkgdir) 155 bb.mkdirhier(ud.pkgdir)
129 os.chdir(pkgdir) 156 os.chdir(ud.pkgdir)
130 bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svncmd) 157 bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svnfetchcmd)
131 myret = os.system(svncmd) 158 runfetchcmd(svnfetchcmd, d)
132
133 if myret != 0:
134 raise FetchError(ud.module)
135 159
136 os.chdir(pkgdir) 160 os.chdir(ud.pkgdir)
137 # tar them up to a defined filename 161 # tar them up to a defined filename
138 myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module))) 162 try:
139 if myret != 0: 163 runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)), d)
164 except:
165 t, v, tb = sys.exc_info()
140 try: 166 try:
141 os.unlink(ud.localpath) 167 os.unlink(ud.localpath)
142 except OSError: 168 except OSError:
143 pass 169 pass
144 raise FetchError(ud.module) 170 raise t, v, tb
171
172 def suppports_srcrev(self):
173 return True
174
175 def _revision_key(self, url, ud, d):
176 """
177 Return a unique key for the url
178 """
179 return "svn:" + ud.moddir
180
181 def _latest_revision(self, url, ud, d):
182 """
183 Return the latest upstream revision number
184 """
185 bb.msg.debug(2, bb.msg.domain.Fetcher, "SVN fetcher hitting network for %s" % url)
186
187 output = runfetchcmd("LANG=C LC_ALL=C " + self._buildsvncommand(ud, d, "info"), d, True)
188
189 revision = None
190 for line in output.splitlines():
191 if "Last Changed Rev" in line:
192 revision = line.split(":")[1].strip()
193
194 return revision
195
196 def _sortable_revision(self, url, ud, d):
197 """
198 Return a sortable revision number which in our case is the revision number
199 (use the cached version to avoid network access)
200 """
201
202 return self.latest_revision(url, ud, d)
203
diff --git a/bitbake/lib/bb/msg.py b/bitbake/lib/bb/msg.py
index 71b0b05b77..98cb6e6bf3 100644
--- a/bitbake/lib/bb/msg.py
+++ b/bitbake/lib/bb/msg.py
@@ -37,6 +37,7 @@ domain = bb.utils.Enum(
37 'Depends', 37 'Depends',
38 'Fetcher', 38 'Fetcher',
39 'Parsing', 39 'Parsing',
40 'PersistData',
40 'Provider', 41 'Provider',
41 'RunQueue', 42 'RunQueue',
42 'TaskData', 43 'TaskData',
diff --git a/bitbake/lib/bb/parse/parse_py/BBHandler.py b/bitbake/lib/bb/parse/parse_py/BBHandler.py
index 20fa60355e..5f8426df24 100644
--- a/bitbake/lib/bb/parse/parse_py/BBHandler.py
+++ b/bitbake/lib/bb/parse/parse_py/BBHandler.py
@@ -400,14 +400,14 @@ def set_additional_vars(file, d, include):
400 400
401 from bb import fetch 401 from bb import fetch
402 try: 402 try:
403 fetch.init(src_uri.split(), d) 403 ud = fetch.init(src_uri.split(), d)
404 a += fetch.localpaths(d, ud)
404 except fetch.NoMethodError: 405 except fetch.NoMethodError:
405 pass 406 pass
406 except bb.MalformedUrl,e: 407 except bb.MalformedUrl,e:
407 raise ParseError("Unable to generate local paths for SRC_URI due to malformed uri: %s" % e) 408 raise ParseError("Unable to generate local paths for SRC_URI due to malformed uri: %s" % e)
408
409 a += fetch.localpaths(d)
410 del fetch 409 del fetch
410
411 data.setVar('A', " ".join(a), d) 411 data.setVar('A', " ".join(a), d)
412 412
413 413
diff --git a/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/bitbake/lib/bb/parse/parse_py/ConfHandler.py
index 0e05928d84..6a44e28e90 100644
--- a/bitbake/lib/bb/parse/parse_py/ConfHandler.py
+++ b/bitbake/lib/bb/parse/parse_py/ConfHandler.py
@@ -45,14 +45,17 @@ def localpath(fn, d):
45 if os.path.exists(fn): 45 if os.path.exists(fn):
46 return fn 46 return fn
47 47
48 if "://" not in fn:
49 return fn
50
48 localfn = None 51 localfn = None
49 try: 52 try:
50 localfn = bb.fetch.localpath(fn, d) 53 localfn = bb.fetch.localpath(fn, d, False)
51 except bb.MalformedUrl: 54 except bb.MalformedUrl:
52 pass 55 pass
53 56
54 if not localfn: 57 if not localfn:
55 localfn = fn 58 return fn
56 return localfn 59 return localfn
57 60
58def obtain(fn, data): 61def obtain(fn, data):
@@ -67,14 +70,14 @@ def obtain(fn, data):
67 return localfn 70 return localfn
68 bb.mkdirhier(dldir) 71 bb.mkdirhier(dldir)
69 try: 72 try:
70 bb.fetch.init([fn]) 73 ud = bb.fetch.init([fn], data, False)
71 except bb.fetch.NoMethodError: 74 except bb.fetch.NoMethodError:
72 (type, value, traceback) = sys.exc_info() 75 (type, value, traceback) = sys.exc_info()
73 bb.msg.debug(1, bb.msg.domain.Parsing, "obtain: no method: %s" % value) 76 bb.msg.debug(1, bb.msg.domain.Parsing, "obtain: no method: %s" % value)
74 return localfn 77 return localfn
75 78
76 try: 79 try:
77 bb.fetch.go(data) 80 bb.fetch.go(data, ud)
78 except bb.fetch.MissingParameterError: 81 except bb.fetch.MissingParameterError:
79 (type, value, traceback) = sys.exc_info() 82 (type, value, traceback) = sys.exc_info()
80 bb.msg.debug(1, bb.msg.domain.Parsing, "obtain: missing parameters: %s" % value) 83 bb.msg.debug(1, bb.msg.domain.Parsing, "obtain: missing parameters: %s" % value)
diff --git a/bitbake/lib/bb/persist_data.py b/bitbake/lib/bb/persist_data.py
new file mode 100644
index 0000000000..4df335a6a1
--- /dev/null
+++ b/bitbake/lib/bb/persist_data.py
@@ -0,0 +1,94 @@
1# BitBake Persistent Data Store
2#
3# Copyright (C) 2007 Richard Purdie
4#
5# This program is free software; you can redistribute it and/or modify
6# it under the terms of the GNU General Public License version 2 as
7# published by the Free Software Foundation.
8#
9# This program is distributed in the hope that it will be useful,
10# but WITHOUT ANY WARRANTY; without even the implied warranty of
11# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12# GNU General Public License for more details.
13#
14# You should have received a copy of the GNU General Public License along
15# with this program; if not, write to the Free Software Foundation, Inc.,
16# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
17
18import bb, os
19
20try:
21 import sqlite3
22except ImportError:
23 try:
24 from pysqlite2 import dbapi2 as sqlite3
25 except ImportError:
26 bb.msg.fatal(bb.msg.domain.PersistData, "Importing sqlite3 and pysqlite2 failed, please install one of them. A 'python-pysqlite2' like package is likely to be what you need.")
27
28class PersistData:
29 """
30 BitBake Persistent Data Store
31
32 Used to store data in a central location such that other threads/tasks can
33 access them at some future date.
34
35 The "domain" is used as a key to isolate each data pool and in this
36 implementation corresponds to an SQL table. The SQL table consists of a
37 simple key and value pair.
38
39 Why sqlite? It handles all the locking issues for us.
40 """
41 def __init__(self, d):
42 self.cachedir = bb.data.getVar("CACHE", d, True)
43 if self.cachedir in [None, '']:
44 bb.msg.fatal(bb.msg.domain.PersistData, "Please set the 'CACHE' variable.")
45 try:
46 os.stat(self.cachedir)
47 except OSError:
48 bb.mkdirhier(self.cachedir)
49
50 self.cachefile = os.path.join(self.cachedir,"bb_persist_data.sqlite3")
51 bb.msg.debug(1, bb.msg.domain.PersistData, "Using '%s' as the persistent data cache" % self.cachefile)
52
53 self.connection = sqlite3.connect(self.cachefile, timeout=5, isolation_level=None)
54
55 def addDomain(self, domain):
56 """
57 Should be called before any domain is used
58 Creates it if it doesn't exist.
59 """
60 self.connection.execute("CREATE TABLE IF NOT EXISTS %s(key TEXT, value TEXT);" % domain)
61
62 def delDomain(self, domain):
63 """
64 Removes a domain and all the data it contains
65 """
66 self.connection.execute("DROP TABLE IF EXISTS %s;" % domain)
67
68 def getValue(self, domain, key):
69 """
70 Return the value of a key for a domain
71 """
72 data = self.connection.execute("SELECT * from %s where key=?;" % domain, [key])
73 for row in data:
74 return row[1]
75
76 def setValue(self, domain, key, value):
77 """
78 Sets the value of a key for a domain
79 """
80 data = self.connection.execute("SELECT * from %s where key=?;" % domain, [key])
81 rows = 0
82 for row in data:
83 rows = rows + 1
84 if rows:
85 self.connection.execute("UPDATE %s SET value=? WHERE key=?;" % domain, [value, key])
86 else:
87 self.connection.execute("INSERT into %s(key, value) values (?, ?);" % domain, [key, value])
88
89 def delValue(self, domain, key):
90 """
91 Deletes a key/value pair
92 """
93 self.connection.execute("DELETE from %s where key=?;" % domain, [key])
94
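
A minimal usage sketch for the new PersistData store (d is assumed to be a data store with CACHE set to a writable directory; the domain and key below are illustrative only):

    from bb import persist_data

    pd = persist_data.PersistData(d)
    pd.addDomain("BB_URI_HEADREVS")
    pd.setValue("BB_URI_HEADREVS", "git:git.example.com.project", "0123456789abcdef")
    print pd.getValue("BB_URI_HEADREVS", "git:git.example.com.project")
    pd.delValue("BB_URI_HEADREVS", "git:git.example.com.project")
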