author     Richard Purdie <richard@openedhand.com>    2007-08-11 22:42:15 +0000
committer  Richard Purdie <richard@openedhand.com>    2007-08-11 22:42:15 +0000
commit     18026165c3086b77253663fb12d5b7470de8f2a1 (patch)
tree       c07368e40fa2d1ae1c39947b66474b45dd672130
parent     0197eb2d870263b983ba217aca69ffe9f7708eb5 (diff)
download   poky-18026165c3086b77253663fb12d5b7470de8f2a1.tar.gz
bitbake: Sync with upstream
git-svn-id: https://svn.o-hand.com/repos/poky/trunk@2480 311d38ba-8fff-0310-9ca6-ca027cbcb966
-rw-r--r--   bitbake/ChangeLog                              |   6
-rwxr-xr-x   bitbake/bin/bitbake                            |  19
-rw-r--r--   bitbake/lib/bb/fetch/__init__.py               | 103
-rw-r--r--   bitbake/lib/bb/parse/parse_py/BBHandler.py     |  40
-rw-r--r--   bitbake/lib/bb/parse/parse_py/ConfHandler.py   |   8
5 files changed, 92 insertions, 84 deletions
diff --git a/bitbake/ChangeLog b/bitbake/ChangeLog
index fd998496ec..c5641836d1 100644
--- a/bitbake/ChangeLog
+++ b/bitbake/ChangeLog
@@ -7,6 +7,12 @@ Changes in Bitbake 1.8.x:
     - Sync fetcher code with that in trunk, adding SRCREV support for svn
     - Add ConfigParsed Event after configuration parsing is complete
     - data.emit_var() - only call getVar if we need the variable
+    - Stop generating the A variable (seems to be legacy code)
+    - Make sure intertask depends get processed correcting in recursive depends
+    - Add pn-PN to overrides when evaluating PREFERRED_VERSION
+    - Improve the progress indicator by skipping tasks that have
+      already run before starting the build rather than during it
+    - Add profiling option (-P)
 
 Changes in Bitbake 1.8.6:
     - Correctly redirect stdin when forking
diff --git a/bitbake/bin/bitbake b/bitbake/bin/bitbake
index 4b212adc2d..8b69a0a33f 100755
--- a/bitbake/bin/bitbake
+++ b/bitbake/bin/bitbake
@@ -102,6 +102,8 @@ Default BBFILES are the .bb files in the current directory.""" )
     parser.add_option( "-l", "--log-domains", help = """Show debug logging for the specified logging domains""",
         action = "append", dest = "debug_domains", default = [] )
 
+    parser.add_option( "-P", "--profile", help = "profile the command and print a report",
+        action = "store_true", dest = "profile", default = False )
 
     options, args = parser.parse_args(sys.argv)
 
@@ -110,8 +112,23 @@ Default BBFILES are the .bb files in the current directory.""" )
     configuration.pkgs_to_build.extend(args[1:])
 
     cooker = bb.cooker.BBCooker(configuration)
-    cooker.cook()
 
+    if configuration.profile:
+        try:
+            import cProfile as profile
+        except:
+            import profile
+
+        profile.runctx("cooker.cook()", globals(), locals(), "profile.log")
+        import pstats
+        p = pstats.Stats('profile.log')
+        p.sort_stats('time')
+        p.print_stats()
+        p.print_callers()
+        p.sort_stats('cumulative')
+        p.print_stats()
+    else:
+        cooker.cook()
 
 if __name__ == "__main__":
     main()
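
Note: the hunk above wraps cooker.cook() in the standard library profiler whenever -P is given, preferring cProfile and falling back to the pure-Python profile module. A minimal, standalone sketch of the same cProfile/pstats pattern; build() here is only a placeholder workload standing in for cooker.cook(), not part of the commit:

# Standalone sketch of the profiling pattern used in the patch above.
try:
    import cProfile as profile
except ImportError:
    import profile          # pure-Python fallback, same runctx() interface
import pstats

def build():
    # placeholder workload; any function works here
    return sum(i * i for i in range(100000))

profile.runctx("build()", globals(), locals(), "profile.log")
p = pstats.Stats("profile.log")
p.sort_stats("time")
p.print_stats(10)           # hottest functions by time spent inside them
p.sort_stats("cumulative")
p.print_stats(10)           # hottest call chains by cumulative time
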
diff --git a/bitbake/lib/bb/fetch/__init__.py b/bitbake/lib/bb/fetch/__init__.py
index f235526452..f739245bd1 100644
--- a/bitbake/lib/bb/fetch/__init__.py
+++ b/bitbake/lib/bb/fetch/__init__.py
@@ -80,6 +80,7 @@ def uri_replace(uri, uri_find, uri_replace, d):
     return bb.encodeurl(result_decoded)
 
 methods = []
+urldata_cache = {}
 
 def fetcher_init(d):
     """
@@ -87,12 +88,16 @@ def fetcher_init(d):
     Calls before this must not hit the cache.
     """
     pd = persist_data.PersistData(d)
-    # Clear any cached url data
-    pd.delDomain("BB_URLDATA")
-    # When to drop SCM head revisions should be controled by user policy
-    pd.delDomain("BB_URI_HEADREVS")
+    # When to drop SCM head revisions controled by user policy
+    srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear"
+    if srcrev_policy == "cache":
+        bb.msg.debug(1, bb.msg.domain.Fetcher, "Keeping SRCREV cache due to cache policy of: %s" % srcrev_policy)
+    elif srcrev_policy == "clear":
+        bb.msg.debug(1, bb.msg.domain.Fetcher, "Clearing SRCREV cache due to cache policy of: %s" % srcrev_policy)
+        pd.delDomain("BB_URI_HEADREVS")
+    else:
+        bb.msg.fatal(bb.msg.domain.Fetcher, "Invalid SRCREV cache policy of: %s" % srcrev_policy)
     # Make sure our domains exist
-    pd.addDomain("BB_URLDATA")
     pd.addDomain("BB_URI_HEADREVS")
     pd.addDomain("BB_URI_LOCALCOUNT")
 
@@ -102,45 +107,30 @@ def fetcher_init(d):
 # 3. localpaths
 # localpath can be called at any time
 
-def init(urls, d, cache = True):
+def init(urls, d, setup = True):
     urldata = {}
-
-    if cache:
-        urldata = getdata(d)
+    fn = bb.data.getVar('FILE', d, 1)
+    if fn in urldata_cache:
+        urldata = urldata_cache[fn]
 
     for url in urls:
         if url not in urldata:
-            ud = FetchData(url, d)
-            for m in methods:
-                if m.supports(url, ud, d):
-                    ud.init(m, d)
-                    ud.setup_localpath(d)
-                    break
-            urldata[url] = ud
-
-    if cache:
-        fn = bb.data.getVar('FILE', d, 1)
-        pd = persist_data.PersistData(d)
-        pd.setValue("BB_URLDATA", fn, pickle.dumps(urldata, 0))
+            urldata[url] = FetchData(url, d)
 
-    return urldata
-
-def getdata(d):
-    urldata = {}
-    fn = bb.data.getVar('FILE', d, 1)
-    pd = persist_data.PersistData(d)
-    encdata = pd.getValue("BB_URLDATA", fn)
-    if encdata:
-        urldata = pickle.loads(str(encdata))
+    if setup:
+        for url in urldata:
+            if not urldata[url].setup:
+                urldata[url].setup_localpath(d)
 
+    urldata_cache[fn] = urldata
     return urldata
 
-def go(d, urldata = None):
+def go(d):
     """
     Fetch all urls
+    init must have previously been called
     """
-    if not urldata:
-        urldata = getdata(d)
+    urldata = init([], d, True)
 
     for u in urldata:
         ud = urldata[u]
@@ -154,13 +144,12 @@ def go(d, urldata = None):
         if ud.localfile and not m.forcefetch(u, ud, d):
             Fetch.write_md5sum(u, ud, d)
 
-def localpaths(d, urldata = None):
+def localpaths(d):
     """
     Return a list of the local filenames, assuming successful fetch
     """
     local = []
-    if not urldata:
-        urldata = getdata(d)
+    urldata = init([], d, True)
 
     for u in urldata:
         ud = urldata[u]
@@ -177,25 +166,14 @@ def get_srcrev(d):
     have been set.
     """
     scms = []
-    urldata = getdata(d)
-    if len(urldata) == 0:
-        src_uri = bb.data.getVar('SRC_URI', d, 1).split()
-        for url in src_uri:
-            if url not in urldata:
-                ud = FetchData(url, d)
-                for m in methods:
-                    if m.supports(url, ud, d):
-                        ud.init(m, d)
-                        break
-                urldata[url] = ud
-            if ud.method.suppports_srcrev():
-                scms.append(url)
-                ud.setup_localpath(d)
-    else:
-        for u in urldata:
-            ud = urldata[u]
-            if ud.method.suppports_srcrev():
-                scms.append(u)
+    # Only call setup_localpath on URIs which suppports_srcrev()
+    urldata = init(bb.data.getVar('SRC_URI', d, 1).split(), d, False)
+    for u in urldata:
+        ud = urldata[u]
+        if ud.method.suppports_srcrev():
+            if not ud.setup:
+                ud.setup_localpath(d)
+            scms.append(u)
 
     if len(scms) == 0:
         bb.msg.error(bb.msg.domain.Fetcher, "SRCREV was used yet no valid SCM was found in SRC_URI")
@@ -212,7 +190,7 @@ def localpath(url, d, cache = True):
     Called from the parser with cache=False since the cache isn't ready
     at this point. Also called from classed in OE e.g. patch.bbclass
     """
-    ud = init([url], d, cache)
+    ud = init([url], d)
     if ud[url].method:
         return ud[url].localpath
     return url
@@ -252,17 +230,22 @@ def runfetchcmd(cmd, d, quiet = False):
     return output
 
 class FetchData(object):
-    """Class for fetcher variable store"""
+    """
+    A class which represents the fetcher state for a given URI.
+    """
     def __init__(self, url, d):
         self.localfile = ""
         (self.type, self.host, self.path, self.user, self.pswd, self.parm) = bb.decodeurl(data.expand(url, d))
         self.date = Fetch.getSRCDate(self, d)
         self.url = url
-
-    def init(self, method, d):
-        self.method = method
+        self.setup = False
+        for m in methods:
+            if m.supports(url, self, d):
+                self.method = m
+                break
 
     def setup_localpath(self, d):
+        self.setup = True
         if "localpath" in self.parm:
             self.localpath = self.parm["localpath"]
         else:
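
Note: the fetcher rework above drops the pickled per-file BB_URLDATA persistence in favour of an in-memory urldata_cache keyed on the recipe's FILE variable, defers setup_localpath() until a caller actually needs it, and only clears cached SCM head revisions when BB_SRCREV_POLICY is unset or "clear" (a "cache" policy keeps them). A rough sketch of that lazy, per-key memoization pattern; UrlData, _cache and the download path are illustrative stand-ins, not the real FetchData/bb.data API:

# Sketch of the caching scheme above: one dict of URL objects per key
# (the recipe file name in BitBake), with expensive local-path setup
# deferred until a caller requests it.  Names are illustrative only.
_cache = {}

class UrlData(object):
    def __init__(self, url):
        self.url = url
        self.setup = False

    def setup_localpath(self):
        # expensive work (picking a fetcher, computing paths) would go here
        self.localpath = "downloads/" + self.url.rsplit("/", 1)[-1]
        self.setup = True

def init(urls, key, setup=True):
    urldata = _cache.get(key, {})
    for url in urls:
        if url not in urldata:
            urldata[url] = UrlData(url)
    if setup:
        for url in urldata:
            if not urldata[url].setup:
                urldata[url].setup_localpath()
    _cache[key] = urldata
    return urldata

# First call populates the cache; later calls for the same key (even with an
# empty URL list, as go() and localpaths() now do) reuse the cached objects.
init(["http://example.com/a.tar.gz"], "recipe.bb")
assert init([], "recipe.bb")["http://example.com/a.tar.gz"].setup
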
diff --git a/bitbake/lib/bb/parse/parse_py/BBHandler.py b/bitbake/lib/bb/parse/parse_py/BBHandler.py
index 1afbc6f294..aaa262d3e2 100644
--- a/bitbake/lib/bb/parse/parse_py/BBHandler.py
+++ b/bitbake/lib/bb/parse/parse_py/BBHandler.py
@@ -391,26 +391,26 @@ def set_additional_vars(file, d, include):
     """Deduce rest of variables, e.g. ${A} out of ${SRC_URI}"""
 
     return
-
-#    bb.msg.debug(2, bb.msg.domain.Parsing, "BB %s: set_additional_vars" % file)
+    # Nothing seems to use this variable
+    #bb.msg.debug(2, bb.msg.domain.Parsing, "BB %s: set_additional_vars" % file)
 
-#    src_uri = data.getVar('SRC_URI', d, 1)
-#    if not src_uri:
-#        return
+    #src_uri = data.getVar('SRC_URI', d, 1)
+    #if not src_uri:
+    #    return
 
-#    a = (data.getVar('A', d, 1) or '').split()
+    #a = (data.getVar('A', d, 1) or '').split()
 
-#    from bb import fetch
-#    try:
-#        ud = fetch.init(src_uri.split(), d)
-#        a += fetch.localpaths(d, ud)
-#    except fetch.NoMethodError:
-#        pass
-#    except bb.MalformedUrl,e:
-#        raise ParseError("Unable to generate local paths for SRC_URI due to malformed uri: %s" % e)
-#    del fetch
+    #from bb import fetch
+    #try:
+    #    ud = fetch.init(src_uri.split(), d)
+    #    a += fetch.localpaths(d, ud)
+    #except fetch.NoMethodError:
+    #    pass
+    #except bb.MalformedUrl,e:
+    #    raise ParseError("Unable to generate local paths for SRC_URI due to malformed uri: %s" % e)
+    #del fetch
 
-#    data.setVar('A', " ".join(a), d)
+    #data.setVar('A', " ".join(a), d)
 
 
 # Add us to the handlers list
diff --git a/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/bitbake/lib/bb/parse/parse_py/ConfHandler.py
index 6a44e28e90..6311e76902 100644
--- a/bitbake/lib/bb/parse/parse_py/ConfHandler.py
+++ b/bitbake/lib/bb/parse/parse_py/ConfHandler.py
@@ -70,14 +70,14 @@ def obtain(fn, data):
         return localfn
     bb.mkdirhier(dldir)
     try:
-        ud = bb.fetch.init([fn], data, False)
+        bb.fetch.init([fn], data)
     except bb.fetch.NoMethodError:
         (type, value, traceback) = sys.exc_info()
         bb.msg.debug(1, bb.msg.domain.Parsing, "obtain: no method: %s" % value)
         return localfn
 
     try:
-        bb.fetch.go(data, ud)
+        bb.fetch.go(data)
     except bb.fetch.MissingParameterError:
         (type, value, traceback) = sys.exc_info()
         bb.msg.debug(1, bb.msg.domain.Parsing, "obtain: missing parameters: %s" % value)
@@ -181,7 +181,9 @@ def feeder(lineno, s, fn, data):
             if val == None:
                 val = groupd["value"]
         elif "colon" in groupd and groupd["colon"] != None:
-            val = bb.data.expand(groupd["value"], data)
+            e = data.createCopy()
+            bb.data.update_data(e)
+            val = bb.data.expand(groupd["value"], e)
         elif "append" in groupd and groupd["append"] != None:
             val = "%s %s" % ((getFunc(groupd, key, data) or ""), groupd["value"])
         elif "prepend" in groupd and groupd["prepend"] != None:
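
Note: the last hunk changes how immediate (":=") assignments in configuration files are expanded: the value is now expanded against a copy of the datastore that has had bb.data.update_data() applied, so overrides are honoured at assignment time while the original datastore is left untouched. A rough sketch of the same copy-then-expand idea, using a plain dictionary and simplified override folding instead of the real bb.data API:

# Illustrative only: expand a value against a copy of the store after
# "overrides" have been folded in, leaving the original store unchanged.
import re

def update_data(store):
    # fold VAR_someoverride keys down onto VAR, loosely mimicking OVERRIDES
    for key in list(store):
        if "_" in key:
            base, _, override = key.partition("_")
            if override in store.get("OVERRIDES", "").split(":"):
                store[base] = store[key]

def expand(value, store):
    # replace ${VAR} references from the store, leaving unknowns as-is
    return re.sub(r"\$\{(\w+)\}", lambda m: store.get(m.group(1), m.group(0)), value)

def expand_immediate(value, store):
    e = dict(store)      # work on a copy, as the new ConfHandler code does
    update_data(e)
    return expand(value, e)

store = {"OVERRIDES": "local", "CFLAGS": "-O2", "CFLAGS_local": "-O0 -g"}
print(expand_immediate("${CFLAGS}", store))   # -> -O0 -g
print(store["CFLAGS"])                        # original store unchanged: -O2
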