author     Richard Purdie <richard.purdie@linuxfoundation.org>  2016-08-15 18:03:29 +0100
committer  Richard Purdie <richard.purdie@linuxfoundation.org>  2016-08-18 10:06:26 +0100
commit     b50b14e37249fb23b8e4f3a86f9b245cba85ca86 (patch)
tree       9e7becee9892a76b9468513414c2e257290fde22 /bitbake
parent     b176189df1163d92aaec8b565bf69dcf76bab458 (diff)
download   poky-b50b14e37249fb23b8e4f3a86f9b245cba85ca86.tar.gz
bitbake: cache: Build datastores from databuilder object
Rather than passing in a datastore to build on top of, use the data builder object in the cache and base the parsed recipe on it. This turns things into proper objects building from one another, rather than messy mixes of static and class functions.

This sets things up so we can support parsing and building multiple configurations.

(Bitbake rev: fef18b445c0cb6b266cd939b9c78d7cbce38663f)

Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
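The caller-visible effect is that cache entry points such as loadDataFull() and parse() stop being class/static methods that take a configuration datastore argument and become instance methods on a cache object constructed from the databuilder. A minimal before/after sketch (cooker, fn and appends stand in for a configured BBCooker instance and the recipe being handled; illustrative only, not code from the tree):

    # Before: class method, configuration datastore passed in explicitly
    # envdata = bb.cache.Cache.loadDataFull(fn, appends, cooker.data)

    # After: the cache object is built from the databuilder and carries
    # the configuration itself, so no cfgData argument is needed
    bb_cache = bb.cache.Cache(cooker.databuilder, cooker.data_hash, cooker.caches_array)
    envdata = bb_cache.loadDataFull(fn, cooker.collection.get_file_appends(fn))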
Diffstat (limited to 'bitbake')
-rwxr-xr-x  bitbake/bin/bitbake-worker  23
-rw-r--r--  bitbake/lib/bb/cache.py     46
-rw-r--r--  bitbake/lib/bb/cooker.py    23
3 files changed, 51 insertions(+), 41 deletions(-)
diff --git a/bitbake/bin/bitbake-worker b/bitbake/bin/bitbake-worker
index 963b4cdf93..1926b89882 100755
--- a/bitbake/bin/bitbake-worker
+++ b/bitbake/bin/bitbake-worker
@@ -115,7 +115,7 @@ def sigterm_handler(signum, frame):
     os.killpg(0, signal.SIGTERM)
     sys.exit()
 
-def fork_off_task(cfg, data, workerdata, fn, task, taskname, appends, taskdepdata, quieterrors=False):
+def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, appends, taskdepdata, quieterrors=False):
     # We need to setup the environment BEFORE the fork, since
     # a fork() or exec*() activates PSEUDO...
 
@@ -193,15 +193,18 @@ def fork_off_task(cfg, data, workerdata, fn, task, taskname, appends, taskdepdat
         if umask:
             os.umask(umask)
 
-        data.setVar("BB_WORKERCONTEXT", "1")
-        data.setVar("BB_TASKDEPDATA", taskdepdata)
-        data.setVar("BUILDNAME", workerdata["buildname"])
-        data.setVar("DATE", workerdata["date"])
-        data.setVar("TIME", workerdata["time"])
-        bb.parse.siggen.set_taskdata(workerdata["sigdata"])
-        ret = 0
         try:
-            the_data = bb.cache.Cache.loadDataFull(fn, appends, data)
+            bb_cache = bb.cache.NoCache(databuilder)
+            the_data = databuilder.data
+            the_data.setVar("BB_WORKERCONTEXT", "1")
+            the_data.setVar("BB_TASKDEPDATA", taskdepdata)
+            the_data.setVar("BUILDNAME", workerdata["buildname"])
+            the_data.setVar("DATE", workerdata["date"])
+            the_data.setVar("TIME", workerdata["time"])
+            bb.parse.siggen.set_taskdata(workerdata["sigdata"])
+            ret = 0
+
+            the_data = bb_cache.loadDataFull(fn, appends)
             the_data.setVar('BB_TASKHASH', workerdata["runq_hash"][task])
 
             bb.utils.set_process_name("%s:%s" % (the_data.getVar("PN", True), taskname.replace("do_", "")))
@@ -389,7 +392,7 @@ class BitbakeWorker(object):
         fn, task, taskname, quieterrors, appends, taskdepdata = pickle.loads(data)
         workerlog_write("Handling runtask %s %s %s\n" % (task, fn, taskname))
 
-        pid, pipein, pipeout = fork_off_task(self.cookercfg, self.data, self.workerdata, fn, task, taskname, appends, taskdepdata, quieterrors)
+        pid, pipein, pipeout = fork_off_task(self.cookercfg, self.data, self.databuilder, self.workerdata, fn, task, taskname, appends, taskdepdata, quieterrors)
 
         self.build_pids[pid] = task
         self.build_pipes[pid] = runQueueWorkerPipe(pipein, pipeout)
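Worker-side summary: the forked child no longer mutates the datastore passed over the pipe; it builds a NoCache from the databuilder and derives both the configuration and the fully parsed recipe data from it. Condensed from the hunk above (surrounding fork and umask handling omitted):

    bb_cache = bb.cache.NoCache(databuilder)        # no persistent cache needed in the worker
    the_data = databuilder.data                     # configuration datastore from the builder
    the_data.setVar("BB_WORKERCONTEXT", "1")        # worker-context variables now set on that datastore
    the_data.setVar("BB_TASKDEPDATA", taskdepdata)
    bb.parse.siggen.set_taskdata(workerdata["sigdata"])
    the_data = bb_cache.loadDataFull(fn, appends)   # then replaced by the parsed recipe datastore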
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
index 8c1fe11317..5f302d68b4 100644
--- a/bitbake/lib/bb/cache.py
+++ b/bitbake/lib/bb/cache.py
@@ -271,35 +271,44 @@ class NoCache(object):
         self.databuilder = databuilder
         self.data = databuilder.data
 
-    @classmethod
-    def loadDataFull(cls, virtualfn, appends, cfgData):
+    def loadDataFull(self, virtualfn, appends):
         """
         Return a complete set of data for fn.
         To do this, we need to parse the file.
         """
-
+        logger.debug(1, "Parsing %s (full)" % virtualfn)
         (fn, virtual) = virtualfn2realfn(virtualfn)
-
-        logger.debug(1, "Parsing %s (full)", fn)
-
-        cfgData.setVar("__ONLYFINALISE", virtual or "default")
-        bb_data = cls.load_bbfile(fn, appends, cfgData)
+        bb_data = self.load_bbfile(virtualfn, appends, virtonly=True)
         return bb_data[virtual]
 
-    @staticmethod
-    def load_bbfile(bbfile, appends, config):
+    def load_bbfile(self, bbfile, appends, virtonly = False):
         """
         Load and parse one .bb build file
         Return the data and whether parsing resulted in the file being skipped
         """
+
+        if virtonly:
+            (bbfile, virtual) = virtualfn2realfn(bbfile)
+            bb_data = self.data.createCopy()
+            bb_data.setVar("__BBMULTICONFIG", mc)
+            bb_data.setVar("__ONLYFINALISE", virtual or "default")
+            datastores = self._load_bbfile(bb_data, bbfile, appends)
+            return datastores
+
+        bb_data = self.data.createCopy()
+        datastores = self._load_bbfile(bb_data, bbfile, appends)
+
+        return datastores
+
+    def _load_bbfile(self, bb_data, bbfile, appends):
         chdir_back = False
 
         # expand tmpdir to include this topdir
-        config.setVar('TMPDIR', config.getVar('TMPDIR', True) or "")
+        bb_data.setVar('TMPDIR', bb_data.getVar('TMPDIR', True) or "")
         bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
         oldpath = os.path.abspath(os.getcwd())
         bb.parse.cached_mtime_noerror(bbfile_loc)
-        bb_data = config.createCopy()
+
         # The ConfHandler first looks if there is a TOPDIR and if not
         # then it would call getcwd().
         # Previously, we chdir()ed to bbfile_loc, called the handler
@@ -431,12 +440,11 @@ class Cache(NoCache):
                                                   len(self.depends_cache)),
                       self.data)
 
-    @classmethod
-    def parse(cls, filename, appends, configdata, caches_array):
+    def parse(self, filename, appends):
         """Parse the specified filename, returning the recipe information"""
         logger.debug(1, "Parsing %s", filename)
         infos = []
-        datastores = cls.load_bbfile(filename, appends, configdata)
+        datastores = self.load_bbfile(filename, appends)
         depends = []
         variants = []
         # Process the "real" fn last so we can store variants list
@@ -451,14 +459,14 @@
             if virtualfn == filename:
                 data.setVar("__VARIANTS", " ".join(variants))
             info_array = []
-            for cache_class in caches_array:
+            for cache_class in self.caches_array:
                 info = cache_class(filename, data)
                 info_array.append(info)
             infos.append((virtualfn, info_array))
 
         return infos
 
-    def load(self, filename, appends, configdata):
+    def load(self, filename, appends):
         """Obtain the recipe information for the specified filename,
         using cached values if available, otherwise parsing.
 
@@ -479,13 +487,13 @@
 
         return cached, infos
 
-    def loadData(self, fn, appends, cfgData, cacheData):
+    def loadData(self, fn, appends, cacheData):
         """Load the recipe info for the specified filename,
         parsing and adding to the cache if necessary, and adding
         the recipe information to the supplied CacheData instance."""
         skipped, virtuals = 0, 0
 
-        cached, infos = self.load(fn, appends, cfgData)
+        cached, infos = self.load(fn, appends)
         for virtualfn, info_array in infos:
             if info_array[0].skipped:
                 logger.debug(1, "Skipping %s: %s", virtualfn, info_array[0].skipreason)
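Net effect on bb/cache.py: NoCache.load_bbfile() now builds the parsing datastore from self.data (the databuilder's configuration) and delegates to the new _load_bbfile() helper, with a virtonly path that loadDataFull() uses to finalise just the requested virtual target. A condensed outline of the new flow (docstrings and the multiconfig marker from the hunk above omitted):

    def load_bbfile(self, bbfile, appends, virtonly=False):
        if virtonly:
            # finalise only the requested virtual target of a virtual:xxx:/path filename
            (bbfile, virtual) = virtualfn2realfn(bbfile)
            bb_data = self.data.createCopy()
            bb_data.setVar("__ONLYFINALISE", virtual or "default")
            return self._load_bbfile(bb_data, bbfile, appends)
        # normal path: parse all variants from a fresh copy of the configuration data
        bb_data = self.data.createCopy()
        return self._load_bbfile(bb_data, bbfile, appends)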
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
index 43c4f78dbc..fe95e73a12 100644
--- a/bitbake/lib/bb/cooker.py
+++ b/bitbake/lib/bb/cooker.py
@@ -617,7 +617,8 @@ class BBCooker:
 
         if fn:
             try:
-                envdata = bb.cache.Cache.loadDataFull(fn, self.collection.get_file_appends(fn), self.data)
+                bb_cache = bb.cache.Cache(self.databuilder, self.data_hash, self.caches_array)
+                envdata = bb_cache.loadDataFull(fn, self.collection.get_file_appends(fn))
             except Exception as e:
                 parselog.exception("Unable to read %s", fn)
                 raise
@@ -1254,9 +1255,9 @@
 
         self.buildSetVars()
 
-        infos = bb.cache.Cache.parse(fn, self.collection.get_file_appends(fn), \
-                                     self.data,
-                                     self.caches_array)
+        bb_cache = bb.cache.Cache(self.databuilder, self.data_hash, self.caches_array)
+
+        infos = bb_cache.parse(fn, self.collection.get_file_appends(fn))
         infos = dict(infos)
 
         fn = bb.cache.realfn2virtual(fn, cls)
@@ -1943,7 +1944,7 @@ class Parser(multiprocessing.Process):
             except queue.Full:
                 pending.append(result)
 
-    def parse(self, filename, appends, caches_array):
+    def parse(self, filename, appends):
         try:
             # Record the filename we're parsing into any events generated
             def parse_filter(self, record):
@@ -1956,7 +1957,7 @@
             bb.event.set_class_handlers(self.handlers.copy())
             bb.event.LogHandler.filter = parse_filter
 
-            return True, bb.cache.Cache.parse(filename, appends, self.cfg, caches_array)
+            return True, self.bb_cache.parse(filename, appends)
         except Exception as exc:
             tb = sys.exc_info()[2]
             exc.recipe = filename
@@ -1995,7 +1996,7 @@ class CookerParser(object):
         for filename in self.filelist:
             appends = self.cooker.collection.get_file_appends(filename)
             if not self.bb_cache.cacheValid(filename, appends):
-                self.willparse.append((filename, appends, cooker.caches_array))
+                self.willparse.append((filename, appends))
             else:
                 self.fromcache.append((filename, appends))
         self.toparse = self.total - len(self.fromcache)
@@ -2013,7 +2014,7 @@
         if self.toparse:
             bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
             def init():
-                Parser.cfg = self.cfgdata
+                Parser.bb_cache = self.bb_cache
                 bb.utils.set_process_name(multiprocessing.current_process().name)
                 multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
                 multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
@@ -2084,7 +2085,7 @@
 
     def load_cached(self):
         for filename, appends in self.fromcache:
-            cached, infos = self.bb_cache.load(filename, appends, self.cfgdata)
+            cached, infos = self.bb_cache.load(filename, appends)
             yield not cached, infos
 
     def parse_generator(self):
@@ -2168,8 +2169,6 @@
         return True
 
     def reparse(self, filename):
-        infos = self.bb_cache.parse(filename,
-                                    self.cooker.collection.get_file_appends(filename),
-                                    self.cfgdata, self.cooker.caches_array)
+        infos = self.bb_cache.parse(filename, self.cooker.collection.get_file_appends(filename))
         for vfn, info_array in infos:
             self.cooker.recipecache.add_from_recipeinfo(vfn, info_array)