Diffstat (limited to 'bitbake/lib/bb/cooker.py')
 bitbake/lib/bb/cooker.py | 43 +++++++++++++++++++++++--------------------
 1 file changed, 23 insertions(+), 20 deletions(-)
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
index c5bfef55d6..2e80986640 100644
--- a/bitbake/lib/bb/cooker.py
+++ b/bitbake/lib/bb/cooker.py
@@ -281,7 +281,6 @@ class BBCooker:
         self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
         self.databuilder.parseBaseConfiguration()
         self.data = self.databuilder.data
-        self.data_hash = self.databuilder.data_hash
         self.extraconfigdata = {}

         eventlog = self.data.getVar("BB_DEFAULT_EVENTLOG")
@@ -315,13 +314,13 @@ class BBCooker:
                 dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db"
                 upstream = self.data.getVar("BB_HASHSERVE_UPSTREAM") or None
                 if upstream:
-                    import socket
                     try:
-                        sock = socket.create_connection(upstream.split(":"), 5)
-                        sock.close()
-                    except socket.error as e:
+                        with hashserv.create_client(upstream) as client:
+                            client.ping()
+                    except (ConnectionError, ImportError) as e:
                         bb.warn("BB_HASHSERVE_UPSTREAM is not valid, unable to connect hash equivalence server at '%s': %s"
                                  % (upstream, repr(e)))
+                        upstream = None

                 self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR")
                 self.hashserv = hashserv.create_server(
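
The hunk above swaps a raw TCP probe for the hashserv client's own ping, and it now clears upstream on failure rather than handing a dead address to create_server(). A minimal sketch of the same check, assuming bitbake/lib is on sys.path so that hashserv imports; check_upstream is an illustrative name, not BitBake API:

    # Sketch: validate an upstream hash equivalence server before using it.
    # Assumes bitbake/lib is on sys.path so 'hashserv' is importable.
    import logging
    import hashserv

    logger = logging.getLogger(__name__)

    def check_upstream(upstream):
        """Return upstream if the server answers a ping, else None."""
        try:
            with hashserv.create_client(upstream) as client:
                client.ping()
            return upstream
        except (ConnectionError, ImportError) as exc:
            logger.warning("hash equivalence server %r unreachable: %r", upstream, exc)
            return None
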
@@ -370,6 +369,11 @@ class BBCooker:
         if not clean:
             bb.parse.BBHandler.cached_statements = {}

+        # If writes were made to any of the data stores, we need to recalculate the data
+        # store cache
+        if hasattr(self, "databuilder"):
+            self.databuilder.calc_datastore_hashes()
+
     def parseConfiguration(self):
         self.updateCacheSync()

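
The new calc_datastore_hashes() call exists because cache validity is keyed on a hash of the configuration data stores: once the surrounding code may have written to them, any previously computed hash is stale. A toy sketch of that invalidation idea, with illustrative names (not BitBake's implementation):

    # Sketch: derive a digest from datastore contents so any write
    # invalidates caches keyed on the old hash.
    import hashlib

    class DataStoreCache:
        def __init__(self):
            self.vars = {}
            self.data_hash = self._calc_hash()

        def _calc_hash(self):
            h = hashlib.sha256()
            for key in sorted(self.vars):
                h.update(("%s=%s" % (key, self.vars[key])).encode("utf-8"))
            return h.hexdigest()

        def set_var(self, key, value):
            self.vars[key] = value              # a write ...
            self.data_hash = self._calc_hash()  # ... forces a fresh hash
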
@@ -680,14 +684,14 @@ class BBCooker:
         bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
         return taskdata, runlist

-    def prepareTreeData(self, pkgs_to_build, task):
+    def prepareTreeData(self, pkgs_to_build, task, halt=False):
         """
         Prepare a runqueue and taskdata object for iteration over pkgs_to_build
         """

         # We set halt to False here to prevent unbuildable targets raising
         # an exception when we're just generating data
-        taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)
+        taskdata, runlist = self.buildTaskData(pkgs_to_build, task, halt, allowincomplete=True)

         return runlist, taskdata

@@ -701,7 +705,7 @@ class BBCooker:
         if not task.startswith("do_"):
             task = "do_%s" % task

-        runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
+        runlist, taskdata = self.prepareTreeData(pkgs_to_build, task, halt=True)
         rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
         rq.rqdata.prepare()
         return self.buildDependTree(rq, taskdata)
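
These two hunks thread a halt flag through prepareTreeData(): data-only consumers keep the old permissive default, while the dependency-tree path opts into hard failures so unbuildable targets raise instead of being silently skipped. A toy sketch of the pass-through pattern, names illustrative:

    def build_task_data(targets, halt):
        unbuildable = [t for t in targets if not t]   # stand-in check
        if unbuildable and halt:
            raise RuntimeError("unbuildable targets: %r" % unbuildable)
        return [t for t in targets if t]

    def prepare_tree_data(targets, halt=False):
        # Default stays permissive; callers that need errors pass halt=True.
        return build_task_data(targets, halt)
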
@@ -1338,7 +1342,7 @@ class BBCooker:
         self.buildSetVars()
         self.reset_mtime_caches()

-        bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)
+        bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.databuilder.data_hash, self.caches_array)

         layername = self.collections[mc].calc_bbfile_priority(fn)[2]
         infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn), layername)
@@ -1459,7 +1463,6 @@ class BBCooker:

             if t in task or getAllTaskSignatures:
                 try:
-                    rq.rqdata.prepare_task_hash(tid)
                     sig.append([pn, t, rq.rqdata.get_task_unihash(tid)])
                 except KeyError:
                     sig.append(self.getTaskSignatures(target, [t])[0])
@@ -1813,8 +1816,8 @@ class CookerCollectFiles(object):
             bb.event.fire(CookerExit(), eventdata)

         # We need to track where we look so that we can know when the cache is invalid. There
-        # is no nice way to do this, this is horrid. We intercept the os.listdir()
-        # (or os.scandir() for python 3.6+) calls while we run glob().
+        # is no nice way to do this, this is horrid. We intercept the os.listdir() and os.scandir()
+        # calls while we run glob().
         origlistdir = os.listdir
         if hasattr(os, 'scandir'):
             origscandir = os.scandir
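
The trick the comment describes: glob() resolves patterns through os.listdir()/os.scandir(), so wrapping those two functions for the duration of the call records every directory consulted, which is exactly the set whose modification times decide cache validity. A standalone sketch of that interception (standard library only; function names are illustrative):

    import glob
    import os

    def glob_with_tracking(pattern):
        """Run glob.glob() while recording every directory it inspects."""
        searched = []
        origlistdir, origscandir = os.listdir, os.scandir

        def listdir(d="."):
            searched.append(d)
            return origlistdir(d)

        def scandir(d="."):
            searched.append(d)
            return origscandir(d)

        os.listdir, os.scandir = listdir, scandir
        try:
            matches = glob.glob(pattern)
        finally:
            # Always restore the real functions, as cooker.py does.
            os.listdir, os.scandir = origlistdir, origscandir
        return matches, searched
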
@@ -2113,7 +2116,7 @@ class CookerParser(object):
         self.mcfilelist = mcfilelist
         self.cooker = cooker
         self.cfgdata = cooker.data
-        self.cfghash = cooker.data_hash
+        self.cfghash = cooker.databuilder.data_hash
         self.cfgbuilder = cooker.databuilder

         # Accounting statistics
@@ -2225,9 +2228,8 @@ class CookerParser(object):

         for process in self.processes:
             process.join()
-            # Added in 3.7, cleans up zombies
-            if hasattr(process, "close"):
-                process.close()
+            # clean up zombies
+            process.close()

         bb.codeparser.parser_cache_save()
         bb.codeparser.parser_cache_savemerge()
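
Process.close() has been part of multiprocessing since Python 3.7, and BitBake's minimum Python is newer than that, so the hasattr() guard was dead code. The join-then-close sequence in miniature:

    import multiprocessing

    def run_worker(target, *args):
        p = multiprocessing.Process(target=target, args=args)
        p.start()
        p.join()    # wait for the worker to exit
        p.close()   # release the Process object's resources immediately;
                    # raises ValueError if the process were still running
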
@@ -2237,12 +2239,13 @@ class CookerParser(object):
         profiles = []
         for i in self.process_names:
             logfile = "profile-parse-%s.log" % i
-            if os.path.exists(logfile):
+            if os.path.exists(logfile) and os.path.getsize(logfile):
                 profiles.append(logfile)

-        pout = "profile-parse.log.processed"
-        bb.utils.process_profilelog(profiles, pout = pout)
-        print("Processed parsing statistics saved to %s" % (pout))
+        if profiles:
+            pout = "profile-parse.log.processed"
+            bb.utils.process_profilelog(profiles, pout = pout)
+            print("Processed parsing statistics saved to %s" % (pout))

     def final_cleanup(self):
         if self.syncthread:
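
The added guards avoid two failure modes: profile processing chokes on zero-length dump files, and running it on an empty list produced a useless output file. For reference, merging per-worker cProfile dumps is plain pstats work; a sketch along the lines of what bb.utils.process_profilelog does (not its actual code):

    import pstats

    def merge_profiles(profiles, pout):
        """Combine several cProfile dump files into one readable report."""
        with open(pout, "w") as f:
            stats = pstats.Stats(profiles[0], stream=f)
            for extra in profiles[1:]:
                stats.add(extra)
            stats.sort_stats("cumulative")
            stats.print_stats()
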