Diffstat (limited to 'bitbake/lib/bb/cooker.py')
-rw-r--r--   bitbake/lib/bb/cooker.py   31
1 file changed, 14 insertions, 17 deletions
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
index 0761f06e1c..dc131939ed 100644
--- a/bitbake/lib/bb/cooker.py
+++ b/bitbake/lib/bb/cooker.py
@@ -26,6 +26,7 @@ import json
 import pickle
 import codecs
 import hashserv
+import ctypes

 logger = logging.getLogger("BitBake")
 collectlog = logging.getLogger("BitBake.Collection")
@@ -1998,9 +1999,9 @@ class ParsingFailure(Exception):
         Exception.__init__(self, realexception, recipe)

 class Parser(multiprocessing.Process):
-    def __init__(self, jobs, jobid_queue, results, quit, profile):
+    def __init__(self, jobs, next_job_id, results, quit, profile):
         self.jobs = jobs
-        self.jobid_queue = jobid_queue
+        self.next_job_id = next_job_id
         self.results = results
         self.quit = quit
         multiprocessing.Process.__init__(self)
@@ -2065,15 +2066,16 @@ class Parser(multiprocessing.Process):
                 if self.quit.is_set():
                     break

-                jobid = None
-                try:
-                    # Have to wait for all parsers to have forked
-                    jobid = self.jobid_queue.get(True, 30)
-                except (ValueError, OSError, queue.Empty):
-                    havejobs = False
+                job = None
+                if havejobs:
+                    with self.next_job_id.get_lock():
+                        if self.next_job_id.value < len(self.jobs):
+                            job = self.jobs[self.next_job_id.value]
+                            self.next_job_id.value += 1
+                        else:
+                            havejobs = False

-                if jobid is not None:
-                    job = self.jobs[jobid]
+                if job:
                     result = self.parse(*job)
                     # Clear the siggen cache after parsing to control memory usage, its huge
                     bb.parse.siggen.postparsing_clean_cache()
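The hunk above replaces the feeder queue with a shared counter: each parser process takes the counter's lock, claims the next index into the jobs list it inherited at fork time, bumps the counter, and releases the lock, so every job is handed out exactly once and the 30-second queue.get() timeout disappears. Below is a minimal sketch of that worker-side claim loop; the Worker class and do_work() helper are illustrative stand-ins, not the real Parser/parse() code from cooker.py.

import multiprocessing

def do_work(item):
    # Stand-in for the real per-job work (parsing one recipe).
    return item * item

class Worker(multiprocessing.Process):
    def __init__(self, jobs, next_job_id, results):
        # jobs is shared with the parent at fork time; next_job_id is a
        # multiprocessing.Value used as a shared "next index to claim" counter.
        self.jobs = jobs
        self.next_job_id = next_job_id
        self.results = results
        multiprocessing.Process.__init__(self)

    def run(self):
        while True:
            job = None
            # get_lock() serialises the read-and-increment, so each index
            # is claimed by exactly one worker process.
            with self.next_job_id.get_lock():
                if self.next_job_id.value < len(self.jobs):
                    job = self.jobs[self.next_job_id.value]
                    self.next_job_id.value += 1
            if job is None:
                break   # counter exhausted, nothing left to claim
            self.results.put(do_work(job))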
@@ -2086,7 +2088,6 @@ class Parser(multiprocessing.Process):
                 except queue.Full:
                     pending.append(result)
         finally:
-            self.jobs.close()
             self.results.close()
             self.results.join_thread()

@@ -2168,22 +2169,18 @@ class CookerParser(object):
         if self.toparse:
             bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)

-            self.toparse_queue = multiprocessing.Queue(len(self.willparse))
+            next_job_id = multiprocessing.Value(ctypes.c_int, 0)
             self.parser_quit = multiprocessing.Event()
             self.result_queue = multiprocessing.Queue()

             # Have to pass in willparse at fork time so all parsing processes have the unpickleable data
             # then access it by index from the parse queue.
             for i in range(0, self.num_processes):
-                parser = Parser(self.willparse, self.toparse_queue, self.result_queue, self.parser_quit, self.cooker.configuration.profile)
+                parser = Parser(self.willparse, next_job_id, self.result_queue, self.parser_quit, self.cooker.configuration.profile)
                 parser.start()
                 self.process_names.append(parser.name)
                 self.processes.append(parser)

-            for jobid in range(len(self.willparse)):
-                self.toparse_queue.put(jobid)
-            self.toparse_queue.close()
-
         self.results = itertools.chain(self.results, self.parse_generator())

     def shutdown(self, clean=True, eventmsg="Parsing halted due to errors"):
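On the setup side, the counter has to be created before the worker processes are started so that every child inherits a handle to the same shared memory, which is why next_job_id is constructed in start() and passed to each Parser() alongside willparse. With the default fork start method the job list is inherited directly rather than pickled, which is what the existing comment about "unpickleable data" refers to. The following is a compressed sketch of that wiring under those assumptions; run_jobs() is a hypothetical name, and it reuses the illustrative Worker class from the previous snippet.

import ctypes
import multiprocessing

def run_jobs(jobs, num_processes):
    # The Value must exist before start() so every child shares one counter.
    next_job_id = multiprocessing.Value(ctypes.c_int, 0)
    results = multiprocessing.Queue()

    workers = []
    for _ in range(num_processes):
        # Worker is the class defined in the previous sketch.
        w = Worker(jobs, next_job_id, results)
        w.start()
        workers.append(w)

    # Drain the result queue before joining so the workers' queue feeder
    # threads never block on a full pipe.
    collected = [results.get() for _ in jobs]
    for w in workers:
        w.join()
    return collected

if __name__ == "__main__":
    print(sorted(run_jobs(list(range(1, 9)), num_processes=4)))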