diff options
author | Chris Larson <chris_larson@mentor.com> | 2010-11-18 20:21:54 -0700 |
---|---|---|
committer | Richard Purdie <rpurdie@linux.intel.com> | 2011-01-04 14:46:42 +0000 |
commit | 32ea7668712a50d8f8b67d5e4558039e5092a485 (patch) | |
tree | 2473f8b1aade6131c7a37fbad2cc4d23998a3a56 | |
parent | 570bec37a898fb502d166a22f20bdb1da8c21c38 (diff) | |
download | poky-32ea7668712a50d8f8b67d5e4558039e5092a485.tar.gz |
Implement parallel parsing support
This utilizes Python's multiprocessing module. The default number of threads
to be used is the same as the number of available processor cores; however,
you can manually set this with the BB_NUMBER_PARSE_THREADS variable.
(Bitbake rev: c7b3ec819549e51e438d293969e205883fee725f)
Signed-off-by: Chris Larson <chris_larson@mentor.com>
Signed-off-by: Richard Purdie <rpurdie@linux.intel.com>
-rw-r--r-- | bitbake/lib/bb/cache.py | 98 | ||||
-rw-r--r-- | bitbake/lib/bb/cooker.py | 135 | ||||
-rw-r--r-- | bitbake/lib/bb/pysh/pyshyacc.py | 1 |
3 files changed, 157 insertions, 77 deletions
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py index 23845bc07b..93dccf21f1 100644 --- a/bitbake/lib/bb/cache.py +++ b/bitbake/lib/bb/cache.py | |||
@@ -232,48 +232,57 @@ class Cache(object): | |||
232 | bb_data = cls.load_bbfile(fn, appends, cfgData) | 232 | bb_data = cls.load_bbfile(fn, appends, cfgData) |
233 | return bb_data[virtual] | 233 | return bb_data[virtual] |
234 | 234 | ||
235 | def loadData(self, fn, appends, cfgData, cacheData): | 235 | @classmethod |
236 | """ | 236 | def parse(cls, filename, appends, configdata): |
237 | Load a subset of data for fn. | 237 | """Parse the specified filename, returning the recipe information""" |
238 | If the cached data is valid we do nothing, | 238 | infos = [] |
239 | To do this, we need to parse the file and set the system | 239 | datastores = cls.load_bbfile(filename, appends, configdata) |
240 | to record the variables accessed. | 240 | depends = set() |
241 | Return the cache status and whether the file was skipped when parsed | 241 | for variant, data in sorted(datastores.iteritems(), |
242 | """ | 242 | key=lambda i: i[0], |
243 | skipped, virtuals = 0, 0 | 243 | reverse=True): |
244 | virtualfn = cls.realfn2virtual(filename, variant) | ||
245 | depends |= (data.getVar("__depends", False) or set()) | ||
246 | if depends and not variant: | ||
247 | data.setVar("__depends", depends) | ||
248 | info = RecipeInfo.from_metadata(filename, data) | ||
249 | infos.append((virtualfn, info)) | ||
250 | return infos | ||
251 | |||
252 | def load(self, filename, appends, configdata): | ||
253 | """Obtain the recipe information for the specified filename, | ||
254 | using cached values if available, otherwise parsing. | ||
255 | |||
256 | Note that if it does parse to obtain the info, it will not | ||
257 | automatically add the information to the cache or to your | ||
258 | CacheData. Use the add or add_info method to do so after | ||
259 | running this, or use loadData instead.""" | ||
260 | cached = self.cacheValid(filename) | ||
261 | if cached: | ||
262 | infos = [] | ||
263 | info = self.depends_cache[filename] | ||
264 | for variant in info.variants: | ||
265 | virtualfn = self.realfn2virtual(filename, variant) | ||
266 | infos.append((virtualfn, self.depends_cache[virtualfn])) | ||
267 | else: | ||
268 | logger.debug(1, "Parsing %s", filename) | ||
269 | return self.parse(filename, appends, configdata) | ||
244 | 270 | ||
245 | if fn not in self.checked: | 271 | return cached, infos |
246 | self.cacheValidUpdate(fn) | ||
247 | 272 | ||
248 | cached = self.cacheValid(fn) | 273 | def loadData(self, fn, appends, cfgData, cacheData): |
249 | if not cached: | 274 | """Load the recipe info for the specified filename, |
250 | logger.debug(1, "Parsing %s", fn) | 275 | parsing and adding to the cache if necessary, and adding |
251 | datastores = self.load_bbfile(fn, appends, cfgData) | 276 | the recipe information to the supplied CacheData instance.""" |
252 | depends = set() | 277 | skipped, virtuals = 0, 0 |
253 | for variant, data in sorted(datastores.iteritems(), | ||
254 | key=lambda i: i[0], | ||
255 | reverse=True): | ||
256 | virtualfn = self.realfn2virtual(fn, variant) | ||
257 | depends |= (data.getVar("__depends", False) or set()) | ||
258 | if depends and not variant: | ||
259 | data.setVar("__depends", depends) | ||
260 | info = RecipeInfo.from_metadata(fn, data) | ||
261 | if not info.nocache: | ||
262 | # The recipe was parsed, and is not marked as being | ||
263 | # uncacheable, so we need to ensure that we write out the | ||
264 | # new cache data. | ||
265 | self.cacheclean = False | ||
266 | self.depends_cache[virtualfn] = info | ||
267 | 278 | ||
268 | info = self.depends_cache[fn] | 279 | cached, infos = self.load(fn, appends, cfgData) |
269 | for variant in info.variants: | 280 | for virtualfn, info in infos: |
270 | virtualfn = self.realfn2virtual(fn, variant) | 281 | if info.skipped: |
271 | vinfo = self.depends_cache[virtualfn] | ||
272 | if vinfo.skipped: | ||
273 | logger.debug(1, "Skipping %s", virtualfn) | 282 | logger.debug(1, "Skipping %s", virtualfn) |
274 | skipped += 1 | 283 | skipped += 1 |
275 | else: | 284 | else: |
276 | cacheData.add_from_recipeinfo(virtualfn, vinfo) | 285 | self.add_info(virtualfn, info, cacheData, not cached) |
277 | virtuals += 1 | 286 | virtuals += 1 |
278 | 287 | ||
279 | return cached, skipped, virtuals | 288 | return cached, skipped, virtuals |
@@ -283,6 +292,9 @@ class Cache(object): | |||
283 | Is the cache valid for fn? | 292 | Is the cache valid for fn? |
284 | Fast version, no timestamps checked. | 293 | Fast version, no timestamps checked. |
285 | """ | 294 | """ |
295 | if fn not in self.checked: | ||
296 | self.cacheValidUpdate(fn) | ||
297 | |||
286 | # Is cache enabled? | 298 | # Is cache enabled? |
287 | if not self.has_cache: | 299 | if not self.has_cache: |
288 | return False | 300 | return False |
@@ -412,14 +424,22 @@ class Cache(object): | |||
412 | def mtime(cachefile): | 424 | def mtime(cachefile): |
413 | return bb.parse.cached_mtime_noerror(cachefile) | 425 | return bb.parse.cached_mtime_noerror(cachefile) |
414 | 426 | ||
415 | def add(self, file_name, data, cacheData): | 427 | def add_info(self, filename, info, cacheData, parsed=None): |
428 | self.depends_cache[filename] = info | ||
429 | cacheData.add_from_recipeinfo(filename, info) | ||
430 | if parsed and not info.nocache: | ||
431 | # The recipe was parsed, and is not marked as being | ||
432 | # uncacheable, so we need to ensure that we write out the | ||
433 | # new cache data. | ||
434 | self.cacheclean = False | ||
435 | |||
436 | def add(self, file_name, data, cacheData, parsed=None): | ||
416 | """ | 437 | """ |
417 | Save data we need into the cache | 438 | Save data we need into the cache |
418 | """ | 439 | """ |
419 | realfn = self.virtualfn2realfn(file_name)[0] | 440 | realfn = self.virtualfn2realfn(file_name)[0] |
420 | info = RecipeInfo.from_metadata(realfn, data) | 441 | info = RecipeInfo.from_metadata(realfn, data) |
421 | self.depends_cache[file_name] = info | 442 | self.add_info(file_name, info, cacheData, parsed) |
422 | cacheData.add_from_recipeinfo(file_name, info) | ||
423 | 443 | ||
424 | @staticmethod | 444 | @staticmethod |
425 | def load_bbfile(bbfile, appends, config): | 445 | def load_bbfile(bbfile, appends, config): |
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py index 6194919e4c..0143c149b8 100644 --- a/bitbake/lib/bb/cooker.py +++ b/bitbake/lib/bb/cooker.py | |||
@@ -25,6 +25,8 @@ from __future__ import print_function | |||
25 | import sys, os, glob, os.path, re, time | 25 | import sys, os, glob, os.path, re, time |
26 | import logging | 26 | import logging |
27 | import sre_constants | 27 | import sre_constants |
28 | import multiprocessing | ||
29 | import signal | ||
28 | from cStringIO import StringIO | 30 | from cStringIO import StringIO |
29 | from contextlib import closing | 31 | from contextlib import closing |
30 | import bb | 32 | import bb |
@@ -976,7 +978,7 @@ class CookerExit(bb.event.Event): | |||
976 | def __init__(self): | 978 | def __init__(self): |
977 | bb.event.Event.__init__(self) | 979 | bb.event.Event.__init__(self) |
978 | 980 | ||
979 | class CookerParser: | 981 | class CookerParser(object): |
980 | def __init__(self, cooker, filelist, masked): | 982 | def __init__(self, cooker, filelist, masked): |
981 | # Internal data | 983 | # Internal data |
982 | self.filelist = filelist | 984 | self.filelist = filelist |
@@ -987,49 +989,106 @@ class CookerParser: | |||
987 | self.cached = 0 | 989 | self.cached = 0 |
988 | self.error = 0 | 990 | self.error = 0 |
989 | self.masked = masked | 991 | self.masked = masked |
990 | self.total = len(filelist) | ||
991 | 992 | ||
992 | self.skipped = 0 | 993 | self.skipped = 0 |
993 | self.virtuals = 0 | 994 | self.virtuals = 0 |
995 | self.total = len(filelist) | ||
994 | 996 | ||
995 | # Pointer to the next file to parse | 997 | # current to the next file to parse |
996 | self.pointer = 0 | 998 | self.current = 0 |
997 | 999 | self.result_queue = None | |
998 | def parse_next(self): | 1000 | self.fromcache = None |
999 | cooker = self.cooker | ||
1000 | if self.pointer < len(self.filelist): | ||
1001 | f = self.filelist[self.pointer] | ||
1002 | |||
1003 | try: | ||
1004 | fromCache, skipped, virtuals = cooker.bb_cache.loadData(f, cooker.get_file_appends(f), cooker.configuration.data, cooker.status) | ||
1005 | if fromCache: | ||
1006 | self.cached += 1 | ||
1007 | else: | ||
1008 | self.parsed += 1 | ||
1009 | |||
1010 | self.skipped += skipped | ||
1011 | self.virtuals += virtuals | ||
1012 | 1001 | ||
1013 | except KeyboardInterrupt: | 1002 | self.launch_processes() |
1014 | cooker.bb_cache.remove(f) | ||
1015 | cooker.bb_cache.sync() | ||
1016 | raise | ||
1017 | except Exception as e: | ||
1018 | self.error += 1 | ||
1019 | cooker.bb_cache.remove(f) | ||
1020 | parselog.exception("Unable to open %s", f) | ||
1021 | except: | ||
1022 | cooker.bb_cache.remove(f) | ||
1023 | raise | ||
1024 | finally: | ||
1025 | bb.event.fire(bb.event.ParseProgress(self.cached, self.parsed, self.skipped, self.masked, self.virtuals, self.error, self.total), cooker.configuration.event_data) | ||
1026 | 1003 | ||
1027 | self.pointer += 1 | 1004 | def launch_processes(self): |
1005 | self.task_queue = multiprocessing.Queue() | ||
1006 | self.result_queue = multiprocessing.Queue() | ||
1007 | |||
1008 | self.fromcache = [] | ||
1009 | cfgdata = self.cooker.configuration.data | ||
1010 | for filename in self.filelist: | ||
1011 | appends = self.cooker.get_file_appends(filename) | ||
1012 | if not self.cooker.bb_cache.cacheValid(filename): | ||
1013 | self.task_queue.put((filename, appends)) | ||
1014 | else: | ||
1015 | self.fromcache.append((filename, appends)) | ||
1016 | |||
1017 | def worker(input, output, cfgdata): | ||
1018 | signal.signal(signal.SIGINT, signal.SIG_IGN) | ||
1019 | for filename, appends in iter(input.get, 'STOP'): | ||
1020 | infos = bb.cache.Cache.parse(filename, appends, cfgdata) | ||
1021 | output.put(infos) | ||
1022 | |||
1023 | self.processes = [] | ||
1024 | num_processes = int(cfgdata.getVar("BB_NUMBER_PARSE_THREADS", True) or | ||
1025 | multiprocessing.cpu_count()) | ||
1026 | for i in xrange(num_processes): | ||
1027 | process = multiprocessing.Process(target=worker, | ||
1028 | args=(self.task_queue, | ||
1029 | self.result_queue, | ||
1030 | cfgdata)) | ||
1031 | process.start() | ||
1032 | self.processes.append(process) | ||
1033 | |||
1034 | def shutdown(self, clean=True): | ||
1035 | self.result_queue.close() | ||
1036 | for process in self.processes: | ||
1037 | if clean: | ||
1038 | self.task_queue.put('STOP') | ||
1039 | else: | ||
1040 | process.terminate() | ||
1041 | self.task_queue.close() | ||
1042 | for process in self.processes: | ||
1043 | process.join() | ||
1044 | self.cooker.bb_cache.sync() | ||
1045 | bb.codeparser.parser_cache_save(self.cooker.configuration.data) | ||
1046 | if self.error > 0: | ||
1047 | raise ParsingErrorsFound() | ||
1048 | |||
1049 | def progress(self): | ||
1050 | bb.event.fire(bb.event.ParseProgress(self.cached, self.parsed, | ||
1051 | self.skipped, self.masked, | ||
1052 | self.virtuals, self.error, | ||
1053 | self.total), | ||
1054 | self.cooker.configuration.event_data) | ||
1028 | 1055 | ||
1029 | if self.pointer >= self.total: | 1056 | def parse_next(self): |
1030 | cooker.bb_cache.sync() | 1057 | cooker = self.cooker |
1031 | bb.codeparser.parser_cache_save(cooker.configuration.data) | 1058 | if self.current >= self.total: |
1032 | if self.error > 0: | 1059 | self.shutdown() |
1033 | raise ParsingErrorsFound | ||
1034 | return False | 1060 | return False |
1061 | |||
1062 | try: | ||
1063 | if self.result_queue.empty() and self.fromcache: | ||
1064 | filename, appends = self.fromcache.pop() | ||
1065 | _, infos = cooker.bb_cache.load(filename, appends, | ||
1066 | self.cooker.configuration.data) | ||
1067 | parsed = False | ||
1068 | else: | ||
1069 | infos = self.result_queue.get() | ||
1070 | parsed = True | ||
1071 | except KeyboardInterrupt: | ||
1072 | self.shutdown(clean=False) | ||
1073 | raise | ||
1074 | except Exception as e: | ||
1075 | self.error += 1 | ||
1076 | parselog.critical(str(e)) | ||
1077 | else: | ||
1078 | if parsed: | ||
1079 | self.parsed += 1 | ||
1080 | else: | ||
1081 | self.cached += 1 | ||
1082 | self.virtuals += len(infos) | ||
1083 | |||
1084 | for virtualfn, info in infos: | ||
1085 | cooker.bb_cache.add_info(virtualfn, info, cooker.status, | ||
1086 | parsed=parsed) | ||
1087 | if info.skipped: | ||
1088 | self.skipped += 1 | ||
1089 | finally: | ||
1090 | self.progress() | ||
1091 | |||
1092 | self.current += 1 | ||
1035 | return True | 1093 | return True |
1094 | |||
diff --git a/bitbake/lib/bb/pysh/pyshyacc.py b/bitbake/lib/bb/pysh/pyshyacc.py index 8bb9927321..3d6f54a58c 100644 --- a/bitbake/lib/bb/pysh/pyshyacc.py +++ b/bitbake/lib/bb/pysh/pyshyacc.py | |||
@@ -648,6 +648,7 @@ def p_error(p): | |||
648 | try: | 648 | try: |
649 | import pyshtables | 649 | import pyshtables |
650 | except ImportError: | 650 | except ImportError: |
651 | import os | ||
651 | outputdir = os.path.dirname(__file__) | 652 | outputdir = os.path.dirname(__file__) |
652 | if not os.access(outputdir, os.W_OK): | 653 | if not os.access(outputdir, os.W_OK): |
653 | outputdir = '' | 654 | outputdir = '' |