-rwxr-xr-x  bitbake/bin/bitbake-worker  32
-rw-r--r--  bitbake/lib/bb/runqueue.py  52
2 files changed, 58 insertions, 26 deletions
diff --git a/bitbake/bin/bitbake-worker b/bitbake/bin/bitbake-worker
index d54044f361..3799b170cb 100755
--- a/bitbake/bin/bitbake-worker
+++ b/bitbake/bin/bitbake-worker
@@ -145,7 +145,16 @@ def sigterm_handler(signum, frame):
     os.killpg(0, signal.SIGTERM)
     sys.exit()
 
-def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, taskhash, unihash, appends, taskdepdata, extraconfigdata, quieterrors=False, dry_run_exec=False):
+def fork_off_task(cfg, data, databuilder, workerdata, extraconfigdata, runtask):
+
+    fn = runtask['fn']
+    task = runtask['task']
+    taskname = runtask['taskname']
+    taskhash = runtask['taskhash']
+    unihash = runtask['unihash']
+    appends = runtask['appends']
+    taskdepdata = runtask['taskdepdata']
+    quieterrors = runtask['quieterrors']
     # We need to setup the environment BEFORE the fork, since
     # a fork() or exec*() activates PSEUDO...
 
@@ -157,8 +166,7 @@ def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, taskha
     uid = os.getuid()
     gid = os.getgid()
 
-
-    taskdep = workerdata["taskdeps"][fn]
+    taskdep = runtask['taskdep']
     if 'umask' in taskdep and taskname in taskdep['umask']:
         umask = taskdep['umask'][taskname]
     elif workerdata["umask"]:
@@ -170,24 +178,24 @@ def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, taskha
     except TypeError:
         pass
 
-    dry_run = cfg.dry_run or dry_run_exec
+    dry_run = cfg.dry_run or runtask['dry_run']
 
     # We can't use the fakeroot environment in a dry run as it possibly hasn't been built
     if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not dry_run:
         fakeroot = True
-        envvars = (workerdata["fakerootenv"][fn] or "").split()
+        envvars = (runtask['fakerootenv'] or "").split()
         for key, value in (var.split('=') for var in envvars):
             envbackup[key] = os.environ.get(key)
             os.environ[key] = value
             fakeenv[key] = value
 
-        fakedirs = (workerdata["fakerootdirs"][fn] or "").split()
+        fakedirs = (runtask['fakerootdirs'] or "").split()
         for p in fakedirs:
             bb.utils.mkdirhier(p)
         logger.debug2('Running %s:%s under fakeroot, fakedirs: %s' %
                       (fn, taskname, ', '.join(fakedirs)))
     else:
-        envvars = (workerdata["fakerootnoenv"][fn] or "").split()
+        envvars = (runtask['fakerootnoenv'] or "").split()
         for key, value in (var.split('=') for var in envvars):
             envbackup[key] = os.environ.get(key)
             os.environ[key] = value
@@ -474,11 +482,15 @@ class BitbakeWorker(object):
             sys.exit(0)
 
     def handle_runtask(self, data):
-        fn, task, taskname, taskhash, unihash, quieterrors, appends, taskdepdata, dry_run_exec = pickle.loads(data)
-        workerlog_write("Handling runtask %s %s %s\n" % (task, fn, taskname))
+        runtask = pickle.loads(data)
 
-        pid, pipein, pipeout = fork_off_task(self.cookercfg, self.data, self.databuilder, self.workerdata, fn, task, taskname, taskhash, unihash, appends, taskdepdata, self.extraconfigdata, quieterrors, dry_run_exec)
+        fn = runtask['fn']
+        task = runtask['task']
+        taskname = runtask['taskname']
+
+        workerlog_write("Handling runtask %s %s %s\n" % (task, fn, taskname))
 
+        pid, pipein, pipeout = fork_off_task(self.cookercfg, self.data, self.databuilder, self.workerdata, self.extraconfigdata, runtask)
         self.build_pids[pid] = task
         self.build_pipes[pid] = runQueueWorkerPipe(pipein, pipeout)
 
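
Note: for reference, a minimal sketch of the per-task payload the worker now unpacks. The key names come straight from the hunks above; the values shown are placeholders for illustration, not what runqueue actually sends.

# Sketch only: keys as used by fork_off_task()/handle_runtask() above,
# values are made-up placeholders.
runtask = {
    'fn'            : 'example-recipe.bb',   # placeholder recipe filename
    'task'          : 'example-tid',         # placeholder task identifier
    'taskname'      : 'do_compile',
    'taskhash'      : 'abc123',
    'unihash'       : 'abc123',
    'appends'       : [],
    'taskdepdata'   : {},
    'quieterrors'   : False,
    'dry_run'       : False,
    'taskdep'       : {},   # previously workerdata["taskdeps"][fn]
    'fakerootenv'   : '',   # previously workerdata["fakerootenv"][fn]
    'fakerootdirs'  : '',   # previously workerdata["fakerootdirs"][fn]
    'fakerootnoenv' : '',   # previously workerdata["fakerootnoenv"][fn]
}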
diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py
index 437f4a185c..595a58883e 100644
--- a/bitbake/lib/bb/runqueue.py
+++ b/bitbake/lib/bb/runqueue.py
@@ -1311,10 +1311,6 @@ class RunQueue:
         workerpipe = runQueuePipe(worker.stdout, None, self.cfgData, self, rqexec, fakerootlogs=fakerootlogs)
 
         workerdata = {
-            "taskdeps" : self.rqdata.dataCaches[mc].task_deps,
-            "fakerootenv" : self.rqdata.dataCaches[mc].fakerootenv,
-            "fakerootdirs" : self.rqdata.dataCaches[mc].fakerootdirs,
-            "fakerootnoenv" : self.rqdata.dataCaches[mc].fakerootnoenv,
             "sigdata" : bb.parse.siggen.get_taskdata(),
             "logdefaultlevel" : bb.msg.loggerDefaultLogLevel,
             "build_verbose_shell" : self.cooker.configuration.build_verbose_shell,
@@ -2139,18 +2135,30 @@ class RunQueueExecute:
             startevent = sceneQueueTaskStarted(task, self.stats, self.rq)
             bb.event.fire(startevent, self.cfgData)
 
-            taskdepdata = self.sq_build_taskdepdata(task)
-
             taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn]
-            taskhash = self.rqdata.get_task_hash(task)
-            unihash = self.rqdata.get_task_unihash(task)
+            runtask = {
+                'fn' : taskfn,
+                'task' : task,
+                'taskname' : taskname,
+                'taskhash' : self.rqdata.get_task_hash(task),
+                'unihash' : self.rqdata.get_task_unihash(task),
+                'quieterrors' : True,
+                'appends' : self.cooker.collections[mc].get_file_appends(taskfn),
+                'taskdepdata' : self.sq_build_taskdepdata(task),
+                'dry_run' : False,
+                'taskdep': taskdep,
+                'fakerootenv' : self.rqdata.dataCaches[mc].fakerootenv[taskfn],
+                'fakerootdirs' : self.rqdata.dataCaches[mc].fakerootdirs[taskfn],
+                'fakerootnoenv' : self.rqdata.dataCaches[mc].fakerootnoenv[taskfn]
+            }
+
             if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not self.cooker.configuration.dry_run:
                 if not mc in self.rq.fakeworker:
                     self.rq.start_fakeworker(self, mc)
-                self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, True, self.cooker.collections[mc].get_file_appends(taskfn), taskdepdata, False)) + b"</runtask>")
+                self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps(runtask) + b"</runtask>")
                 self.rq.fakeworker[mc].process.stdin.flush()
             else:
-                self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, True, self.cooker.collections[mc].get_file_appends(taskfn), taskdepdata, False)) + b"</runtask>")
+                self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps(runtask) + b"</runtask>")
                 self.rq.worker[mc].process.stdin.flush()
 
             self.build_stamps[task] = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn, noextra=True)
@@ -2220,11 +2228,23 @@ class RunQueueExecute:
             startevent = runQueueTaskStarted(task, self.stats, self.rq)
             bb.event.fire(startevent, self.cfgData)
 
-            taskdepdata = self.build_taskdepdata(task)
-
             taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn]
-            taskhash = self.rqdata.get_task_hash(task)
-            unihash = self.rqdata.get_task_unihash(task)
+            runtask = {
+                'fn' : taskfn,
+                'task' : task,
+                'taskname' : taskname,
+                'taskhash' : self.rqdata.get_task_hash(task),
+                'unihash' : self.rqdata.get_task_unihash(task),
+                'quieterrors' : False,
+                'appends' : self.cooker.collections[mc].get_file_appends(taskfn),
+                'taskdepdata' : self.build_taskdepdata(task),
+                'dry_run' : self.rqdata.setscene_enforce,
+                'taskdep': taskdep,
+                'fakerootenv' : self.rqdata.dataCaches[mc].fakerootenv[taskfn],
+                'fakerootdirs' : self.rqdata.dataCaches[mc].fakerootdirs[taskfn],
+                'fakerootnoenv' : self.rqdata.dataCaches[mc].fakerootnoenv[taskfn]
+            }
+
             if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not (self.cooker.configuration.dry_run or self.rqdata.setscene_enforce):
                 if not mc in self.rq.fakeworker:
                     try:
@@ -2234,10 +2254,10 @@ class RunQueueExecute:
                         self.rq.state = runQueueFailed
                         self.stats.taskFailed()
                         return True
-                self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, False, self.cooker.collections[mc].get_file_appends(taskfn), taskdepdata, self.rqdata.setscene_enforce)) + b"</runtask>")
+                self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps(runtask) + b"</runtask>")
                 self.rq.fakeworker[mc].process.stdin.flush()
             else:
-                self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, False, self.cooker.collections[mc].get_file_appends(taskfn), taskdepdata, self.rqdata.setscene_enforce)) + b"</runtask>")
+                self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps(runtask) + b"</runtask>")
                 self.rq.worker[mc].process.stdin.flush()
 
             self.build_stamps[task] = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn, noextra=True)
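
Note: the pipe protocol itself is unchanged by this commit; only the pickled payload moves from a positional tuple to the runtask dict. A minimal sketch of the framing seen in the hunks above, where send_runtask and worker_stdin are illustrative stand-ins rather than bitbake APIs (in runqueue.py the stream is self.rq.worker[mc].process.stdin or the fakeworker equivalent):

import pickle

# Sketch only: pickle the runtask dict and wrap it in <runtask>...</runtask>
# markers on the worker's stdin, then flush so the worker sees it promptly.
def send_runtask(worker_stdin, runtask):
    worker_stdin.write(b"<runtask>" + pickle.dumps(runtask) + b"</runtask>")
    worker_stdin.flush()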