author    Richard Purdie <richard.purdie@linuxfoundation.org>  2022-11-16 23:11:38 +0000
committer Richard Purdie <richard.purdie@linuxfoundation.org>  2022-11-20 08:31:28 +0000
commit    228f9a3a2d1991af2f2775af63e795b8b65e0805 (patch)
tree      a1286ce817ecd16378e4793892843a6da18acf34 /bitbake/bin/bitbake-worker
parent    16bc168084cc7b9a092385dfb02d5efc012bed5b (diff)
download  poky-228f9a3a2d1991af2f2775af63e795b8b65e0805.tar.gz
bitbake: worker/runqueue: Reduce initial data transfer in workerdata
When setting up the worker we were transferring large amounts of data which aren't needed until task execution time. Defer the fakeroot and taskdeps data until they're needed for a specific task. This duplicates some information when executing different tasks for a given recipe, but since that is spread over the build run, it shouldn't be an issue overall.

Also take the opportunity to clean up the overly long argument lists that were being passed around, at the expense of extra dictionary keys.

(Bitbake rev: 3a82acdcf40bdccd933c4dcef3d7e480f0d7ad3a)

Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
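For orientation before reading the diff: the change replaces the long positional argument list of fork_off_task() with a single per-task dictionary. A minimal, purely illustrative sketch of such a payload follows; the key names are taken from the diff below, while the values are invented placeholders.

    # Hypothetical example only -- keys mirror those read out of 'runtask'
    # in the diff below; the values are placeholders, not real build data.
    runtask = {
        'fn': '/path/to/recipe.bb',   # recipe file providing the task
        'task': 4,                    # task id within the runqueue
        'taskname': 'do_compile',
        'taskhash': '<taskhash>',
        'unihash': '<unihash>',
        'appends': [],                # bbappend files for the recipe
        'taskdepdata': {},            # dependency data for this task only
        'quieterrors': False,
        'dry_run': False,
        'taskdep': {},                # per-recipe task flags (umask, fakeroot, ...)
        'fakerootenv': '',            # fakeroot data, now sent per task
        'fakerootnoenv': '',
        'fakerootdirs': '',
    }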
Diffstat (limited to 'bitbake/bin/bitbake-worker')
-rwxr-xr-x  bitbake/bin/bitbake-worker | 32
1 file changed, 22 insertions(+), 10 deletions(-)
diff --git a/bitbake/bin/bitbake-worker b/bitbake/bin/bitbake-worker
index d54044f361..3799b170cb 100755
--- a/bitbake/bin/bitbake-worker
+++ b/bitbake/bin/bitbake-worker
@@ -145,7 +145,16 @@ def sigterm_handler(signum, frame):
     os.killpg(0, signal.SIGTERM)
     sys.exit()
 
-def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, taskhash, unihash, appends, taskdepdata, extraconfigdata, quieterrors=False, dry_run_exec=False):
+def fork_off_task(cfg, data, databuilder, workerdata, extraconfigdata, runtask):
+
+    fn = runtask['fn']
+    task = runtask['task']
+    taskname = runtask['taskname']
+    taskhash = runtask['taskhash']
+    unihash = runtask['unihash']
+    appends = runtask['appends']
+    taskdepdata = runtask['taskdepdata']
+    quieterrors = runtask['quieterrors']
     # We need to setup the environment BEFORE the fork, since
     # a fork() or exec*() activates PSEUDO...
 
@@ -157,8 +166,7 @@ def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, taskha
     uid = os.getuid()
     gid = os.getgid()
 
-
-    taskdep = workerdata["taskdeps"][fn]
+    taskdep = runtask['taskdep']
     if 'umask' in taskdep and taskname in taskdep['umask']:
         umask = taskdep['umask'][taskname]
     elif workerdata["umask"]:
@@ -170,24 +178,24 @@ def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, taskha
     except TypeError:
         pass
 
-    dry_run = cfg.dry_run or dry_run_exec
+    dry_run = cfg.dry_run or runtask['dry_run']
 
     # We can't use the fakeroot environment in a dry run as it possibly hasn't been built
     if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not dry_run:
         fakeroot = True
-        envvars = (workerdata["fakerootenv"][fn] or "").split()
+        envvars = (runtask['fakerootenv'] or "").split()
         for key, value in (var.split('=') for var in envvars):
             envbackup[key] = os.environ.get(key)
             os.environ[key] = value
             fakeenv[key] = value
 
-        fakedirs = (workerdata["fakerootdirs"][fn] or "").split()
+        fakedirs = (runtask['fakerootdirs'] or "").split()
         for p in fakedirs:
             bb.utils.mkdirhier(p)
         logger.debug2('Running %s:%s under fakeroot, fakedirs: %s' %
                       (fn, taskname, ', '.join(fakedirs)))
     else:
-        envvars = (workerdata["fakerootnoenv"][fn] or "").split()
+        envvars = (runtask['fakerootnoenv'] or "").split()
         for key, value in (var.split('=') for var in envvars):
             envbackup[key] = os.environ.get(key)
             os.environ[key] = value
@@ -474,11 +482,15 @@ class BitbakeWorker(object):
             sys.exit(0)
 
     def handle_runtask(self, data):
-        fn, task, taskname, taskhash, unihash, quieterrors, appends, taskdepdata, dry_run_exec = pickle.loads(data)
-        workerlog_write("Handling runtask %s %s %s\n" % (task, fn, taskname))
+        runtask = pickle.loads(data)
 
-        pid, pipein, pipeout = fork_off_task(self.cookercfg, self.data, self.databuilder, self.workerdata, fn, task, taskname, taskhash, unihash, appends, taskdepdata, self.extraconfigdata, quieterrors, dry_run_exec)
+        fn = runtask['fn']
+        task = runtask['task']
+        taskname = runtask['taskname']
+
+        workerlog_write("Handling runtask %s %s %s\n" % (task, fn, taskname))
 
+        pid, pipein, pipeout = fork_off_task(self.cookercfg, self.data, self.databuilder, self.workerdata, self.extraconfigdata, runtask)
         self.build_pids[pid] = task
         self.build_pipes[pid] = runQueueWorkerPipe(pipein, pipeout)
 
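As a usage note, the new message format amounts to pickling one small per-task dictionary rather than shipping all fakeroot/taskdeps data in the initial workerdata message. A minimal sketch of that round-trip, assuming the runqueue side simply pickles the dictionary shown earlier (this is illustrative, not the actual runqueue code):

    import pickle

    # Sender side (runqueue, sketch): serialise one task's data just before it runs.
    payload = pickle.dumps(runtask)

    # Receiver side, mirroring handle_runtask() in the diff above.
    received = pickle.loads(payload)
    fn, task, taskname = received['fn'], received['task'], received['taskname']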