From 228f9a3a2d1991af2f2775af63e795b8b65e0805 Mon Sep 17 00:00:00 2001
From: Richard Purdie
Date: Wed, 16 Nov 2022 23:11:38 +0000
Subject: bitbake: worker/runqueue: Reduce initial data transfer in workerdata

When setting up the worker we were transferring large amounts of data
which aren't needed until task execution time. Defer the fakeroot and
taskdeps data until they're needed for a specific task. This will
duplicate some information when executing different tasks for a given
recipe, but as it is spread over the build run, it shouldn't be an
issue overall.

Also take the opportunity to clean up the overly long argument lists
that were being passed around, at the expense of a few extra dictionary
keys.

(Bitbake rev: 3a82acdcf40bdccd933c4dcef3d7e480f0d7ad3a)

Signed-off-by: Richard Purdie
---
 bitbake/lib/bb/runqueue.py | 52 ++++++++++++++++++++++++++++++++++++----------------
 1 file changed, 36 insertions(+), 16 deletions(-)

(limited to 'bitbake/lib/bb/runqueue.py')

diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py
index 437f4a185c..595a58883e 100644
--- a/bitbake/lib/bb/runqueue.py
+++ b/bitbake/lib/bb/runqueue.py
@@ -1311,10 +1311,6 @@ class RunQueue:
         workerpipe = runQueuePipe(worker.stdout, None, self.cfgData, self, rqexec, fakerootlogs=fakerootlogs)
 
         workerdata = {
-            "taskdeps" : self.rqdata.dataCaches[mc].task_deps,
-            "fakerootenv" : self.rqdata.dataCaches[mc].fakerootenv,
-            "fakerootdirs" : self.rqdata.dataCaches[mc].fakerootdirs,
-            "fakerootnoenv" : self.rqdata.dataCaches[mc].fakerootnoenv,
             "sigdata" : bb.parse.siggen.get_taskdata(),
             "logdefaultlevel" : bb.msg.loggerDefaultLogLevel,
             "build_verbose_shell" : self.cooker.configuration.build_verbose_shell,
@@ -2139,18 +2135,30 @@ class RunQueueExecute:
             startevent = sceneQueueTaskStarted(task, self.stats, self.rq)
             bb.event.fire(startevent, self.cfgData)
 
-            taskdepdata = self.sq_build_taskdepdata(task)
-
             taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn]
-            taskhash = self.rqdata.get_task_hash(task)
-            unihash = self.rqdata.get_task_unihash(task)
+            runtask = {
+                'fn' : taskfn,
+                'task' : task,
+                'taskname' : taskname,
+                'taskhash' : self.rqdata.get_task_hash(task),
+                'unihash' : self.rqdata.get_task_unihash(task),
+                'quieterrors' : True,
+                'appends' : self.cooker.collections[mc].get_file_appends(taskfn),
+                'taskdepdata' : self.sq_build_taskdepdata(task),
+                'dry_run' : False,
+                'taskdep': taskdep,
+                'fakerootenv' : self.rqdata.dataCaches[mc].fakerootenv[taskfn],
+                'fakerootdirs' : self.rqdata.dataCaches[mc].fakerootdirs[taskfn],
+                'fakerootnoenv' : self.rqdata.dataCaches[mc].fakerootnoenv[taskfn]
+            }
+
             if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not self.cooker.configuration.dry_run:
                 if not mc in self.rq.fakeworker:
                     self.rq.start_fakeworker(self, mc)
-                self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, True, self.cooker.collections[mc].get_file_appends(taskfn), taskdepdata, False)) + b"</runtask>")
+                self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps(runtask) + b"</runtask>")
                 self.rq.fakeworker[mc].process.stdin.flush()
             else:
-                self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, True, self.cooker.collections[mc].get_file_appends(taskfn), taskdepdata, False)) + b"</runtask>")
+                self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps(runtask) + b"</runtask>")
                 self.rq.worker[mc].process.stdin.flush()
 
             self.build_stamps[task] = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn, noextra=True)
@@ -2220,11 +2228,23 @@ class RunQueueExecute:
             startevent = runQueueTaskStarted(task, self.stats, self.rq)
             bb.event.fire(startevent, self.cfgData)
 
-            taskdepdata = self.build_taskdepdata(task)
-
             taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn]
-            taskhash = self.rqdata.get_task_hash(task)
-            unihash = self.rqdata.get_task_unihash(task)
+            runtask = {
+                'fn' : taskfn,
+                'task' : task,
+                'taskname' : taskname,
+                'taskhash' : self.rqdata.get_task_hash(task),
+                'unihash' : self.rqdata.get_task_unihash(task),
+                'quieterrors' : False,
+                'appends' : self.cooker.collections[mc].get_file_appends(taskfn),
+                'taskdepdata' : self.build_taskdepdata(task),
+                'dry_run' : self.rqdata.setscene_enforce,
+                'taskdep': taskdep,
+                'fakerootenv' : self.rqdata.dataCaches[mc].fakerootenv[taskfn],
+                'fakerootdirs' : self.rqdata.dataCaches[mc].fakerootdirs[taskfn],
+                'fakerootnoenv' : self.rqdata.dataCaches[mc].fakerootnoenv[taskfn]
+            }
+
             if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not (self.cooker.configuration.dry_run or self.rqdata.setscene_enforce):
                 if not mc in self.rq.fakeworker:
                     try:
@@ -2234,10 +2254,10 @@ class RunQueueExecute:
                         self.rq.state = runQueueFailed
                         self.stats.taskFailed()
                         return True
-                self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, False, self.cooker.collections[mc].get_file_appends(taskfn), taskdepdata, self.rqdata.setscene_enforce)) + b"</runtask>")
+                self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps(runtask) + b"</runtask>")
                 self.rq.fakeworker[mc].process.stdin.flush()
             else:
-                self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, False, self.cooker.collections[mc].get_file_appends(taskfn), taskdepdata, self.rqdata.setscene_enforce)) + b"</runtask>")
+                self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps(runtask) + b"</runtask>")
                 self.rq.worker[mc].process.stdin.flush()
 
             self.build_stamps[task] = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn, noextra=True)
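As a note on the pattern the patch adopts, the sketch below is purely illustrative and is not bitbake's actual code: build_runtask(), the toy datacache dictionary and the sample recipe/task values are invented for this example; only the pickle round trip and the <runtask>...</runtask> framing mirror what runqueue.py does. It shows a small per-task dictionary carrying the fakeroot/taskdep details being serialised at dispatch time, instead of shipping whole per-recipe caches up front in workerdata.

# Illustrative sketch only -- not bitbake's worker protocol. build_runtask(),
# the 'datacache' layout and the sample recipe/task values are hypothetical.
import pickle


def build_runtask(datacache, taskfn, task, taskname, quieterrors, dry_run):
    # Per-task payload: only the slices of the cache this one task needs.
    return {
        'fn': taskfn,
        'task': task,
        'taskname': taskname,
        'quieterrors': quieterrors,
        'dry_run': dry_run,
        'taskdep': datacache['task_deps'][taskfn],
        'fakerootenv': datacache['fakerootenv'][taskfn],
        'fakerootdirs': datacache['fakerootdirs'][taskfn],
        'fakerootnoenv': datacache['fakerootnoenv'][taskfn],
    }


def frame(payload):
    # Same framing idea as the patch: bracket the pickled dict with markers so
    # the receiving worker can find message boundaries on the pipe.
    return b"<runtask>" + pickle.dumps(payload) + b"</runtask>"


def unframe(data):
    # Extract and unpickle the payload between the framing markers.
    start = data.index(b"<runtask>") + len(b"<runtask>")
    end = data.index(b"</runtask>")
    return pickle.loads(data[start:end])


if __name__ == "__main__":
    datacache = {
        'task_deps': {'recipe.bb': {'fakeroot': ['do_install']}},
        'fakerootenv': {'recipe.bb': {'PSEUDO_PREFIX': '/usr'}},
        'fakerootdirs': {'recipe.bb': '/tmp/pseudo'},
        'fakerootnoenv': {'recipe.bb': {}},
    }
    msg = frame(build_runtask(datacache, 'recipe.bb', 42, 'do_install',
                              quieterrors=False, dry_run=False))
    received = unframe(msg)
    assert received['fakerootenv'] == {'PSEUDO_PREFIX': '/usr'}
    print("per-task payload keys:", sorted(received))

The one-time workerdata payload stays small, and the bulkier fakeroot data is only serialised for tasks that are actually dispatched, which is the trade-off the commit message describes.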