author    Joshua Watt <jpewhacker@gmail.com>                   2018-12-18 21:10:29 -0600
committer Richard Purdie <richard.purdie@linuxfoundation.org>  2019-01-03 22:47:06 +0000
commit    aab80b099f6f259e4b57cba2c26dd385d07c5947
tree      545e2936c81567451a15255f9e00768bb0913a60
parent    1ecc47f0831b35c8c92b37a81cef4e43ff9f67b2
download  bitbake-aab80b099f6f259e4b57cba2c26dd385d07c5947.tar.gz
runqueue: Pass unique hash to task
The unique hash is now passed to the task in the BB_UNIHASH variable.

[YOCTO #13030]

Signed-off-by: Joshua Watt <JPEWhacker@gmail.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
-rwxr-xr-x  bin/bitbake-worker |  7
-rw-r--r--  lib/bb/runqueue.py | 10
2 files changed, 10 insertions, 7 deletions
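(Not part of the patch.) Once bitbake-worker exports BB_UNIHASH alongside BB_TASKHASH, task code can read both values from its datastore. A minimal recipe-side sketch, assuming a hypothetical do_report_unihash task:

    python do_report_unihash () {
        # BB_TASKHASH is the existing per-task signature hash; BB_UNIHASH is the
        # unique hash that bitbake-worker exports with this change.
        taskhash = d.getVar('BB_TASKHASH')
        unihash = d.getVar('BB_UNIHASH')
        bb.note("taskhash=%s unihash=%s" % (taskhash, unihash))
    }
    addtask report_unihash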
diff --git a/bin/bitbake-worker b/bin/bitbake-worker
index cd687e6e4..a9e997e1f 100755
--- a/bin/bitbake-worker
+++ b/bin/bitbake-worker
@@ -136,7 +136,7 @@ def sigterm_handler(signum, frame):
     os.killpg(0, signal.SIGTERM)
     sys.exit()
 
-def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, taskhash, appends, taskdepdata, extraconfigdata, quieterrors=False, dry_run_exec=False):
+def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, taskhash, unihash, appends, taskdepdata, extraconfigdata, quieterrors=False, dry_run_exec=False):
     # We need to setup the environment BEFORE the fork, since
     # a fork() or exec*() activates PSEUDO...
@@ -235,6 +235,7 @@ def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, taskha
                 the_data = bb_cache.loadDataFull(fn, appends)
                 the_data.setVar('BB_TASKHASH', taskhash)
+                the_data.setVar('BB_UNIHASH', unihash)
 
                 bb.utils.set_process_name("%s:%s" % (the_data.getVar("PN"), taskname.replace("do_", "")))
@@ -425,10 +426,10 @@ class BitbakeWorker(object):
         sys.exit(0)
 
     def handle_runtask(self, data):
-        fn, task, taskname, taskhash, quieterrors, appends, taskdepdata, dry_run_exec = pickle.loads(data)
+        fn, task, taskname, taskhash, unihash, quieterrors, appends, taskdepdata, dry_run_exec = pickle.loads(data)
         workerlog_write("Handling runtask %s %s %s\n" % (task, fn, taskname))
 
-        pid, pipein, pipeout = fork_off_task(self.cookercfg, self.data, self.databuilder, self.workerdata, fn, task, taskname, taskhash, appends, taskdepdata, self.extraconfigdata, quieterrors, dry_run_exec)
+        pid, pipein, pipeout = fork_off_task(self.cookercfg, self.data, self.databuilder, self.workerdata, fn, task, taskname, taskhash, unihash, appends, taskdepdata, self.extraconfigdata, quieterrors, dry_run_exec)
 
         self.build_pids[pid] = task
         self.build_pipes[pid] = runQueueWorkerPipe(pipein, pipeout)
diff --git a/lib/bb/runqueue.py b/lib/bb/runqueue.py
index 27b188256..de57dcb37 100644
--- a/lib/bb/runqueue.py
+++ b/lib/bb/runqueue.py
@@ -2034,6 +2034,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
             taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn]
             taskhash = self.rqdata.get_task_hash(task)
+            unihash = self.rqdata.get_task_unihash(task)
             if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not (self.cooker.configuration.dry_run or self.rqdata.setscene_enforce):
                 if not mc in self.rq.fakeworker:
                     try:
@@ -2043,10 +2044,10 @@ class RunQueueExecuteTasks(RunQueueExecute):
                         self.rq.state = runQueueFailed
                         self.stats.taskFailed()
                         return True
-                self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, False, self.cooker.collection.get_file_appends(taskfn), taskdepdata, self.rqdata.setscene_enforce)) + b"</runtask>")
+                self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, False, self.cooker.collection.get_file_appends(taskfn), taskdepdata, self.rqdata.setscene_enforce)) + b"</runtask>")
                 self.rq.fakeworker[mc].process.stdin.flush()
             else:
-                self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, False, self.cooker.collection.get_file_appends(taskfn), taskdepdata, self.rqdata.setscene_enforce)) + b"</runtask>")
+                self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, False, self.cooker.collection.get_file_appends(taskfn), taskdepdata, self.rqdata.setscene_enforce)) + b"</runtask>")
                 self.rq.worker[mc].process.stdin.flush()
 
             self.build_stamps[task] = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn, noextra=True)
@@ -2462,13 +2463,14 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
             taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn]
             taskhash = self.rqdata.get_task_hash(task)
+            unihash = self.rqdata.get_task_unihash(task)
             if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not self.cooker.configuration.dry_run:
                 if not mc in self.rq.fakeworker:
                     self.rq.start_fakeworker(self, mc)
-                self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, True, self.cooker.collection.get_file_appends(taskfn), taskdepdata, False)) + b"</runtask>")
+                self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, True, self.cooker.collection.get_file_appends(taskfn), taskdepdata, False)) + b"</runtask>")
                 self.rq.fakeworker[mc].process.stdin.flush()
             else:
-                self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, True, self.cooker.collection.get_file_appends(taskfn), taskdepdata, False)) + b"</runtask>")
+                self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, True, self.cooker.collection.get_file_appends(taskfn), taskdepdata, False)) + b"</runtask>")
                 self.rq.worker[mc].process.stdin.flush()
 
             self.build_stamps[task] = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn, noextra=True)
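For illustration only (placeholder values, not real task data): the <runtask> message now carries unihash between taskhash and the quieterrors flag, so the sender in runqueue.py and the receiver in bitbake-worker must agree on the widened field order. A rough round-trip sketch:

    import pickle

    # Placeholder task data, only to show the field order of the widened tuple.
    taskfn, task, taskname = "/path/to/example.bb", "example-task-id", "do_compile"
    taskhash, unihash = "taskhash-placeholder", "unihash-placeholder"
    quieterrors, appends, taskdepdata, dry_run_exec = False, [], {}, False

    # Sender side (compare the runqueue.py hunks above): unihash follows taskhash.
    msg = (b"<runtask>"
           + pickle.dumps((taskfn, task, taskname, taskhash, unihash,
                           quieterrors, appends, taskdepdata, dry_run_exec))
           + b"</runtask>")

    # Receiver side (compare handle_runtask above): strip the markers, unpack in
    # the same order, then pass unihash to fork_off_task, which sets BB_UNIHASH.
    body = msg[len(b"<runtask>"):-len(b"</runtask>")]
    (fn, task, taskname, taskhash, unihash,
     quieterrors, appends, taskdepdata, dry_run_exec) = pickle.loads(body)
    assert unihash == "unihash-placeholder"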