bitbake: bitbake: siggen/runqueue: Switch to using RECIPE_SIGGEN_INFO feature for signature dumping

Now that we have cache support for the taskdep/gendep/lookupcache data,
we can switch to using that cooker feature and skip the secondary reparse
when writing the sig files. This does make the initial parse longer but
means the secondary reparse is no longer needed.

At present, parsing with the larger cache isn't optimal, but we have plans
in place which will make this faster than the reparse code being removed
here.

(Bitbake rev: 5951b5b56449855bc2a30146af65eb287a35fcef)

(Bitbake rev: 1252e5bce51ae912ecff9dcc354a371786ff2c72)

Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
author  Richard Purdie  2022-12-09 15:56:19 +00:00
commit  2946c56b23 (parent 4754b1021e)
3 changed files with 37 additions and 33 deletions

bitbake/lib/bb/main.py

@@ -395,6 +395,11 @@ def setup_bitbake(configParams, extrafeatures=None):
         # In status only mode there are no logs and no UI
         logger.addHandler(handler)
 
+    if configParams.dump_signatures:
+        if extrafeatures is None:
+            extrafeatures = []
+        extrafeatures.append(bb.cooker.CookerFeatures.RECIPE_SIGGEN_INFO)
+
     if configParams.server_only:
         featureset = []
         ui_module = None
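
This hunk makes a bitbake -S <handler> invocation (configParams.dump_signatures) request the RECIPE_SIGGEN_INFO cooker feature up front, so the signature-generation data is gathered during the initial parse rather than by a later reparse. A minimal sketch of the same guard-and-append pattern follows; it assumes bitbake's lib/ directory is importable, and the helper name features_for() is purely illustrative, not part of the patch.

# Sketch only, not patch code: mirrors the logic setup_bitbake() gains above.
import bb.cooker

def features_for(dump_signatures, extrafeatures=None):
    """Return the extra cooker features implied by the command line."""
    if dump_signatures:
        if extrafeatures is None:
            extrafeatures = []
        # RECIPE_SIGGEN_INFO asks the cooker to keep per-recipe taskdep,
        # gendep and variable-value data in its cache for later use.
        extrafeatures.append(bb.cooker.CookerFeatures.RECIPE_SIGGEN_INFO)
    return extrafeatures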

bitbake/lib/bb/runqueue.py

@@ -1608,28 +1608,28 @@ class RunQueue:
         else:
             self.rqexe.finish()
 
-    def rq_dump_sigfn(self, fn, options):
-        mc = bb.runqueue.mc_from_tid(fn)
-        the_data = self.cooker.databuilder.parseRecipe(fn, self.cooker.collections[mc].get_file_appends(fn))
-        siggen = bb.parse.siggen
-        dataCaches = self.rqdata.dataCaches
-        siggen.dump_sigfn(fn, dataCaches, options)
+    def _rq_dump_sigtid(self, tids):
+        for tid in tids:
+            (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
+            dataCaches = self.rqdata.dataCaches
+            bb.parse.siggen.dump_sigtask(taskfn, taskname, dataCaches[mc].stamp[taskfn], True)
 
     def dump_signatures(self, options):
-        fns = set()
-        bb.note("Reparsing files to collect dependency data")
+        if bb.cooker.CookerFeatures.RECIPE_SIGGEN_INFO not in self.cooker.featureset:
+            bb.fatal("The dump signatures functionality needs the RECIPE_SIGGEN_INFO feature enabled")
 
-        for tid in self.rqdata.runtaskentries:
-            fn = fn_from_tid(tid)
-            fns.add(fn)
-
         bb.note("Writing task signature files")
 
         max_process = int(self.cfgData.getVar("BB_NUMBER_PARSE_THREADS") or os.cpu_count() or 1)
+        def chunkify(l, n):
+            return [l[i::n] for i in range(n)]
+        tids = chunkify(list(self.rqdata.runtaskentries), max_process)
         # We cannot use the real multiprocessing.Pool easily due to some local data
         # that can't be pickled. This is a cheap multi-process solution.
         launched = []
-        while fns:
+        while tids:
             if len(launched) < max_process:
-                p = Process(target=self.rq_dump_sigfn, args=(fns.pop(), options))
+                p = Process(target=self._rq_dump_sigtid, args=(tids.pop(), ))
                 p.start()
                 launched.append(p)
             for q in launched:
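
The rewritten dump_signatures() no longer collects file names and reparses them; it requires the RECIPE_SIGGEN_INFO feature, splits the run queue's task ids round-robin across up to BB_NUMBER_PARSE_THREADS worker processes, and has each worker write signature files straight from the cached data via dump_sigtask(). Since chunkify() is easy to read past, here is a standalone illustration; the task-id strings are made up for the example.

# Standalone illustration of the chunkify() helper added above: it deals a
# list out round-robin into n buckets, one bucket per worker Process.
def chunkify(l, n):
    return [l[i::n] for i in range(n)]

# Hypothetical task ids, only to show the shape of the split.
tids = ["recipe-a.bb:do_fetch", "recipe-a.bb:do_compile",
        "recipe-b.bb:do_fetch", "recipe-b.bb:do_compile",
        "recipe-c.bb:do_fetch"]

print(chunkify(tids, 2))
# [['recipe-a.bb:do_fetch', 'recipe-b.bb:do_fetch', 'recipe-c.bb:do_fetch'],
#  ['recipe-a.bb:do_compile', 'recipe-b.bb:do_compile']]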

bitbake/lib/bb/siggen.py

@@ -335,8 +335,8 @@ class SignatureGeneratorBasic(SignatureGenerator):
         self.unihash_cache.copyfile(targetdir)
 
     def dump_sigtask(self, fn, task, stampbase, runtime):
-
         tid = fn + ":" + task
+        mc = bb.runqueue.mc_from_tid(fn)
         referencestamp = stampbase
         if isinstance(runtime, str) and runtime.startswith("customfile"):
             sigfile = stampbase
@@ -353,16 +353,27 @@ class SignatureGeneratorBasic(SignatureGenerator):
         data['task'] = task
         data['basehash_ignore_vars'] = self.basehash_ignore_vars
         data['taskhash_ignore_tasks'] = self.taskhash_ignore_tasks
-        data['taskdeps'] = self.taskdeps[fn][task]
+        if hasattr(self, "datacaches"):
+            data['taskdeps'] = self.datacaches[mc].siggen_taskdeps[fn][task]
+        else:
+            data['taskdeps'] = self.taskdeps[fn][task]
         data['basehash'] = self.basehash[tid]
         data['gendeps'] = {}
         data['varvals'] = {}
-        data['varvals'][task] = self.lookupcache[fn][task]
-        for dep in self.taskdeps[fn][task]:
-            if dep in self.basehash_ignore_vars:
-                continue
-            data['gendeps'][dep] = self.gendeps[fn][dep]
-            data['varvals'][dep] = self.lookupcache[fn][dep]
+        if hasattr(self, "datacaches"):
+            data['varvals'][task] = self.datacaches[mc].siggen_varvals[fn][task]
+            for dep in self.datacaches[mc].siggen_taskdeps[fn][task]:
+                if dep in self.basehash_ignore_vars:
+                    continue
+                data['gendeps'][dep] = self.datacaches[mc].siggen_gendeps[fn][dep]
+                data['varvals'][dep] = self.datacaches[mc].siggen_varvals[fn][dep]
+        else:
+            data['varvals'][task] = self.lookupcache[fn][task]
+            for dep in self.taskdeps[fn][task]:
+                if dep in self.basehash_ignore_vars:
+                    continue
+                data['gendeps'][dep] = self.gendeps[fn][dep]
+                data['varvals'][dep] = self.lookupcache[fn][dep]
 
         if runtime and tid in self.taskhash:
             data['runtaskdeps'] = self.runtaskdeps[tid]
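
When the signature generator has been handed the cooker's data caches (the datacaches attribute, available once RECIPE_SIGGEN_INFO is enabled), dump_sigtask() now reads the per-multiconfig siggen_taskdeps / siggen_gendeps / siggen_varvals fields instead of the in-memory taskdeps / gendeps / lookupcache it only has after a full parse. Either way, the dictionary it serialises has the same shape; the sketch below shows that shape with invented placeholder values, based only on the keys visible in this hunk.

# Rough shape of the data dictionary dump_sigtask() assembles; every value
# here is an invented placeholder, not real recipe data.
data = {
    "task": "do_compile",
    "basehash_ignore_vars": {"TMPDIR", "DATETIME"},        # illustrative set of variable names
    "taskhash_ignore_tasks": None,
    "taskdeps": {"CC", "CFLAGS", "do_configure"},          # variables/tasks this task depends on
    "basehash": "0123abcd",                                # hypothetical hash value
    "gendeps": {"CC": {"TARGET_PREFIX"}},                  # per-variable dependency sets
    "varvals": {"do_compile": "oe_runmake", "CC": "gcc"},  # variable values captured for the hash
    # runtime-only keys such as 'runtaskdeps' are added when the task hash is known
}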
@@ -409,18 +420,6 @@ class SignatureGeneratorBasic(SignatureGenerator):
                 pass
             raise err
 
-    def dump_sigfn(self, fn, dataCaches, options):
-        if fn in self.taskdeps:
-            for task in self.taskdeps[fn]:
-                tid = fn + ":" + task
-                mc = bb.runqueue.mc_from_tid(tid)
-                if tid not in self.taskhash:
-                    continue
-                if dataCaches[mc].basetaskhash[tid] != self.basehash[tid]:
-                    bb.error("Bitbake's cached basehash does not match the one we just generated (%s)!" % tid)
-                    bb.error("The mismatched hashes were %s and %s" % (dataCaches[mc].basetaskhash[tid], self.basehash[tid]))
-                self.dump_sigtask(fn, task, dataCaches[mc].stamp[fn], True)
-
 class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
     name = "basichash"
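
With dump_sigfn() gone, per-task signature files are written only through dump_sigtask(), driven from the runqueue workers above. The files it produces can still be inspected with the existing helpers in bb.siggen, which is what the bitbake-dumpsig and bitbake-diffsigs scripts build on; the sketch below assumes those helpers and uses a purely hypothetical file path.

# Hedged sketch: reading back a signature file written by dump_sigtask().
# bb.siggen.dump_sigfile() returns a list of human-readable lines describing
# the stored hashes and variable values; the path below is hypothetical.
import bb.siggen

sigfile = "tmp/stamps/core2-64-poky-linux/zlib/do_compile.sigdata.0123abcd"
for line in bb.siggen.dump_sigfile(sigfile):
    print(line)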