mirror of
https://git.yoctoproject.org/poky
synced 2026-04-02 17:02:21 +02:00
Previously, 'runtaskdeps' was assigned before the taskhash whitelist was filtered out, and thus always contained the original task dependencies. This made sstate-related problems harder to diagnose. Fix it by appending each dependency that survives the filter to 'runtaskdeps' on the fly. Signed-off-by: Kevin Tian <kevin.tian@intel.com>
262 lines
9.3 KiB
Python
262 lines
9.3 KiB
Python
import hashlib
|
|
import re
|
|
|
|
try:
|
|
import cPickle as pickle
|
|
except ImportError:
|
|
import pickle
|
|
bb.msg.note(1, bb.msg.domain.Cache, "Importing cPickle failed. Falling back to a very slow implementation.")
|
|
|
|
def init(d, dumpsigs):
    """Instantiate the signature generator selected by BB_SIGNATURE_HANDLER.

    Scans this module's globals for SignatureGenerator subclasses and
    returns an instance of the one whose 'name' matches the configured
    handler (default "noop").  On an unknown handler name, errors are
    logged and the no-op base generator is returned as a fallback.
    """
    siggens = [obj for obj in globals().itervalues()
               if type(obj) is type and issubclass(obj, SignatureGenerator)]

    desired = bb.data.getVar("BB_SIGNATURE_HANDLER", d, True) or "noop"
    for sg in siggens:
        if desired == sg.name:
            return sg(d, dumpsigs)

    # No generator matched: the original had a for/else with an
    # unreachable 'break' after the return; a plain fallthrough here is
    # equivalent and clearer.
    bb.error("Invalid signature generator '%s', using default 'noop' generator" % desired)
    bb.error("Available generators: %s" % ", ".join(obj.name for obj in siggens))
    return SignatureGenerator(d, dumpsigs)
|
|
|
|
class SignatureGenerator(object):
    """
    Default do-nothing ("noop") signature generator.

    Acts both as the fallback handler when BB_SIGNATURE_HANDLER names an
    unknown generator and as the base class real generators derive from.
    """
    name = "noop"

    def __init__(self, data, dumpsigs):
        # The noop generator keeps no state.
        pass

    def finalise(self, fn, d, varient):
        # Nothing to compute for the noop generator.
        pass

    def stampfile(self, stampbase, taskname, taskhash):
        # The task hash plays no part in noop stamp file names.
        return ".".join((stampbase, taskname))
|
|
|
|
class SignatureGeneratorBasic(SignatureGenerator):
    """
    Signature generator that hashes each task's variable values and the
    hashes of the tasks it depends on, and can pickle that data out for
    later inspection (dump_sigfile / compare_sigfiles).
    """
    name = "basic"

    def __init__(self, data, dumpsigs):
        # "fn.taskname" -> md5 of the task's variable data (_build_data)
        self.basehash = {}
        # "fn.taskname" -> md5 of basehash plus dependent task hashes
        self.taskhash = {}
        # fn -> {task -> variables the task depends on}; kept only when dumpsigs
        self.taskdeps = {}
        # "fn.taskname" -> list of dependency keys actually folded into taskhash
        self.runtaskdeps = {}
        # fn -> raw variable dependency data; kept only when dumpsigs
        self.gendeps = {}
        # fn -> {variable -> value} cache; kept only when dumpsigs
        self.lookupcache = {}
        # Variables excluded from base hash computation entirely
        self.basewhitelist = (data.getVar("BB_HASHBASE_WHITELIST", True) or "").split()
        # Optional regex; recipes it matches contribute no inter-task
        # hash data (see get_taskhash)
        self.taskwhitelist = data.getVar("BB_HASHTASK_WHITELIST", True) or None

        if self.taskwhitelist:
            self.twl = re.compile(self.taskwhitelist)
        else:
            self.twl = None

        self.dumpsigs = dumpsigs

    def _build_data(self, fn, d):
        # Work out which variables each task depends on, then hash each
        # task's own value concatenated with its dependencies' values.
        taskdeps, gendeps = bb.data.generate_dependencies(d)

        # NOTE(review): local 'basehash' is assigned but never used below;
        # results go straight into self.basehash.
        basehash = {}
        lookupcache = {}

        for task in taskdeps:
            data = d.getVar(task, False)
            lookupcache[task] = data
            # Sorted iteration keeps the concatenation (and hence the
            # hash) deterministic across runs.
            for dep in sorted(taskdeps[task]):
                if dep in self.basewhitelist:
                    continue
                if dep in lookupcache:
                    var = lookupcache[dep]
                else:
                    var = d.getVar(dep, False)
                    lookupcache[dep] = var
                if var:
                    data = data + var
            if data is None:
                bb.error("Task %s from %s seems to be empty?!" % (task, fn))
            # Python 2 str feeds md5 directly; no encode step needed here.
            self.basehash[fn + "." + task] = hashlib.md5(data).hexdigest()
            #bb.note("Hash for %s is %s" % (task, tashhash[task]))

        if self.dumpsigs:
            # Retain the raw dependency data so dump_sigtask() can write
            # full signature files later.
            self.taskdeps[fn] = taskdeps
            self.gendeps[fn] = gendeps
            self.lookupcache[fn] = lookupcache

        return taskdeps

    def finalise(self, fn, d, variant):
        # Called when a recipe has been parsed: compute base hashes and
        # export them into the datastore as BB_BASEHASH_task-<name>.
        if variant:
            fn = "virtual:" + variant + ":" + fn

        taskdeps = self._build_data(fn, d)

        #Slow but can be useful for debugging mismatched basehashes
        #for task in self.taskdeps[fn]:
        #    self.dump_sigtask(fn, task, d.getVar("STAMP", True), False)

        for task in taskdeps:
            d.setVar("BB_BASEHASH_task-%s" % task, self.basehash[fn + "." + task])

    def get_taskhash(self, fn, task, deps, dataCache):
        # Fold the hashes of dependent tasks into this task's base hash.
        # runtaskdeps is appended to as deps pass the whitelist filter, so
        # it records only the dependencies that actually influenced the hash.
        k = fn + "." + task
        data = dataCache.basetaskhash[k]
        self.runtaskdeps[k] = []
        for dep in sorted(deps):
            # Recipes matching BB_HASHTASK_WHITELIST skip inter-task data.
            # NOTE(review): this condition depends only on fn, not dep, so
            # it could be hoisted out of the loop — confirm and simplify.
            if self.twl and self.twl.search(dataCache.pkg_fn[fn]):
                #bb.note("Skipping %s" % dep)
                continue
            if dep not in self.taskhash:
                # NOTE(review): '%s' here is passed as a separate argument,
                # not %-applied — verify bb.fatal performs the substitution.
                bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?", dep)
            data = data + self.taskhash[dep]
            self.runtaskdeps[k].append(dep)
        h = hashlib.md5(data).hexdigest()
        self.taskhash[k] = h
        #d.setVar("BB_TASKHASH_task-%s" % task, taskhash[task])
        return h

    def set_taskdata(self, hashes, deps):
        # Replace the stored task hashes and runtime dependency lists
        # wholesale with externally supplied data.
        self.runtaskdeps = deps
        self.taskhash = hashes

    def dump_sigtask(self, fn, task, stampbase, runtime):
        # Pickle everything that went into this task's signature so it can
        # later be inspected with dump_sigfile()/compare_sigfiles().
        k = fn + "." + task
        if runtime == "customfile":
            # Caller supplied the exact output path (see dump_this_task).
            sigfile = stampbase
        elif runtime:
            sigfile = stampbase + "." + task + ".sigdata" + "." + self.taskhash[k]
        else:
            sigfile = stampbase + "." + task + ".sigbasedata" + "." + self.basehash[k]

        data = {}
        data['basewhitelist'] = self.basewhitelist
        data['taskwhitelist'] = self.taskwhitelist
        data['taskdeps'] = self.taskdeps[fn][task]
        data['basehash'] = self.basehash[k]
        data['gendeps'] = {}
        data['varvals'] = {}
        data['varvals'][task] = self.lookupcache[fn][task]
        # Record each non-whitelisted variable's own dependencies and value.
        for dep in self.taskdeps[fn][task]:
            if dep in self.basewhitelist:
                continue
            data['gendeps'][dep] = self.gendeps[fn][dep]
            data['varvals'][dep] = self.lookupcache[fn][dep]

        if runtime:
            # Runtime dumps additionally record inter-task hash data.
            data['runtaskdeps'] = self.runtaskdeps[k]
            data['runtaskhashes'] = {}
            for dep in data['runtaskdeps']:
                data['runtaskhashes'][dep] = self.taskhash[dep]

        # Protocol -1 = highest available pickle protocol.
        p = pickle.Pickler(file(sigfile, "wb"), -1)
        p.dump(data)

    def dump_sigs(self, dataCache):
        # Dump a runtime signature file for every task we computed a task
        # hash for, cross-checking our base hashes against bitbake's cache.
        for fn in self.taskdeps:
            for task in self.taskdeps[fn]:
                k = fn + "." + task
                if k not in self.taskhash:
                    continue
                if dataCache.basetaskhash[k] != self.basehash[k]:
                    bb.error("Bitbake's cached basehash does not match the one we just generated!")
                    bb.error("The mismatched hashes were %s and %s" % (dataCache.basetaskhash[k], self.basehash[k]))
                self.dump_sigtask(fn, task, dataCache.stamp[fn], True)
|
|
|
|
def dump_this_task(outfile, d):
    """Write the signature data for the currently executing task to outfile."""
    recipe = d.getVar("BB_FILENAME", True)
    taskname = "do_" + d.getVar("BB_CURRENTTASK", True)
    # "customfile" makes dump_sigtask treat outfile as the exact path.
    bb.parse.siggen.dump_sigtask(recipe, taskname, outfile, "customfile")
|
|
|
|
def compare_sigfiles(a, b):
|
|
p1 = pickle.Unpickler(file(a, "rb"))
|
|
a_data = p1.load()
|
|
p2 = pickle.Unpickler(file(b, "rb"))
|
|
b_data = p2.load()
|
|
|
|
#print "Checking"
|
|
#print str(a_data)
|
|
#print str(b_data)
|
|
|
|
def dict_diff(a, b):
|
|
sa = set(a.keys())
|
|
sb = set(b.keys())
|
|
common = sa & sb
|
|
changed = set()
|
|
for i in common:
|
|
if a[i] != b[i]:
|
|
changed.add(i)
|
|
added = sa - sb
|
|
removed = sb - sa
|
|
return changed, added, removed
|
|
|
|
if 'basewhitelist' in a_data and a_data['basewhitelist'] != b_data['basewhitelist']:
|
|
print "basewhitelist changed from %s to %s" % (a_data['basewhitelist'], b_data['basewhitelist'])
|
|
|
|
if 'taskwhitelist' in a_data and a_data['taskwhitelist'] != b_data['taskwhitelist']:
|
|
print "taskwhitelist changed from %s to %s" % (a_data['taskwhitelist'], b_data['taskwhitelist'])
|
|
|
|
if a_data['taskdeps'] != b_data['taskdeps']:
|
|
print "Task dependencies changed from %s to %s" % (sorted(a_data['taskdeps']), sorted(b_data['taskdeps']))
|
|
|
|
if a_data['basehash'] != b_data['basehash']:
|
|
print "basehash changed from %s to %s" % (a_data['basehash'], b_data['basehash'])
|
|
|
|
changed, added, removed = dict_diff(a_data['gendeps'], b_data['gendeps'])
|
|
if changed:
|
|
for dep in changed:
|
|
print "List of dependencies for variable %s changed from %s to %s" % (dep, a_data['gendeps'][dep], b_data['gendeps'][dep])
|
|
if added:
|
|
for dep in added:
|
|
print "Dependency on variable %s was added" % (dep)
|
|
if removed:
|
|
for dep in removed:
|
|
print "Dependency on Variable %s was removed" % (dep)
|
|
|
|
|
|
changed, added, removed = dict_diff(a_data['varvals'], b_data['varvals'])
|
|
if changed:
|
|
for dep in changed:
|
|
print "Variable %s value changed from %s to %s" % (dep, a_data['varvals'][dep], b_data['varvals'][dep])
|
|
#if added:
|
|
# print "Dependency on variable %s was added (value %s)" % (dep, b_data['gendeps'][dep])
|
|
#if removed:
|
|
# print "Dependency on Variable %s was removed (value %s)" % (dep, a_data['gendeps'][dep])
|
|
|
|
if 'runtaskdeps' in a_data and 'runtaskdeps' in b_data and sorted(a_data['runtaskdeps']) != sorted(b_data['runtaskdeps']):
|
|
print "Tasks this task depends on changed from %s to %s" % (sorted(a_data['runtaskdeps']), sorted(b_data['runtaskdeps']))
|
|
|
|
if 'runtaskhashes' in a_data:
|
|
for dep in a_data['runtaskhashes']:
|
|
if a_data['runtaskhashes'][dep] != b_data['runtaskhashes'][dep]:
|
|
print "Hash for dependent task %s changed from %s to %s" % (dep, a_data['runtaskhashes'][dep], b_data['runtaskhashes'][dep])
|
|
|
|
def dump_sigfile(a):
|
|
p1 = pickle.Unpickler(file(a, "rb"))
|
|
a_data = p1.load()
|
|
|
|
print "basewhitelist: %s" % (a_data['basewhitelist'])
|
|
|
|
print "taskwhitelist: %s" % (a_data['taskwhitelist'])
|
|
|
|
print "Task dependencies: %s" % (sorted(a_data['taskdeps']))
|
|
|
|
print "basehash: %s" % (a_data['basehash'])
|
|
|
|
for dep in a_data['gendeps']:
|
|
print "List of dependencies for variable %s is %s" % (dep, a_data['gendeps'][dep])
|
|
|
|
for dep in a_data['varvals']:
|
|
print "Variable %s value is %s" % (dep, a_data['varvals'][dep])
|
|
|
|
if 'runtaskdeps' in a_data:
|
|
print "Tasks this task depends on: %s" % (a_data['runtaskdeps'])
|
|
|
|
if 'runtaskhashes' in a_data:
|
|
for dep in a_data['runtaskhashes']:
|
|
print "Hash for dependent task %s is %s" % (dep, a_data['runtaskhashes'][dep])
|