mirror of https://git.yoctoproject.org/poky
The current bitbake-diffsigs simply prints out the whole 'runtaskdeps' list when there is a mismatch, which is not very readable. In addition, the 'runtaskhashes' comparison is broken because it assumes the same keys exist on both sides. This commit provides better output by reporting the differences as additions, removals, or hash changes. Signed-off-by: Kevin Tian <kevin.tian@intel.com>
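An illustrative sketch only (not part of the file below), with made-up task keys and hashes, mirroring the dict_diff() helper used by compare_sigfiles(): keys only in the first dictionary are reported as added, keys only in the second as removed, and common keys with different values as changed.

    # hypothetical 'runtaskhashes' dictionaries loaded from two signature files
    a = {'x.bb.do_fetch': 'aaa1', 'x.bb.do_compile': 'bbb2'}
    b = {'x.bb.do_compile': 'ccc3', 'x.bb.do_install': 'ddd4'}
    changed = set(k for k in set(a) & set(b) if a[k] != b[k])   # {'x.bb.do_compile'}
    added = set(a) - set(b)                                     # {'x.bb.do_fetch'}
    removed = set(b) - set(a)                                   # {'x.bb.do_install'}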
297 lines
10 KiB
Python
import hashlib
import logging
import os
import re

logger = logging.getLogger('BitBake.SigGen')

try:
    import cPickle as pickle
except ImportError:
    import pickle
    logger.info('Importing cPickle failed. Falling back to a very slow implementation.')

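# Select the signature generator class whose 'name' matches BB_SIGNATURE_HANDLER,
# falling back to the plain 'noop' generator if no match is found.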
def init(d):
    siggens = [obj for obj in globals().itervalues()
               if type(obj) is type and issubclass(obj, SignatureGenerator)]

    desired = bb.data.getVar("BB_SIGNATURE_HANDLER", d, True) or "noop"
    for sg in siggens:
        if desired == sg.name:
            return sg(d)
            break
    else:
        logger.error("Invalid signature generator '%s', using default 'noop'\n"
                     "Available generators: %s",
                     desired, ', '.join(obj.name for obj in siggens))
        return SignatureGenerator(d)

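# The default generator: produces no real hashes, so stamp files carry no hash component.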
class SignatureGenerator(object):
    """
    """
    name = "noop"

    def __init__(self, data):
        return

    def finalise(self, fn, d, variant):
        return

    def get_taskhash(self, fn, task, deps, dataCache):
        return 0

    def set_taskdata(self, hashes, deps):
        return

    def stampfile(self, stampbase, file_name, taskname, extrainfo):
        return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.')

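# The 'basic' generator: hashes each task's expanded variable dependencies into a
# per-task "basehash", and mixes in dependent task hashes to form the full task hash.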
class SignatureGeneratorBasic(SignatureGenerator):
    """
    """
    name = "basic"

    def __init__(self, data):
        self.basehash = {}
        self.taskhash = {}
        self.taskdeps = {}
        self.runtaskdeps = {}
        self.gendeps = {}
        self.lookupcache = {}
        self.basewhitelist = set((data.getVar("BB_HASHBASE_WHITELIST", True) or "").split())
        self.taskwhitelist = data.getVar("BB_HASHTASK_WHITELIST", True) or None

        if self.taskwhitelist:
            self.twl = re.compile(self.taskwhitelist)
        else:
            self.twl = None

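    # Expand each task's variable dependencies transitively (skipping whitelisted
    # variables) and hash the concatenated variable values to give the basehash.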
    def _build_data(self, fn, d):

        tasklist, gendeps = bb.data.generate_dependencies(d)

        taskdeps = {}
        basehash = {}
        lookupcache = {}

        for task in tasklist:
            data = d.getVar(task, False)
            lookupcache[task] = data

            newdeps = gendeps[task]
            seen = set()
            while newdeps:
                nextdeps = newdeps
                seen |= nextdeps
                newdeps = set()
                for dep in nextdeps:
                    if dep in self.basewhitelist:
                        continue
                    newdeps |= gendeps[dep]
                newdeps -= seen

            alldeps = seen - self.basewhitelist

            for dep in sorted(alldeps):
                if dep in lookupcache:
                    var = lookupcache[dep]
                else:
                    var = d.getVar(dep, False)
                    lookupcache[dep] = var
                if var:
                    data = data + var
            if data is None:
                bb.error("Task %s from %s seems to be empty?!" % (task, fn))
            self.basehash[fn + "." + task] = hashlib.md5(data).hexdigest()
            taskdeps[task] = sorted(alldeps)

        self.taskdeps[fn] = taskdeps
        self.gendeps[fn] = gendeps
        self.lookupcache[fn] = lookupcache

        return taskdeps

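    # Called once a recipe has been parsed: compute the basehashes for its tasks
    # and export them to the datastore as BB_BASEHASH_task-<taskname>.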
    def finalise(self, fn, d, variant):

        if variant:
            fn = "virtual:" + variant + ":" + fn

        taskdeps = self._build_data(fn, d)

        # Slow but can be useful for debugging mismatched basehashes
        #for task in self.taskdeps[fn]:
        #    self.dump_sigtask(fn, task, d.getVar("STAMP", True), False)

        for task in taskdeps:
            d.setVar("BB_BASEHASH_task-%s" % task, self.basehash[fn + "." + task])

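    # The full task hash: the basehash plus the hashes of the tasks this task
    # depends on at runtime, excluding dependencies matched by the task whitelist.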
    def get_taskhash(self, fn, task, deps, dataCache):
        k = fn + "." + task
        data = dataCache.basetaskhash[k]
        self.runtaskdeps[k] = []
        for dep in sorted(deps):
            # We only manipulate the dependencies for packages not in the whitelist
            if self.twl and not self.twl.search(dataCache.pkg_fn[fn]):
                # then process the actual dependencies
                dep_fn = re.search(r"(?P<fn>.*)\..*", dep).group('fn')
                if self.twl.search(dataCache.pkg_fn[dep_fn]):
                    continue
            if dep not in self.taskhash:
                bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?" % dep)
            data = data + self.taskhash[dep]
            self.runtaskdeps[k].append(dep)
        h = hashlib.md5(data).hexdigest()
        self.taskhash[k] = h
        #d.setVar("BB_TASKHASH_task-%s" % task, taskhash[task])
        return h

    def set_taskdata(self, hashes, deps):
        self.runtaskdeps = deps
        self.taskhash = hashes

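    # Write the signature data for one task to a pickled file: the whitelists,
    # variable dependencies and values, and (for runtime signatures) the hashes
    # of the tasks it depends on.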
    def dump_sigtask(self, fn, task, stampbase, runtime):
        k = fn + "." + task
        if runtime == "customfile":
            sigfile = stampbase
        elif runtime:
            sigfile = stampbase + "." + task + ".sigdata" + "." + self.taskhash[k]
        else:
            sigfile = stampbase + "." + task + ".sigbasedata" + "." + self.basehash[k]

        bb.utils.mkdirhier(os.path.dirname(sigfile))

        data = {}
        data['basewhitelist'] = self.basewhitelist
        data['taskwhitelist'] = self.taskwhitelist
        data['taskdeps'] = self.taskdeps[fn][task]
        data['basehash'] = self.basehash[k]
        data['gendeps'] = {}
        data['varvals'] = {}
        data['varvals'][task] = self.lookupcache[fn][task]
        for dep in self.taskdeps[fn][task]:
            if dep in self.basewhitelist:
                continue
            data['gendeps'][dep] = self.gendeps[fn][dep]
            data['varvals'][dep] = self.lookupcache[fn][dep]

        if runtime:
            data['runtaskdeps'] = self.runtaskdeps[k]
            data['runtaskhashes'] = {}
            for dep in data['runtaskdeps']:
                data['runtaskhashes'][dep] = self.taskhash[dep]

        p = pickle.Pickler(file(sigfile, "wb"), -1)
        p.dump(data)

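    # Dump runtime signature data for every task we have a hash for, warning if
    # the cached basehash disagrees with the one just computed.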
    def dump_sigs(self, dataCache):
        for fn in self.taskdeps:
            for task in self.taskdeps[fn]:
                k = fn + "." + task
                if k not in self.taskhash:
                    continue
                if dataCache.basetaskhash[k] != self.basehash[k]:
                    bb.error("Bitbake's cached basehash does not match the one we just generated (%s)!" % k)
                    bb.error("The mismatched hashes were %s and %s" % (dataCache.basetaskhash[k], self.basehash[k]))
                self.dump_sigtask(fn, task, dataCache.stamp[fn], True)

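# Like 'basic', but the task hash also becomes part of the stamp file name, so a
# changed hash automatically invalidates the stamp.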
class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
    name = "basichash"

    def stampfile(self, stampbase, fn, taskname, extrainfo):
        if taskname != "do_setscene" and taskname.endswith("_setscene"):
            k = fn + "." + taskname[:-9]
        else:
            k = fn + "." + taskname
        h = self.taskhash[k]
        return ("%s.%s.%s.%s" % (stampbase, taskname, h, extrainfo)).rstrip('.')

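# Dump the signature data of the currently executing task to the given file.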
def dump_this_task(outfile, d):
    fn = d.getVar("BB_FILENAME", True)
    task = "do_" + d.getVar("BB_CURRENTTASK", True)
    bb.parse.siggen.dump_sigtask(fn, task, outfile, "customfile")

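# Compare two pickled signature files and print what differs: whitelists, variable
# dependencies and values, and dependent task hashes, reported as additions,
# removals or hash changes.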
def compare_sigfiles(a, b):
    p1 = pickle.Unpickler(file(a, "rb"))
    a_data = p1.load()
    p2 = pickle.Unpickler(file(b, "rb"))
    b_data = p2.load()

    def dict_diff(a, b):
        sa = set(a.keys())
        sb = set(b.keys())
        common = sa & sb
        changed = set()
        for i in common:
            if a[i] != b[i]:
                changed.add(i)
        added = sa - sb
        removed = sb - sa
        return changed, added, removed

    if 'basewhitelist' in a_data and a_data['basewhitelist'] != b_data['basewhitelist']:
        print "basewhitelist changed from %s to %s" % (a_data['basewhitelist'], b_data['basewhitelist'])

    if 'taskwhitelist' in a_data and a_data['taskwhitelist'] != b_data['taskwhitelist']:
        print "taskwhitelist changed from %s to %s" % (a_data['taskwhitelist'], b_data['taskwhitelist'])

    if a_data['taskdeps'] != b_data['taskdeps']:
        print "Task dependencies changed from %s to %s" % (sorted(a_data['taskdeps']), sorted(b_data['taskdeps']))

    if a_data['basehash'] != b_data['basehash']:
        print "basehash changed from %s to %s" % (a_data['basehash'], b_data['basehash'])

    changed, added, removed = dict_diff(a_data['gendeps'], b_data['gendeps'])
    if changed:
        for dep in changed:
            print "List of dependencies for variable %s changed from %s to %s" % (dep, a_data['gendeps'][dep], b_data['gendeps'][dep])
    if added:
        for dep in added:
            print "Dependency on variable %s was added" % (dep)
    if removed:
        for dep in removed:
            print "Dependency on variable %s was removed" % (dep)

    changed, added, removed = dict_diff(a_data['varvals'], b_data['varvals'])
    if changed:
        for dep in changed:
            print "Variable %s value changed from %s to %s" % (dep, a_data['varvals'][dep], b_data['varvals'][dep])

    if 'runtaskhashes' in a_data and 'runtaskhashes' in b_data:
        changed, added, removed = dict_diff(a_data['runtaskhashes'], b_data['runtaskhashes'])
        if added:
            for dep in added:
                print "Dependency on task %s was added" % (dep)
        if removed:
            for dep in removed:
                print "Dependency on task %s was removed" % (dep)
        if changed:
            for dep in changed:
                print "Hash for dependent task %s changed from %s to %s" % (dep, a_data['runtaskhashes'][dep], b_data['runtaskhashes'][dep])
    elif 'runtaskdeps' in a_data and 'runtaskdeps' in b_data and sorted(a_data['runtaskdeps']) != sorted(b_data['runtaskdeps']):
        print "Tasks this task depends on changed from %s to %s" % (sorted(a_data['runtaskdeps']), sorted(b_data['runtaskdeps']))

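# Print the contents of a single signature file in a readable form.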
def dump_sigfile(a):
    p1 = pickle.Unpickler(file(a, "rb"))
    a_data = p1.load()

    print "basewhitelist: %s" % (a_data['basewhitelist'])

    print "taskwhitelist: %s" % (a_data['taskwhitelist'])

    print "Task dependencies: %s" % (sorted(a_data['taskdeps']))

    print "basehash: %s" % (a_data['basehash'])

    for dep in a_data['gendeps']:
        print "List of dependencies for variable %s is %s" % (dep, a_data['gendeps'][dep])

    for dep in a_data['varvals']:
        print "Variable %s value is %s" % (dep, a_data['varvals'][dep])

    if 'runtaskdeps' in a_data:
        print "Tasks this task depends on: %s" % (a_data['runtaskdeps'])

    if 'runtaskhashes' in a_data:
        for dep in a_data['runtaskhashes']:
            print "Hash for dependent task %s is %s" % (dep, a_data['runtaskhashes'][dep])