siggen.py

#
# SPDX-License-Identifier: GPL-2.0-only
#

import hashlib
import logging
import os
import re
import tempfile
import pickle
import bb.data
import difflib
import simplediff
from bb.checksum import FileChecksumCache
from bb import runqueue
import hashserv
import hashserv.client

logger = logging.getLogger('BitBake.SigGen')
hashequiv_logger = logging.getLogger('BitBake.SigGen.HashEquiv')

def init(d):
    siggens = [obj for obj in globals().values()
               if type(obj) is type and issubclass(obj, SignatureGenerator)]

    desired = d.getVar("BB_SIGNATURE_HANDLER") or "noop"
    for sg in siggens:
        if desired == sg.name:
            return sg(d)
            break
    else:
        logger.error("Invalid signature generator '%s', using default 'noop'\n"
                     "Available generators: %s", desired,
                     ', '.join(obj.name for obj in siggens))
        return SignatureGenerator(d)
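
# Illustrative configuration sketch (the value shown is an example, not a default):
# setting
#     BB_SIGNATURE_HANDLER = "basichash"
# in a configuration file makes init() pick the generator class below whose 'name'
# attribute matches; an unrecognised value is reported and the plain
# SignatureGenerator ('noop') is used instead.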

class SignatureGenerator(object):
    """
    The default ('noop') signature generator. It defines the interface the rest
    of BitBake expects but does no real signature handling.
    """
    name = "noop"

    def __init__(self, data):
        self.basehash = {}
        self.taskhash = {}
        self.unihash = {}
        self.runtaskdeps = {}
        self.file_checksum_values = {}
        self.taints = {}
        self.unitaskhashes = {}
        self.tidtopn = {}
        self.setscenetasks = set()

    def finalise(self, fn, d, varient):
        return

    def postparsing_clean_cache(self):
        return

    def get_unihash(self, tid):
        return self.taskhash[tid]

    def prep_taskhash(self, tid, deps, dataCache):
        return

    def get_taskhash(self, tid, deps, dataCache):
        self.taskhash[tid] = hashlib.sha256(tid.encode("utf-8")).hexdigest()
        return self.taskhash[tid]

    def writeout_file_checksum_cache(self):
        """Write/update the file checksum cache onto disk"""
        return

    def stampfile(self, stampbase, file_name, taskname, extrainfo):
        return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.')

    def stampcleanmask(self, stampbase, file_name, taskname, extrainfo):
        return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.')
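
    # Worked example (values are illustrative): stampfile("/tmp/stamps/foo-1.0-r0",
    # fn, "do_compile", "") returns "/tmp/stamps/foo-1.0-r0.do_compile"; the noop
    # generator puts no hash in the stamp name, and stampcleanmask is identical.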

    def dump_sigtask(self, fn, task, stampbase, runtime):
        return

    def invalidate_task(self, task, d, fn):
        bb.build.del_stamp(task, d, fn)

    def dump_sigs(self, dataCache, options):
        return

    def get_taskdata(self):
        return (self.runtaskdeps, self.taskhash, self.unihash, self.file_checksum_values, self.taints, self.basehash, self.unitaskhashes, self.tidtopn, self.setscenetasks)

    def set_taskdata(self, data):
        self.runtaskdeps, self.taskhash, self.unihash, self.file_checksum_values, self.taints, self.basehash, self.unitaskhashes, self.tidtopn, self.setscenetasks = data

    def reset(self, data):
        self.__init__(data)

    def get_taskhashes(self):
        return self.taskhash, self.unihash, self.unitaskhashes, self.tidtopn

    def set_taskhashes(self, hashes):
        self.taskhash, self.unihash, self.unitaskhashes, self.tidtopn = hashes

    def save_unitaskhashes(self):
        return

    def set_setscene_tasks(self, setscene_tasks):
        return

class SignatureGeneratorBasic(SignatureGenerator):
    """
    The 'basic' signature generator. Task hashes are built from the variable
    dependency data, runtime task dependencies and file checksums.
    """
    name = "basic"

    def __init__(self, data):
        self.basehash = {}
        self.taskhash = {}
        self.unihash = {}
        self.taskdeps = {}
        self.runtaskdeps = {}
        self.file_checksum_values = {}
        self.taints = {}
        self.gendeps = {}
        self.lookupcache = {}
        self.setscenetasks = set()
        self.basewhitelist = set((data.getVar("BB_HASHBASE_WHITELIST") or "").split())
        self.taskwhitelist = None
        self.init_rundepcheck(data)
        checksum_cache_file = data.getVar("BB_HASH_CHECKSUM_CACHE_FILE")
        if checksum_cache_file:
            self.checksum_cache = FileChecksumCache()
            self.checksum_cache.init_cache(data, checksum_cache_file)
        else:
            self.checksum_cache = None

        self.unihash_cache = bb.cache.SimpleCache("3")
        self.unitaskhashes = self.unihash_cache.init_cache(data, "bb_unihashes.dat", {})
        self.localdirsexclude = (data.getVar("BB_SIGNATURE_LOCAL_DIRS_EXCLUDE") or "CVS .bzr .git .hg .osc .p4 .repo .svn").split()
        self.tidtopn = {}

    def init_rundepcheck(self, data):
        self.taskwhitelist = data.getVar("BB_HASHTASK_WHITELIST") or None
        if self.taskwhitelist:
            self.twl = re.compile(self.taskwhitelist)
        else:
            self.twl = None

    def _build_data(self, fn, d):

        ignore_mismatch = ((d.getVar("BB_HASH_IGNORE_MISMATCH") or '') == '1')
        tasklist, gendeps, lookupcache = bb.data.generate_dependencies(d, self.basewhitelist)

        taskdeps, basehash = bb.data.generate_dependency_hash(tasklist, gendeps, lookupcache, self.basewhitelist, fn)

        for task in tasklist:
            tid = fn + ":" + task
            if not ignore_mismatch and tid in self.basehash and self.basehash[tid] != basehash[tid]:
                bb.error("When reparsing %s, the basehash value changed from %s to %s. The metadata is not deterministic and this needs to be fixed." % (tid, self.basehash[tid], basehash[tid]))
                bb.error("The following commands may help:")
                cmd = "$ bitbake %s -c%s" % (d.getVar('PN'), task)
                # Make sure sigdata is dumped before running printdiff
                bb.error("%s -Snone" % cmd)
                bb.error("Then:")
                bb.error("%s -Sprintdiff\n" % cmd)
            self.basehash[tid] = basehash[tid]

        self.taskdeps[fn] = taskdeps
        self.gendeps[fn] = gendeps
        self.lookupcache[fn] = lookupcache

        return taskdeps

    def set_setscene_tasks(self, setscene_tasks):
        self.setscenetasks = set(setscene_tasks)

    def finalise(self, fn, d, variant):

        mc = d.getVar("__BBMULTICONFIG", False) or ""
        if variant or mc:
            fn = bb.cache.realfn2virtual(fn, variant, mc)

        try:
            taskdeps = self._build_data(fn, d)
        except bb.parse.SkipRecipe:
            raise
        except:
            bb.warn("Error during finalise of %s" % fn)
            raise

        #Slow but can be useful for debugging mismatched basehashes
        #for task in self.taskdeps[fn]:
        #    self.dump_sigtask(fn, task, d.getVar("STAMP"), False)

        for task in taskdeps:
            d.setVar("BB_BASEHASH_task-%s" % task, self.basehash[fn + ":" + task])

    def postparsing_clean_cache(self):
        #
        # After parsing we can remove some things from memory to reduce our memory footprint
        #
        self.gendeps = {}
        self.lookupcache = {}
        self.taskdeps = {}

    def rundep_check(self, fn, recipename, task, dep, depname, dataCache):
        # Return True if we should keep the dependency, False to drop it
        # We only manipulate the dependencies for packages not in the whitelist
        if self.twl and not self.twl.search(recipename):
            # then process the actual dependencies
            if self.twl.search(depname):
                return False
        return True

    def read_taint(self, fn, task, stampbase):
        taint = None
        try:
            with open(stampbase + '.' + task + '.taint', 'r') as taintf:
                taint = taintf.read()
        except IOError:
            pass
        return taint

    def prep_taskhash(self, tid, deps, dataCache):

        (mc, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)

        self.basehash[tid] = dataCache.basetaskhash[tid]
        self.runtaskdeps[tid] = []
        self.file_checksum_values[tid] = []
        recipename = dataCache.pkg_fn[fn]

        self.tidtopn[tid] = recipename

        for dep in sorted(deps, key=clean_basepath):
            (depmc, _, deptaskname, depfn) = bb.runqueue.split_tid_mcfn(dep)
            if mc != depmc:
                continue
            depname = dataCache.pkg_fn[depfn]
            if not self.rundep_check(fn, recipename, task, dep, depname, dataCache):
                continue
            if dep not in self.taskhash:
                bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?" % dep)
            self.runtaskdeps[tid].append(dep)

        if task in dataCache.file_checksums[fn]:
            if self.checksum_cache:
                checksums = self.checksum_cache.get_checksums(dataCache.file_checksums[fn][task], recipename, self.localdirsexclude)
            else:
                checksums = bb.fetch2.get_file_checksums(dataCache.file_checksums[fn][task], recipename, self.localdirsexclude)
            for (f,cs) in checksums:
                self.file_checksum_values[tid].append((f,cs))

        taskdep = dataCache.task_deps[fn]
        if 'nostamp' in taskdep and task in taskdep['nostamp']:
            # Nostamp tasks need an implicit taint so that they force any dependent tasks to run
            if tid in self.taints and self.taints[tid].startswith("nostamp:"):
                # Don't reset taint value upon every call
                pass
            else:
                import uuid
                taint = str(uuid.uuid4())
                self.taints[tid] = "nostamp:" + taint

        taint = self.read_taint(fn, task, dataCache.stamp[fn])
        if taint:
            self.taints[tid] = taint
            logger.warning("%s is tainted from a forced run" % tid)

        return

    def get_taskhash(self, tid, deps, dataCache):

        data = self.basehash[tid]
        for dep in self.runtaskdeps[tid]:
            if dep in self.unihash:
                if self.unihash[dep] is None:
                    data = data + self.taskhash[dep]
                else:
                    data = data + self.unihash[dep]
            else:
                data = data + self.get_unihash(dep)

        for (f, cs) in self.file_checksum_values[tid]:
            if cs:
                data = data + cs

        if tid in self.taints:
            if self.taints[tid].startswith("nostamp:"):
                data = data + self.taints[tid][8:]
            else:
                data = data + self.taints[tid]

        h = hashlib.sha256(data.encode("utf-8")).hexdigest()
        self.taskhash[tid] = h
        #d.setVar("BB_TASKHASH_task-%s" % task, taskhash[task])
        return h

    def writeout_file_checksum_cache(self):
        """Write/update the file checksum cache onto disk"""
        if self.checksum_cache:
            self.checksum_cache.save_extras()
            self.checksum_cache.save_merge()
        else:
            bb.fetch2.fetcher_parse_save()
            bb.fetch2.fetcher_parse_done()

    def save_unitaskhashes(self):
        self.unihash_cache.save(self.unitaskhashes)

    def dump_sigtask(self, fn, task, stampbase, runtime):

        tid = fn + ":" + task
        referencestamp = stampbase
        if isinstance(runtime, str) and runtime.startswith("customfile"):
            sigfile = stampbase
            referencestamp = runtime[11:]
        elif runtime and tid in self.taskhash:
            sigfile = stampbase + "." + task + ".sigdata" + "." + self.get_unihash(tid)
        else:
            sigfile = stampbase + "." + task + ".sigbasedata" + "." + self.basehash[tid]

        bb.utils.mkdirhier(os.path.dirname(sigfile))

        data = {}
        data['task'] = task
        data['basewhitelist'] = self.basewhitelist
        data['taskwhitelist'] = self.taskwhitelist
        data['taskdeps'] = self.taskdeps[fn][task]
        data['basehash'] = self.basehash[tid]
        data['gendeps'] = {}
        data['varvals'] = {}
        data['varvals'][task] = self.lookupcache[fn][task]
        for dep in self.taskdeps[fn][task]:
            if dep in self.basewhitelist:
                continue
            data['gendeps'][dep] = self.gendeps[fn][dep]
            data['varvals'][dep] = self.lookupcache[fn][dep]

        if runtime and tid in self.taskhash:
            data['runtaskdeps'] = self.runtaskdeps[tid]
            data['file_checksum_values'] = [(os.path.basename(f), cs) for f,cs in self.file_checksum_values[tid]]
            data['runtaskhashes'] = {}
            for dep in data['runtaskdeps']:
                data['runtaskhashes'][dep] = self.get_unihash(dep)
            data['taskhash'] = self.taskhash[tid]
            data['unihash'] = self.get_unihash(tid)

        taint = self.read_taint(fn, task, referencestamp)
        if taint:
            data['taint'] = taint

        if runtime and tid in self.taints:
            if 'nostamp:' in self.taints[tid]:
                data['taint'] = self.taints[tid]

        computed_basehash = calc_basehash(data)
        if computed_basehash != self.basehash[tid]:
            bb.error("Basehash mismatch %s versus %s for %s" % (computed_basehash, self.basehash[tid], tid))
        if runtime and tid in self.taskhash:
            computed_taskhash = calc_taskhash(data)
            if computed_taskhash != self.taskhash[tid]:
                bb.error("Taskhash mismatch %s versus %s for %s" % (computed_taskhash, self.taskhash[tid], tid))
                sigfile = sigfile.replace(self.taskhash[tid], computed_taskhash)

        fd, tmpfile = tempfile.mkstemp(dir=os.path.dirname(sigfile), prefix="sigtask.")
        try:
            with os.fdopen(fd, "wb") as stream:
                p = pickle.dump(data, stream, -1)
                stream.flush()
            os.chmod(tmpfile, 0o664)
            os.rename(tmpfile, sigfile)
        except (OSError, IOError) as err:
            try:
                os.unlink(tmpfile)
            except OSError:
                pass
            raise err

    def dump_sigfn(self, fn, dataCaches, options):
        if fn in self.taskdeps:
            for task in self.taskdeps[fn]:
                tid = fn + ":" + task
                mc = bb.runqueue.mc_from_tid(tid)
                if tid not in self.taskhash:
                    continue
                if dataCaches[mc].basetaskhash[tid] != self.basehash[tid]:
                    bb.error("Bitbake's cached basehash does not match the one we just generated (%s)!" % tid)
                    bb.error("The mismatched hashes were %s and %s" % (dataCaches[mc].basetaskhash[tid], self.basehash[tid]))
                self.dump_sigtask(fn, task, dataCaches[mc].stamp[fn], True)

class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
    name = "basichash"

    def get_stampfile_hash(self, tid):
        if tid in self.taskhash:
            return self.taskhash[tid]

        # If task is not in basehash, then error
        return self.basehash[tid]

    def stampfile(self, stampbase, fn, taskname, extrainfo, clean=False):
        if taskname != "do_setscene" and taskname.endswith("_setscene"):
            tid = fn + ":" + taskname[:-9]
        else:
            tid = fn + ":" + taskname
        if clean:
            h = "*"
        else:
            h = self.get_stampfile_hash(tid)

        return ("%s.%s.%s.%s" % (stampbase, taskname, h, extrainfo)).rstrip('.')

    def stampcleanmask(self, stampbase, fn, taskname, extrainfo):
        return self.stampfile(stampbase, fn, taskname, extrainfo, clean=True)
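
    # Worked example (values are illustrative): stampfile("/tmp/stamps/foo-1.0-r0",
    # fn, "do_compile", "") returns "/tmp/stamps/foo-1.0-r0.do_compile.<hash>", where
    # <hash> is the stampfile hash for the task; stampcleanmask() substitutes "*" for
    # the hash so existing stamps can be matched and cleaned.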

    def invalidate_task(self, task, d, fn):
        bb.note("Tainting hash to force rebuild of task %s, %s" % (fn, task))
        bb.build.write_taint(task, d, fn)

class SignatureGeneratorUniHashMixIn(object):
    def __init__(self, data):
        self.extramethod = {}
        super().__init__(data)

    def get_taskdata(self):
        return (self.server, self.method, self.extramethod) + super().get_taskdata()

    def set_taskdata(self, data):
        self.server, self.method, self.extramethod = data[:3]
        super().set_taskdata(data[3:])

    def client(self):
        if getattr(self, '_client', None) is None:
            self._client = hashserv.create_client(self.server)
        return self._client

    def get_stampfile_hash(self, tid):
        if tid in self.taskhash:
            # If a unique hash is reported, use it as the stampfile hash. This
            # ensures that a task won't be re-run if the taskhash changes but
            # still results in the same output hash
            unihash = self._get_unihash(tid)
            if unihash is not None:
                return unihash

        return super().get_stampfile_hash(tid)

    def set_unihash(self, tid, unihash):
        (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
        key = mc + ":" + self.tidtopn[tid] + ":" + taskname
        self.unitaskhashes[key] = (self.taskhash[tid], unihash)
        self.unihash[tid] = unihash

    def _get_unihash(self, tid, checkkey=None):
        if tid not in self.tidtopn:
            return None
        (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
        key = mc + ":" + self.tidtopn[tid] + ":" + taskname
        if key not in self.unitaskhashes:
            return None
        if not checkkey:
            checkkey = self.taskhash[tid]
        (key, unihash) = self.unitaskhashes[key]
        if key != checkkey:
            return None
        return unihash

    def get_unihash(self, tid):
        taskhash = self.taskhash[tid]

        # If it's not a setscene task we can return
        if self.setscenetasks and tid not in self.setscenetasks:
            self.unihash[tid] = None
            return taskhash

        # TODO: This cache can grow unbounded. It probably only needs to keep
        # for each task
        unihash = self._get_unihash(tid)
        if unihash is not None:
            self.unihash[tid] = unihash
            return unihash

        # In the absence of being able to discover a unique hash from the
        # server, make it be equivalent to the taskhash. The unique "hash" only
        # really needs to be a unique string (not even necessarily a hash), but
        # making it match the taskhash has a few advantages:
        #
        # 1) All of the sstate code that assumes hashes can be the same
        # 2) It provides maximal compatibility with builders that don't use
        #    an equivalency server
        # 3) The value is easy for multiple independent builders to derive the
        #    same unique hash from the same input. This means that if the
        #    independent builders find the same taskhash, but it isn't reported
        #    to the server, there is a better chance that they will agree on
        #    the unique hash.
        unihash = taskhash

        try:
            method = self.method
            if tid in self.extramethod:
                method = method + self.extramethod[tid]
            data = self.client().get_unihash(method, self.taskhash[tid])
            if data:
                unihash = data
                # A unique hash equal to the taskhash is not very interesting,
                # so it is reported at debug level 2. If they differ, that
                # is much more interesting, so it is reported at debug level 1
                hashequiv_logger.debug((1, 2)[unihash == taskhash], 'Found unihash %s in place of %s for %s from %s' % (unihash, taskhash, tid, self.server))
            else:
                hashequiv_logger.debug(2, 'No reported unihash for %s:%s from %s' % (tid, taskhash, self.server))
        except hashserv.client.HashConnectionError as e:
            bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))

        self.set_unihash(tid, unihash)
        self.unihash[tid] = unihash
        return unihash

    def report_unihash(self, path, task, d):
        import importlib

        taskhash = d.getVar('BB_TASKHASH')
        unihash = d.getVar('BB_UNIHASH')
        report_taskdata = d.getVar('SSTATE_HASHEQUIV_REPORT_TASKDATA') == '1'
        tempdir = d.getVar('T')
        fn = d.getVar('BB_FILENAME')
        tid = fn + ':do_' + task
        key = tid + ':' + taskhash

        if self.setscenetasks and tid not in self.setscenetasks:
            return

        # This can happen if locked sigs are in action. Detect and just abort
        if taskhash != self.taskhash[tid]:
            return

        # Sanity checks
        cache_unihash = self._get_unihash(tid, checkkey=taskhash)
        if cache_unihash is None:
            bb.fatal('%s not in unihash cache. Please report this error' % key)

        if cache_unihash != unihash:
            bb.fatal("Cache unihash %s doesn't match BB_UNIHASH %s" % (cache_unihash, unihash))

        sigfile = None
        sigfile_name = "depsig.do_%s.%d" % (task, os.getpid())
        sigfile_link = "depsig.do_%s" % task

        try:
            sigfile = open(os.path.join(tempdir, sigfile_name), 'w+b')

            locs = {'path': path, 'sigfile': sigfile, 'task': task, 'd': d}

            if "." in self.method:
                (module, method) = self.method.rsplit('.', 1)
                locs['method'] = getattr(importlib.import_module(module), method)
                outhash = bb.utils.better_eval('method(path, sigfile, task, d)', locs)
            else:
                outhash = bb.utils.better_eval(self.method + '(path, sigfile, task, d)', locs)

            try:
                extra_data = {}

                owner = d.getVar('SSTATE_HASHEQUIV_OWNER')
                if owner:
                    extra_data['owner'] = owner

                if report_taskdata:
                    sigfile.seek(0)

                    extra_data['PN'] = d.getVar('PN')
                    extra_data['PV'] = d.getVar('PV')
                    extra_data['PR'] = d.getVar('PR')
                    extra_data['task'] = task
                    extra_data['outhash_siginfo'] = sigfile.read().decode('utf-8')

                method = self.method
                if tid in self.extramethod:
                    method = method + self.extramethod[tid]

                data = self.client().report_unihash(taskhash, method, outhash, unihash, extra_data)
                new_unihash = data['unihash']

                if new_unihash != unihash:
                    hashequiv_logger.debug(1, 'Task %s unihash changed %s -> %s by server %s' % (taskhash, unihash, new_unihash, self.server))
                    bb.event.fire(bb.runqueue.taskUniHashUpdate(fn + ':do_' + task, new_unihash), d)
                    self.set_unihash(tid, new_unihash)
                    d.setVar('BB_UNIHASH', new_unihash)
                else:
                    hashequiv_logger.debug(1, 'Reported task %s as unihash %s to %s' % (taskhash, unihash, self.server))
            except hashserv.client.HashConnectionError as e:
                bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
        finally:
            if sigfile:
                sigfile.close()

                sigfile_link_path = os.path.join(tempdir, sigfile_link)
                bb.utils.remove(sigfile_link_path)

                try:
                    os.symlink(sigfile_name, sigfile_link_path)
                except OSError:
                    pass

    def report_unihash_equiv(self, tid, taskhash, wanted_unihash, current_unihash, datacaches):
        try:
            extra_data = {}
            method = self.method
            if tid in self.extramethod:
                method = method + self.extramethod[tid]

            data = self.client().report_unihash_equiv(taskhash, method, wanted_unihash, extra_data)
            hashequiv_logger.verbose('Reported task %s as unihash %s to %s (%s)' % (tid, wanted_unihash, self.server, str(data)))

            if data is None:
                bb.warn("Server unable to handle unihash report")
                return False

            finalunihash = data['unihash']

            if finalunihash == current_unihash:
                hashequiv_logger.verbose('Task %s unihash %s unchanged by server' % (tid, finalunihash))
            elif finalunihash == wanted_unihash:
                hashequiv_logger.verbose('Task %s unihash changed %s -> %s as wanted' % (tid, current_unihash, finalunihash))
                self.set_unihash(tid, finalunihash)
                return True
            else:
                # TODO: What to do here?
                hashequiv_logger.verbose('Task %s unihash reported as unwanted hash %s' % (tid, finalunihash))

        except hashserv.client.HashConnectionError as e:
            bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))

        return False

#
# Dummy class used for bitbake-selftest
#
class SignatureGeneratorTestEquivHash(SignatureGeneratorUniHashMixIn, SignatureGeneratorBasicHash):
    name = "TestEquivHash"
    def init_rundepcheck(self, data):
        super().init_rundepcheck(data)
        self.server = data.getVar('BB_HASHSERVE')
        self.method = "sstate_output_hash"
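
# Illustrative sketch (values are assumptions): a bitbake-selftest style configuration
# for the hash-equivalence test handler above would look roughly like
#     BB_SIGNATURE_HANDLER = "TestEquivHash"
#     BB_HASHSERVE = "<host>:<port>"
# where BB_HASHSERVE is the address handed to hashserv.create_client() by client().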

def dump_this_task(outfile, d):
    import bb.parse
    fn = d.getVar("BB_FILENAME")
    task = "do_" + d.getVar("BB_CURRENTTASK")
    referencestamp = bb.build.stamp_internal(task, d, None, True)
    bb.parse.siggen.dump_sigtask(fn, task, outfile, "customfile:" + referencestamp)

def init_colors(enable_color):
    """Initialise colour dict for passing to compare_sigfiles()"""
    # First set up the colours
    colors = {'color_title':   '\033[1m',
              'color_default': '\033[0m',
              'color_add':     '\033[0;32m',
              'color_remove':  '\033[0;31m',
             }
    # Leave all keys present but clear the values
    if not enable_color:
        for k in colors.keys():
            colors[k] = ''
    return colors

def worddiff_str(oldstr, newstr, colors=None):
    if not colors:
        colors = init_colors(False)
    diff = simplediff.diff(oldstr.split(' '), newstr.split(' '))
    ret = []
    for change, value in diff:
        value = ' '.join(value)
        if change == '=':
            ret.append(value)
        elif change == '+':
            item = '{color_add}{{+{value}+}}{color_default}'.format(value=value, **colors)
            ret.append(item)
        elif change == '-':
            item = '{color_remove}[-{value}-]{color_default}'.format(value=value, **colors)
            ret.append(item)
    whitespace_note = ''
    if oldstr != newstr and ' '.join(oldstr.split()) == ' '.join(newstr.split()):
        whitespace_note = ' (whitespace changed)'
    return '"%s"%s' % (' '.join(ret), whitespace_note)

def list_inline_diff(oldlist, newlist, colors=None):
    if not colors:
        colors = init_colors(False)
    diff = simplediff.diff(oldlist, newlist)
    ret = []
    for change, value in diff:
        value = ' '.join(value)
        if change == '=':
            ret.append("'%s'" % value)
        elif change == '+':
            item = '{color_add}+{value}{color_default}'.format(value=value, **colors)
            ret.append(item)
        elif change == '-':
            item = '{color_remove}-{value}{color_default}'.format(value=value, **colors)
            ret.append(item)
    return '[%s]' % (', '.join(ret))

def clean_basepath(a):
    mc = None
    if a.startswith("mc:"):
        _, mc, a = a.split(":", 2)
    b = a.rsplit("/", 2)[1] + '/' + a.rsplit("/", 2)[2]
    if a.startswith("virtual:"):
        b = b + ":" + a.rsplit(":", 1)[0]
    if mc:
        b = b + ":mc:" + mc
    return b
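
# Worked examples (paths are illustrative):
#   clean_basepath("/srv/meta/recipes-core/foo/foo_1.0.bb")                 -> "foo/foo_1.0.bb"
#   clean_basepath("virtual:native:/srv/meta/recipes-core/foo/foo_1.0.bb")  -> "foo/foo_1.0.bb:virtual:native"
#   clean_basepath("mc:mc1:/srv/meta/recipes-core/foo/foo_1.0.bb")          -> "foo/foo_1.0.bb:mc:mc1"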

def clean_basepaths(a):
    b = {}
    for x in a:
        b[clean_basepath(x)] = a[x]
    return b

def clean_basepaths_list(a):
    b = []
    for x in a:
        b.append(clean_basepath(x))
    return b

def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
    output = []

    colors = init_colors(color)
    def color_format(formatstr, **values):
        """
        Return colour formatted string.
        NOTE: call with the format string, not an already formatted string
        containing values (otherwise you could have trouble with { and }
        characters)
        """
        if not formatstr.endswith('{color_default}'):
            formatstr += '{color_default}'
        # In newer python 3 versions you can pass both of these directly,
        # but we only require 3.4 at the moment
        formatparams = {}
        formatparams.update(colors)
        formatparams.update(values)
        return formatstr.format(**formatparams)

    with open(a, 'rb') as f:
        p1 = pickle.Unpickler(f)
        a_data = p1.load()
    with open(b, 'rb') as f:
        p2 = pickle.Unpickler(f)
        b_data = p2.load()

    def dict_diff(a, b, whitelist=set()):
        sa = set(a.keys())
        sb = set(b.keys())
        common = sa & sb
        changed = set()
        for i in common:
            if a[i] != b[i] and i not in whitelist:
                changed.add(i)
        added = sb - sa
        removed = sa - sb
        return changed, added, removed

    def file_checksums_diff(a, b):
        from collections import Counter
        # Handle old siginfo format
        if isinstance(a, dict):
            a = [(os.path.basename(f), cs) for f, cs in a.items()]
        if isinstance(b, dict):
            b = [(os.path.basename(f), cs) for f, cs in b.items()]
        # Compare lists, ensuring we can handle duplicate filenames if they exist
        removedcount = Counter(a)
        removedcount.subtract(b)
        addedcount = Counter(b)
        addedcount.subtract(a)
        added = []
        for x in b:
            if addedcount[x] > 0:
                addedcount[x] -= 1
                added.append(x)
        removed = []
        changed = []
        for x in a:
            if removedcount[x] > 0:
                removedcount[x] -= 1
                for y in added:
                    if y[0] == x[0]:
                        changed.append((x[0], x[1], y[1]))
                        added.remove(y)
                        break
                else:
                    removed.append(x)
        added = [x[0] for x in added]
        removed = [x[0] for x in removed]
        return changed, added, removed

    if 'basewhitelist' in a_data and a_data['basewhitelist'] != b_data['basewhitelist']:
        output.append(color_format("{color_title}basewhitelist changed{color_default} from '%s' to '%s'") % (a_data['basewhitelist'], b_data['basewhitelist']))
        if a_data['basewhitelist'] and b_data['basewhitelist']:
            output.append("changed items: %s" % a_data['basewhitelist'].symmetric_difference(b_data['basewhitelist']))

    if 'taskwhitelist' in a_data and a_data['taskwhitelist'] != b_data['taskwhitelist']:
        output.append(color_format("{color_title}taskwhitelist changed{color_default} from '%s' to '%s'") % (a_data['taskwhitelist'], b_data['taskwhitelist']))
        if a_data['taskwhitelist'] and b_data['taskwhitelist']:
            output.append("changed items: %s" % a_data['taskwhitelist'].symmetric_difference(b_data['taskwhitelist']))

    if a_data['taskdeps'] != b_data['taskdeps']:
        output.append(color_format("{color_title}Task dependencies changed{color_default} from:\n%s\nto:\n%s") % (sorted(a_data['taskdeps']), sorted(b_data['taskdeps'])))

    if a_data['basehash'] != b_data['basehash'] and not collapsed:
        output.append(color_format("{color_title}basehash changed{color_default} from %s to %s") % (a_data['basehash'], b_data['basehash']))

    changed, added, removed = dict_diff(a_data['gendeps'], b_data['gendeps'], a_data['basewhitelist'] & b_data['basewhitelist'])
    if changed:
        for dep in changed:
            output.append(color_format("{color_title}List of dependencies for variable %s changed from '{color_default}%s{color_title}' to '{color_default}%s{color_title}'") % (dep, a_data['gendeps'][dep], b_data['gendeps'][dep]))
            if a_data['gendeps'][dep] and b_data['gendeps'][dep]:
                output.append("changed items: %s" % a_data['gendeps'][dep].symmetric_difference(b_data['gendeps'][dep]))
    if added:
        for dep in added:
            output.append(color_format("{color_title}Dependency on variable %s was added") % (dep))
    if removed:
        for dep in removed:
            output.append(color_format("{color_title}Dependency on Variable %s was removed") % (dep))

    changed, added, removed = dict_diff(a_data['varvals'], b_data['varvals'])
    if changed:
        for dep in changed:
            oldval = a_data['varvals'][dep]
            newval = b_data['varvals'][dep]
            if newval and oldval and ('\n' in oldval or '\n' in newval):
                diff = difflib.unified_diff(oldval.splitlines(), newval.splitlines(), lineterm='')
                # Cut off the first two lines, since we aren't interested in
                # the old/new filename (they are blank anyway in this case)
                difflines = list(diff)[2:]
                if color:
                    # Add colour to diff output
                    for i, line in enumerate(difflines):
                        if line.startswith('+'):
                            line = color_format('{color_add}{line}', line=line)
                            difflines[i] = line
                        elif line.startswith('-'):
                            line = color_format('{color_remove}{line}', line=line)
                            difflines[i] = line
                output.append(color_format("{color_title}Variable {var} value changed:{color_default}\n{diff}", var=dep, diff='\n'.join(difflines)))
            elif newval and oldval and (' ' in oldval or ' ' in newval):
                output.append(color_format("{color_title}Variable {var} value changed:{color_default}\n{diff}", var=dep, diff=worddiff_str(oldval, newval, colors)))
            else:
                output.append(color_format("{color_title}Variable {var} value changed from '{color_default}{oldval}{color_title}' to '{color_default}{newval}{color_title}'{color_default}", var=dep, oldval=oldval, newval=newval))

    if not 'file_checksum_values' in a_data:
        a_data['file_checksum_values'] = {}
    if not 'file_checksum_values' in b_data:
        b_data['file_checksum_values'] = {}

    changed, added, removed = file_checksums_diff(a_data['file_checksum_values'], b_data['file_checksum_values'])
    if changed:
        for f, old, new in changed:
            output.append(color_format("{color_title}Checksum for file %s changed{color_default} from %s to %s") % (f, old, new))
    if added:
        for f in added:
            output.append(color_format("{color_title}Dependency on checksum of file %s was added") % (f))
    if removed:
        for f in removed:
            output.append(color_format("{color_title}Dependency on checksum of file %s was removed") % (f))

    if not 'runtaskdeps' in a_data:
        a_data['runtaskdeps'] = {}
    if not 'runtaskdeps' in b_data:
        b_data['runtaskdeps'] = {}

    if not collapsed:
        if len(a_data['runtaskdeps']) != len(b_data['runtaskdeps']):
            changed = ["Number of task dependencies changed"]
        else:
            changed = []
            for idx, task in enumerate(a_data['runtaskdeps']):
                a = a_data['runtaskdeps'][idx]
                b = b_data['runtaskdeps'][idx]
                if a_data['runtaskhashes'][a] != b_data['runtaskhashes'][b] and not collapsed:
                    changed.append("%s with hash %s\n changed to\n%s with hash %s" % (clean_basepath(a), a_data['runtaskhashes'][a], clean_basepath(b), b_data['runtaskhashes'][b]))

        if changed:
            clean_a = clean_basepaths_list(a_data['runtaskdeps'])
            clean_b = clean_basepaths_list(b_data['runtaskdeps'])
            if clean_a != clean_b:
                output.append(color_format("{color_title}runtaskdeps changed:{color_default}\n%s") % list_inline_diff(clean_a, clean_b, colors))
            else:
                output.append(color_format("{color_title}runtaskdeps changed:"))
            output.append("\n".join(changed))

    if 'runtaskhashes' in a_data and 'runtaskhashes' in b_data:
        a = a_data['runtaskhashes']
        b = b_data['runtaskhashes']
        changed, added, removed = dict_diff(a, b)
        if added:
            for dep in added:
                bdep_found = False
                if removed:
                    for bdep in removed:
                        if b[dep] == a[bdep]:
                            #output.append("Dependency on task %s was replaced by %s with same hash" % (dep, bdep))
                            bdep_found = True
                if not bdep_found:
                    output.append(color_format("{color_title}Dependency on task %s was added{color_default} with hash %s") % (clean_basepath(dep), b[dep]))
        if removed:
            for dep in removed:
                adep_found = False
                if added:
                    for adep in added:
                        if b[adep] == a[dep]:
                            #output.append("Dependency on task %s was replaced by %s with same hash" % (adep, dep))
                            adep_found = True
                if not adep_found:
                    output.append(color_format("{color_title}Dependency on task %s was removed{color_default} with hash %s") % (clean_basepath(dep), a[dep]))
        if changed:
            for dep in changed:
                if not collapsed:
                    output.append(color_format("{color_title}Hash for dependent task %s changed{color_default} from %s to %s") % (clean_basepath(dep), a[dep], b[dep]))
                if callable(recursecb):
                    recout = recursecb(dep, a[dep], b[dep])
                    if recout:
                        if collapsed:
                            output.extend(recout)
                        else:
                            # If a dependent hash changed, might as well print the line above and then defer to the changes in
                            # that hash since in all likelihood, they're the same changes this task also saw.
                            output = [output[-1]] + recout

    a_taint = a_data.get('taint', None)
    b_taint = b_data.get('taint', None)
    if a_taint != b_taint:
        if a_taint and a_taint.startswith('nostamp:'):
            a_taint = a_taint.replace('nostamp:', 'nostamp(uuid4):')
        if b_taint and b_taint.startswith('nostamp:'):
            b_taint = b_taint.replace('nostamp:', 'nostamp(uuid4):')
        output.append(color_format("{color_title}Taint (by forced/invalidated task) changed{color_default} from %s to %s") % (a_taint, b_taint))

    return output

def calc_basehash(sigdata):
    task = sigdata['task']
    basedata = sigdata['varvals'][task]

    if basedata is None:
        basedata = ''

    alldeps = sigdata['taskdeps']
    for dep in alldeps:
        basedata = basedata + dep
        val = sigdata['varvals'][dep]
        if val is not None:
            basedata = basedata + str(val)

    return hashlib.sha256(basedata.encode("utf-8")).hexdigest()

def calc_taskhash(sigdata):
    data = sigdata['basehash']

    for dep in sigdata['runtaskdeps']:
        data = data + sigdata['runtaskhashes'][dep]

    for c in sigdata['file_checksum_values']:
        if c[1]:
            data = data + c[1]

    if 'taint' in sigdata:
        if 'nostamp:' in sigdata['taint']:
            data = data + sigdata['taint'][8:]
        else:
            data = data + sigdata['taint']

    return hashlib.sha256(data.encode("utf-8")).hexdigest()
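
# Note: calc_basehash() and calc_taskhash() recompute the hashes purely from a dumped
# signature dictionary, mirroring how SignatureGeneratorBasic builds them at parse and
# run time; dump_sigtask() and dump_sigfile() use them to cross-check the stored values.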

def dump_sigfile(a):
    output = []

    with open(a, 'rb') as f:
        p1 = pickle.Unpickler(f)
        a_data = p1.load()

    output.append("basewhitelist: %s" % (a_data['basewhitelist']))

    output.append("taskwhitelist: %s" % (a_data['taskwhitelist']))

    output.append("Task dependencies: %s" % (sorted(a_data['taskdeps'])))

    output.append("basehash: %s" % (a_data['basehash']))

    for dep in a_data['gendeps']:
        output.append("List of dependencies for variable %s is %s" % (dep, a_data['gendeps'][dep]))

    for dep in a_data['varvals']:
        output.append("Variable %s value is %s" % (dep, a_data['varvals'][dep]))

    if 'runtaskdeps' in a_data:
        output.append("Tasks this task depends on: %s" % (a_data['runtaskdeps']))

    if 'file_checksum_values' in a_data:
        output.append("This task depends on the checksums of files: %s" % (a_data['file_checksum_values']))

    if 'runtaskhashes' in a_data:
        for dep in a_data['runtaskhashes']:
            output.append("Hash for dependent task %s is %s" % (dep, a_data['runtaskhashes'][dep]))

    if 'taint' in a_data:
        if a_data['taint'].startswith('nostamp:'):
            msg = a_data['taint'].replace('nostamp:', 'nostamp(uuid4):')
        else:
            msg = a_data['taint']
        output.append("Tainted (by forced/invalidated task): %s" % msg)

    if 'task' in a_data:
        computed_basehash = calc_basehash(a_data)
        output.append("Computed base hash is %s and from file %s" % (computed_basehash, a_data['basehash']))
    else:
        output.append("Unable to compute base hash")

    computed_taskhash = calc_taskhash(a_data)
    output.append("Computed task hash is %s" % computed_taskhash)

    return output
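
# Minimal usage sketch (assumed caller, not part of this module): roughly how tools
# such as bitbake-dumpsig / bitbake-diffsigs consume the helpers above.
#
#   import bb.siggen
#   print('\n'.join(bb.siggen.dump_sigfile('foo.do_compile.sigdata.<hash>')))
#   print('\n'.join(bb.siggen.compare_sigfiles('old.sigdata', 'new.sigdata', color=True)))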