diff options
author | Richard Purdie <richard.purdie@linuxfoundation.org> | 2014-12-08 21:25:23 +0000 |
---|---|---|
committer | Richard Purdie <richard.purdie@linuxfoundation.org> | 2015-02-11 17:34:57 +0000 |
commit | 67ebf368aab8fbe372374190f013bdf2c83c59de (patch) | |
tree | 11e3301525597218432b1d0541db5301455458d2 | |
parent | d71407dbbf82659f245e002ecaad02b26838f455 (diff) | |
download | bitbake-67ebf368aab8fbe372374190f013bdf2c83c59de.tar.gz |
cache/fetch2/siggen: Ensure we track include history for file checksums
Currently, if you reference a file url, its checksum is included in the
task hash, however if you change to a different file at a different
location, perhaps taking advantage of the FILESPATH functionality, the
system will not reparse the file in question and change its checksum to
match the new file.
To correctly handle this, the system not only needs to know if the
existing file still exists or not, but also check the existence
of every file it would have looked at when computing the original file.
We already do this in the bitbake parsing code for class inclusion. This
change uses the same technique to log the file list we looked at and
if files in these locations exist when they previously did not, to
invalidate and reparse the file.
Since data stored in the cache is flattened text, we have to use a string
form of the data and split on the ":" character which is ugly, but is
an internal detail we can improve later if a better method is found.
The cache version changes to trigger a reparse since the previous
cache data is now incompatible.
[YOCTO #7019]
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
-rw-r--r-- | lib/bb/cache.py | 9 | ||||
-rw-r--r-- | lib/bb/fetch2/__init__.py | 35 | ||||
-rw-r--r-- | lib/bb/siggen.py | 3 |
3 files changed, 26 insertions, 21 deletions
diff --git a/lib/bb/cache.py b/lib/bb/cache.py index ac0c27f92..715da07e8 100644 --- a/lib/bb/cache.py +++ b/lib/bb/cache.py @@ -43,7 +43,7 @@ except ImportError: logger.info("Importing cPickle failed. " "Falling back to a very slow implementation.") -__cache_version__ = "147" +__cache_version__ = "148" def getCacheFile(path, filename, data_hash): return os.path.join(path, filename + "." + data_hash) @@ -529,8 +529,11 @@ class Cache(object): if hasattr(info_array[0], 'file_checksums'): for _, fl in info_array[0].file_checksums.items(): for f in fl.split(): - if not ('*' in f or os.path.exists(f)): - logger.debug(2, "Cache: %s's file checksum list file %s was removed", + if "*" in f: + continue + f, exist = f.split(":") + if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)): + logger.debug(2, "Cache: %s's file checksum list file %s changed", fn, f) self.remove(fn) return False diff --git a/lib/bb/fetch2/__init__.py b/lib/bb/fetch2/__init__.py index c0a4763a8..378d41e1c 100644 --- a/lib/bb/fetch2/__init__.py +++ b/lib/bb/fetch2/__init__.py @@ -936,22 +936,21 @@ def get_checksum_file_list(d): ud = fetch.ud[u] if ud and isinstance(ud.method, local.Local): - ud.setup_localpath(d) - f = ud.localpath - pth = ud.decodedurl - if '*' in pth: - f = os.path.join(os.path.abspath(f), pth) - if f.startswith(dl_dir): - # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else - if os.path.exists(f): - bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN', True), os.path.basename(f))) - else: - bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN', True), os.path.basename(f))) - filelist.append(f) + paths = ud.method.localpaths(ud, d) + for f in paths: + pth = ud.decodedurl + if '*' in pth: + f = os.path.join(os.path.abspath(f), pth) + if f.startswith(dl_dir): + # The local fetcher's behaviour is to 
return a path under DL_DIR if it couldn't find the file anywhere else + if os.path.exists(f): + bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN', True), os.path.basename(f))) + else: + bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN', True), os.path.basename(f))) + filelist.append(f + ":" + str(os.path.exists(f))) return " ".join(filelist) - def get_file_checksums(filelist, pn): """Get a list of the checksums for a list of local files @@ -981,6 +980,10 @@ def get_file_checksums(filelist, pn): checksums = [] for pth in filelist.split(): + exist = pth.split(":")[1] + if exist == "False": + continue + pth = pth.split(":")[0] if '*' in pth: # Handle globs for f in glob.glob(pth): @@ -988,14 +991,12 @@ def get_file_checksums(filelist, pn): checksums.extend(checksum_dir(f)) else: checksum = checksum_file(f) - if checksum: - checksums.append((f, checksum)) + checksums.append((f, checksum)) elif os.path.isdir(pth): checksums.extend(checksum_dir(pth)) else: checksum = checksum_file(pth) - if checksum: - checksums.append((pth, checksum)) + checksums.append((pth, checksum)) checksums.sort(key=operator.itemgetter(1)) return checksums diff --git a/lib/bb/siggen.py b/lib/bb/siggen.py index 092732142..e77be6abf 100644 --- a/lib/bb/siggen.py +++ b/lib/bb/siggen.py @@ -192,7 +192,8 @@ class SignatureGeneratorBasic(SignatureGenerator): checksums = bb.fetch2.get_file_checksums(dataCache.file_checksums[fn][task], recipename) for (f,cs) in checksums: self.file_checksum_values[k][f] = cs - data = data + cs + if cs: + data = data + cs taint = self.read_taint(fn, task, dataCache.stamp[fn]) if taint: |