From 14c31e18e4a4a52ee54056aeefb09ff8c295b475 Mon Sep 17 00:00:00 2001 From: Richard Purdie Date: Fri, 25 Jul 2014 14:50:43 +0100 Subject: cache: Don't reload the cache file since we already have this data in memory If we're writing out merged data to disk, it's safe to assume that either we loaded the data or couldn't. Loading it again is relatively pointless and time consuming. Signed-off-by: Richard Purdie --- lib/bb/cache.py | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/lib/bb/cache.py b/lib/bb/cache.py index 936829b83..6dedd4d59 100644 --- a/lib/bb/cache.py +++ b/lib/bb/cache.py @@ -814,15 +814,7 @@ class MultiProcessCache(object): glf = bb.utils.lockfile(self.cachefile + ".lock") - try: - with open(self.cachefile, "rb") as f: - p = pickle.Unpickler(f) - data, version = p.load() - except (IOError, EOFError): - data, version = None, None - - if version != self.__class__.CACHE_VERSION: - data = self.create_cachedata() + data = self.cachedata for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]: f = os.path.join(os.path.dirname(self.cachefile), f) -- cgit 1.2.3-korg