author     Paul Eggleton <paul.eggleton@linux.intel.com>    2012-05-23 00:23:31 +0100
committer  Richard Purdie <richard.purdie@linuxfoundation.org>    2012-05-23 11:24:19 +0100
commit     0fe3cb1438d297f90dd0fc6b26362ecbff75c76d (patch)
tree       009e6f919808e2adb5b8318248e1d1e53e233b23 /lib/bb/cache.py
parent     e9516c7e14d782b943cc6e6a2e5e2111edf03d8c (diff)
download   bitbake-0fe3cb1438d297f90dd0fc6b26362ecbff75c76d.tar.gz
bitbake: refactor out codeparser cache into a separate class
We want to be able to reuse most of this functionality for the file checksum cache.

Signed-off-by: Paul Eggleton <paul.eggleton@linux.intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Diffstat (limited to 'lib/bb/cache.py')
-rw-r--r--  lib/bb/cache.py  | 116
1 file changed, 114 insertions, 2 deletions
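
For context on how the new class is meant to be consumed: MultiProcessCache reads the cache_file_name and CACHE_VERSION class attributes through self.__class__, so a consumer subclasses it, sets those two attributes, and adds its own lookup methods that read self.cachedata and record new results in self.cachedata_extras. The sketch below is illustrative only and is not part of this commit; the class name, file name and hashing helper are assumptions (bb.utils.md5_file is used purely as an example).

    class FileChecksumCache(MultiProcessCache):
        # Cache file written under PERSISTENT_DIR (or CACHE) by init_cache()
        cache_file_name = "local_file_checksum_cache.dat"
        # Bumped whenever the layout of cachedata changes
        CACHE_VERSION = 1

        def get_checksum(self, f):
            # cachedata[0] holds the entries loaded from disk; anything new
            # goes into cachedata_extras[0] so that save_extras() can write
            # it out from a worker process.
            entry = self.cachedata[0].get(f)
            if entry:
                return entry
            checksum = bb.utils.md5_file(f)
            self.cachedata_extras[0][f] = checksum
            return checksum
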
diff --git a/lib/bb/cache.py b/lib/bb/cache.py
index 47e814b57..36e6356f5 100644
--- a/lib/bb/cache.py
+++ b/lib/bb/cache.py
@@ -1,11 +1,12 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
-# BitBake 'Event' implementation
+# BitBake Cache implementation
#
# Caching of bitbake variables before task execution
# Copyright (C) 2006 Richard Purdie
+# Copyright (C) 2012 Intel Corporation
# but small sections based on code from bin/bitbake:
# Copyright (C) 2003, 2004 Chris Larson
@@ -703,4 +704,115 @@ class CacheData(object):
for info in info_array:
info.add_cacheData(self, fn)
-
+
+class MultiProcessCache(object):
+ """
+ BitBake multi-process cache implementation
+
+ Used by the codeparser & file checksum caches
+ """
+
+ def __init__(self):
+ self.cachefile = None
+ self.cachedata = self.create_cachedata()
+ self.cachedata_extras = self.create_cachedata()
+
+ def init_cache(self, d):
+ cachedir = (d.getVar("PERSISTENT_DIR", True) or
+ d.getVar("CACHE", True))
+ if cachedir in [None, '']:
+ return
+ bb.utils.mkdirhier(cachedir)
+ self.cachefile = os.path.join(cachedir, self.__class__.cache_file_name)
+ logger.debug(1, "Using cache in '%s'", self.cachefile)
+
+ try:
+ p = pickle.Unpickler(file(self.cachefile, "rb"))
+ data, version = p.load()
+ except:
+ return
+
+ if version != self.__class__.CACHE_VERSION:
+ return
+
+ self.cachedata = data
+
+ def internSet(self, items):
+ new = set()
+ for i in items:
+ new.add(intern(i))
+ return new
+
+ def compress_keys(self, data):
+ # Override in subclasses if desired
+ return
+
+ def create_cachedata(self):
+ data = [{}]
+ return data
+
+ def save_extras(self, d):
+ if not self.cachefile:
+ return
+
+ glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True)
+
+ i = os.getpid()
+ lf = None
+ while not lf:
+ lf = bb.utils.lockfile(self.cachefile + ".lock." + str(i), retry=False)
+ if not lf or os.path.exists(self.cachefile + "-" + str(i)):
+ if lf:
+ bb.utils.unlockfile(lf)
+ lf = None
+ i = i + 1
+ continue
+
+ p = pickle.Pickler(file(self.cachefile + "-" + str(i), "wb"), -1)
+ p.dump([self.cachedata_extras, self.__class__.CACHE_VERSION])
+
+ bb.utils.unlockfile(lf)
+ bb.utils.unlockfile(glf)
+
+ def merge_data(self, source, dest):
+ for j in range(0,len(dest)):
+ for h in source[j]:
+ if h not in dest[j]:
+ dest[j][h] = source[j][h]
+
+ def save_merge(self, d):
+ if not self.cachefile:
+ return
+
+ glf = bb.utils.lockfile(self.cachefile + ".lock")
+
+ try:
+ p = pickle.Unpickler(file(self.cachefile, "rb"))
+ data, version = p.load()
+ except (IOError, EOFError):
+ data, version = None, None
+
+ if version != self.__class__.CACHE_VERSION:
+ data = self.create_cachedata()
+
+ for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
+ f = os.path.join(os.path.dirname(self.cachefile), f)
+ try:
+ p = pickle.Unpickler(file(f, "rb"))
+ extradata, version = p.load()
+ except (IOError, EOFError):
+ extradata, version = self.create_cachedata(), None
+
+ if version != self.__class__.CACHE_VERSION:
+ continue
+
+ self.merge_data(extradata, data)
+ os.unlink(f)
+
+ self.compress_keys(data)
+
+ p = pickle.Pickler(file(self.cachefile, "wb"), -1)
+ p.dump([data, self.__class__.CACHE_VERSION])
+
+ bb.utils.unlockfile(glf)
+
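
The intended lifecycle, pieced together from the methods above (the subclass name comes from the illustrative sketch earlier, and the call sites are assumptions rather than code in this commit): init_cache(d) loads the shared pickle if PERSISTENT_DIR or CACHE is set, a worker process that has added entries calls save_extras(d) to dump cachedata_extras into a per-PID side file next to the main cache, and the main process later calls save_merge(d) to fold those side files back in, give subclasses a chance to compact the data via the compress_keys() hook, and rewrite the main cache file.

    cache = FileChecksumCache()   # hypothetical subclass, see sketch above
    cache.init_cache(d)           # load <cachedir>/local_file_checksum_cache.dat if present

    # ... in a parsing/worker process ...
    cache.get_checksum("/some/input/file")   # populates cachedata_extras
    cache.save_extras(d)          # writes <cachefile>-<pid> under the shared lock

    # ... back in the main process, after the workers have finished ...
    cache.save_merge(d)           # merges every <cachefile>-<pid> file and rewrites <cachefile>
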