diff options
author | Holger Hans Peter Freyther <zecke@selfish.org> | 2005-05-17 22:20:02 +0000 |
---|---|---|
committer | Holger Hans Peter Freyther <zecke@selfish.org> | 2005-05-17 22:20:02 +0000 |
commit | 3e4a93e745408f58009145b8b01e5740c406db91 (patch) | |
tree | a7a09e86f34eadce6582e1145f3d3067caa12acf | |
parent | e629173521df39dbba8ff9b346ac7f1c727c1b18 (diff) | |
download | bitbake-3e4a93e745408f58009145b8b01e5740c406db91.tar.gz |
lib/bb/data.py:
· -Add a method to get the modification time of a named bb.data instance
· -Add a method to create a named bb.data instance. These instances
· inherit the global configuration.
lib/bb/data_dict.py:
· -Add persistence to the data_dict implementation; on the commit
method we will now pickle the internal dict
lib/bb/make.py:
-Use the new persistent/named bb.data methods.
-rw-r--r-- | lib/bb/data.py | 7 | ||||
-rw-r--r-- | lib/bb/data_dict.py | 68 | ||||
-rw-r--r-- | lib/bb/make.py | 33 |
3 files changed, 81 insertions, 27 deletions
diff --git a/lib/bb/data.py b/lib/bb/data.py index 149f86a18..0aeda8de0 100644 --- a/lib/bb/data.py +++ b/lib/bb/data.py @@ -36,11 +36,18 @@ sys.path.append(path) from bb import note, debug, data_dict _dict_type = data_dict.DataDict +_dict_p_type = data_dict.DataDictPackage def init(): return _dict_type() +def init_db(cache,name,clean,parent = None): + return _dict_p_type(cache,name,clean,parent) + +def init_db_mtime(cache,cache_bbfile): + return _dict_p_type.mtime(cache,cache_bbfile) + _data_dict = init() def createCopy(source): diff --git a/lib/bb/data_dict.py b/lib/bb/data_dict.py index 22dfdf2fe..b4d6fd73e 100644 --- a/lib/bb/data_dict.py +++ b/lib/bb/data_dict.py @@ -28,6 +28,14 @@ Based on functions from the base bb module, Copyright 2003 Holger Schurig import os, re, sys, types, copy from bb import note, debug, fatal +try: + import cPickle as pickle +except ImportError: + import pickle + print "NOTE: Importing cPickle failed. Falling back to a very slow implementation." + + + __setvar_regexp__ = {} __setvar_regexp__["_append"] = re.compile('(?P<base>.*?)%s(_(?P<add>.*))?' % "_append") __setvar_regexp__["_prepend"] = re.compile('(?P<base>.*?)%s(_(?P<add>.*))?' 
% "_prepend") @@ -174,3 +182,63 @@ class DataDict: def __setitem__(self,x,y): self.dict.__setitem__(x,y) + + +class DataDictPackage(DataDict): + """ + Persistent Data Storage + """ + def sanitize_filename(bbfile): + return bbfile.replace( '/', '_' ) + sanitize_filename = staticmethod(sanitize_filename) + + def unpickle(self): + """ + Restore the dict from memory + """ + cache_bbfile = self.sanitize_filename(self.bbfile) + p = pickle.Unpickler( file("%s/%s"%(self.cache,cache_bbfile),"rb")) + self.dict = p.load() + funcstr = self.getVar('__functions__', 0) + if funcstr: + comp = compile(funcstr, "<pickled>", "exec") + exec comp in __builtins__ + + def linkDataSet(self,parent): + if not parent == None: + self.initVar("_data") + self.dict["_data"] = parent + + + def __init__(self,cache,name,clean,parent): + """ + Construct a persistent data instance + """ + #Initialize the dictionary + DataDict.__init__(self) + + self.cache = cache + self.bbfile = name + + # Either unpickle the data or do copy on write + if clean: + self.linkDataSet(parent) + else: + self.unpickle() + + def commit(self, mtime): + """ + Save the package to a permanent storage + """ + cache_bbfile = self.sanitize_filename(self.bbfile) + p = pickle.Pickler(file("%s/%s" %(self.cache,cache_bbfile), "wb" ), -1 ) + p.dump( self.dict ) + + def mtime(cache,bbfile): + cache_bbfile = DataDictPackage.sanitize_filename(bbfile) + try: + return os.stat( "%s/%s" % (cache,cache_bbfile) ) + except OSError: + return 0 + mtime = staticmethod(mtime) + diff --git a/lib/bb/make.py b/lib/bb/make.py index fc790bb3b..8090170eb 100644 --- a/lib/bb/make.py +++ b/lib/bb/make.py @@ -28,11 +28,6 @@ This file is part of the BitBake build tools. from bb import debug, digraph, data, fetch, fatal, error, note, event, parse import copy, bb, re, sys, os, glob, sre_constants -try: - import cPickle as pickle -except ImportError: - import pickle - print "NOTE: Importing cPickle failed. Falling back to a very slow implementation." 
pkgdata = {} cfg = data.init() @@ -76,12 +71,8 @@ def load_bbfile( bbfile ): """Load and parse one .bb build file""" if not cache in [None, '']: - cache_bbfile = bbfile.replace( '/', '_' ) - - try: - cache_mtime = os.stat( "%s/%s" % ( cache, cache_bbfile ) )[8] - except OSError: - cache_mtime = 0 + # get the times + cache_mtime = data.init_db_mtime(cache, bbfile) file_mtime = parse.cached_mtime(bbfile) if file_mtime > cache_mtime: @@ -89,7 +80,7 @@ def load_bbfile( bbfile ): pass else: #print " : '%s' clean. loading from cache..." % bbfile - cache_data = unpickle_bb( cache_bbfile ) + cache_data = data.init_db( cache, bbfile, False ) if deps_clean(cache_data): return cache_data, True @@ -108,28 +99,16 @@ def load_bbfile( bbfile ): # go there oldpath = os.path.abspath(os.getcwd()) os.chdir(topdir) - bb = data.createCopy(cfg) + bb = data.init_db(cache,bbfile, True, cfg) try: parse.handle(bbfile, bb) # read .bb data - if not cache in [None, '']: pickle_bb( cache_bbfile, bb) # write cache + if not cache in [None, '']: + bb.commit(parse.cached_mtime(bbfile)) # write cache os.chdir(oldpath) return bb, False finally: os.chdir(oldpath) -def pickle_bb( bbfile, bb ): - p = pickle.Pickler( file( "%s/%s" % ( cache, bbfile ), "wb" ), -1 ) - p.dump( bb ) - -def unpickle_bb( bbfile ): - p = pickle.Unpickler( file( "%s/%s" % ( cache, bbfile ), "rb" ) ) - bb = p.load() - funcstr = data.getVar('__functions__', bb) - if funcstr: - comp = compile(funcstr, "<pickled>", "exec") - exec comp in __builtins__ - return bb - def collect_bbfiles( progressCallback ): """Collect all available .bb build files""" |