From 75867c18e276d18f3207d2f61567332096a190b3 Mon Sep 17 00:00:00 2001
From: Holger Hans Peter Freyther
Date: Tue, 17 May 2005 22:25:07 +0000
Subject: lib/bb/data.py:

-Provide a special 'collection/dict' for the collected bb-files. If we use
 persistent storage (CACHE) we will not hold any data instance in memory but
 load it from disk on demand. This reduces BitBake's memory usage to roughly
 40MB. The initial parsing currently takes roughly thirty seconds more.

lib/bb/make.py:
-Use the new data implementation for pkgdata.
-Do not access pkgdata[f] if we already have the data in our hands.
---
 lib/bb/data.py | 40 ++++++++++++++++++++++++++++++++++++++++
 lib/bb/make.py | 21 ++++++++++++---------
 2 files changed, 52 insertions(+), 9 deletions(-)

(limited to 'lib/bb')

diff --git a/lib/bb/data.py b/lib/bb/data.py
index 0aeda8de0..32aab74f3 100644
--- a/lib/bb/data.py
+++ b/lib/bb/data.py
@@ -38,6 +38,38 @@ from bb import note, debug, data_dict
 _dict_type = data_dict.DataDict
 _dict_p_type = data_dict.DataDictPackage
 
+class DataDictCache:
+    """
+    Databacked Dictionary implementation
+    """
+    def __init__(self, cache_dir):
+        self.cache_dir = cache_dir
+        self.files = []
+
+    def has_key(self,key):
+        return key in self.files
+
+    def keys(self):
+        return self.files
+
+    def __setitem__(self, key, data):
+        """
+        Add the key to the list of known files and
+        place the data in the cache?
+        """
+        if key in self.files:
+            return
+
+        self.files.append(key)
+
+    def __getitem__(self, key):
+        if not key in self.files:
+            return None
+
+        # not cached yet
+        return _dict_p_type(self.cache_dir, key,False,None)
+
+
 def init():
     return _dict_type()
 
@@ -48,6 +80,14 @@ def init_db(cache,name,clean,parent = None):
 def init_db_mtime(cache,cache_bbfile):
     return _dict_p_type.mtime(cache,cache_bbfile)
 
+def pkgdata(use_cache, cache):
+    """
+    Return some sort of dictionary to lookup parsed dictionaires
+    """
+    if use_cache:
+        return DataDictCache(cache)
+    return {}
+
 _data_dict = init()
 
 def createCopy(source):
diff --git a/lib/bb/make.py b/lib/bb/make.py
index 8090170eb..f7235f415 100644
--- a/lib/bb/make.py
+++ b/lib/bb/make.py
@@ -29,7 +29,7 @@ This file is part of the BitBake build tools.
 
 from bb import debug, digraph, data, fetch, fatal, error, note, event, parse
 import copy, bb, re, sys, os, glob, sre_constants
-pkgdata = {}
+pkgdata = None
 cfg = data.init()
 cache = None
 digits = "0123456789"
@@ -113,8 +113,10 @@ def collect_bbfiles( progressCallback ):
     """Collect all available .bb build files"""
     parsed, cached, skipped, masked = 0, 0, 0, 0
 
-    global cache
-    cache = bb.data.getVar( "CACHE", cfg, 1 )
+    global cache, pkgdata
+    cache = bb.data.getVar( "CACHE", cfg, 1 )
+    pkgdata = data.pkgdata( not cache in [None, ''], cache )
+
     if not cache in [None, '']:
         print "NOTE: Using cache in '%s'" % cache
         try:
@@ -157,23 +159,24 @@ def collect_bbfiles( progressCallback ):
 
         # read a file's metadata
        try:
-            pkgdata[f], fromCache = load_bbfile(f)
+            bb_data, fromCache = load_bbfile(f)
             if fromCache: cached += 1
             else: parsed += 1
             deps = None
-            if pkgdata[f] is not None:
+            if bb_data is not None:
                 # allow metadata files to add items to BBFILES
                 #data.update_data(pkgdata[f])
-                addbbfiles = data.getVar('BBFILES', pkgdata[f]) or None
+                addbbfiles = data.getVar('BBFILES', bb_data) or None
                 if addbbfiles:
                     for aof in addbbfiles.split():
                         if not files.count(aof):
                             if not os.path.isabs(aof):
                                 aof = os.path.join(os.path.dirname(f),aof)
                             files.append(aof)
-                for var in pkgdata[f].keys():
-                    if data.getVarFlag(var, "handler", pkgdata[f]) and data.getVar(var, pkgdata[f]):
-                        event.register(data.getVar(var, pkgdata[f]))
+                for var in bb_data.keys():
+                    if data.getVarFlag(var, "handler", bb_data) and data.getVar(var, bb_data):
+                        event.register(data.getVar(var, bb_data))
+                pkgdata[f] = bb_data
         except IOError, e:
             bb.error("opening %s: %s" % (f, e))
             pass
--
cgit 1.2.3-korg
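
The core idea of the patch, keeping only the file names in memory and re-reading the
parsed data from the on-disk CACHE whenever a recipe is looked up, can be illustrated
with a small stand-alone sketch. Everything below is an assumption of this example
rather than BitBake code: the OnDiskDict name, the pickle-per-key file layout, and the
/tmp cache path have no counterpart in the patch, and unlike DataDictCache (where the
parser has already written the data out and __setitem__ only records the key), this
sketch also persists the value on assignment.

import os
import pickle


class OnDiskDict(object):
    """Dictionary-like cache: keys stay in memory, values live on disk."""

    def __init__(self, cache_dir):
        self.cache_dir = cache_dir
        self.known_keys = []          # only the key names stay resident
        if not os.path.isdir(cache_dir):
            os.makedirs(cache_dir)

    def _path(self, key):
        # One pickle file per key; flatten path separators so path-like
        # keys (such as .bb file names) map to a single cache file name.
        return os.path.join(self.cache_dir, key.replace(os.sep, "_") + ".p")

    def keys(self):
        return list(self.known_keys)

    def __contains__(self, key):
        return key in self.known_keys

    def __setitem__(self, key, value):
        # Persist immediately; the value itself is not kept in memory.
        with open(self._path(key), "wb") as handle:
            pickle.dump(value, handle)
        if key not in self.known_keys:
            self.known_keys.append(key)

    def __getitem__(self, key):
        if key not in self.known_keys:
            return None
        # Load from disk on demand, only when a caller actually indexes us.
        with open(self._path(key), "rb") as handle:
            return pickle.load(handle)


if __name__ == "__main__":
    d = OnDiskDict("/tmp/bb-sketch-cache")
    d["foo.bb"] = {"PN": "foo", "PV": "1.0"}
    print(d["foo.bb"])    # read back from disk, not from an in-memory copy

As in the patch's DataDictCache, a missing key yields None rather than raising
KeyError, and the price of the smaller resident set is an extra disk read per lookup,
the same trade-off the commit message notes as roughly thirty seconds of additional
parsing time.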