diff options
-rwxr-xr-x | bin/bitbake | 144 | ||||
-rw-r--r-- | lib/bb/cache.py | 141 | ||||
-rw-r--r-- | lib/bb/shell.py | 4 |
3 files changed, 144 insertions, 145 deletions
diff --git a/bin/bitbake b/bin/bitbake index 3ab8d7e3b..ecd640012 100755 --- a/bin/bitbake +++ b/bin/bitbake @@ -34,145 +34,6 @@ parsespin = itertools.cycle( r'|/-\\' ) __version__ = "1.7.0" #============================================================================# -# BBParsingStatus -#============================================================================# -class BBParsingStatus: - """ - The initial idea for this status class is to use the data when it is - already loaded instead of loading it from various place over and over - again. - """ - - def __init__(self): - """ - Direct cache variables - """ - self.providers = {} - self.rproviders = {} - self.packages = {} - self.packages_dynamic = {} - self.possible_world = [] - self.pkg_pn = {} - self.pkg_fn = {} - self.pkg_pvpr = {} - self.pkg_dp = {} - self.pn_provides = {} - self.all_depends = Set() - self.build_all = {} - self.deps = {} - self.rundeps = {} - self.runrecs = {} - self.task_queues = {} - self.task_deps = {} - self.stamp = {} - self.preferred = {} - - """ - Indirect Cache variables - """ - self.ignored_dependencies = [] - self.world_target = Set() - self.bbfile_priority = {} - self.bbfile_config_priorities = [] - - - def handle_bb_data(self, file_name, bb_cache, cached): - """ - We will fill the dictionaries with the stuff we - need for building the tree more fast - """ - - pn = bb_cache.getVar('PN', file_name, True) - pv = bb_cache.getVar('PV', file_name, True) - pr = bb_cache.getVar('PR', file_name, True) - dp = int(bb_cache.getVar('DEFAULT_PREFERENCE', file_name, True) or "0") - provides = Set([pn] + (bb_cache.getVar("PROVIDES", file_name, True) or "").split()) - depends = (bb_cache.getVar("DEPENDS", file_name, True) or "").split() - packages = (bb_cache.getVar('PACKAGES', file_name, True) or "").split() - packages_dynamic = (bb_cache.getVar('PACKAGES_DYNAMIC', file_name, True) or "").split() - rprovides = (bb_cache.getVar("RPROVIDES", file_name, True) or "").split() - - 
self.task_queues[file_name] = bb_cache.getVar("_task_graph", file_name, True) - self.task_deps[file_name] = bb_cache.getVar("_task_deps", file_name, True) - - # build PackageName to FileName lookup table - if pn not in self.pkg_pn: - self.pkg_pn[pn] = [] - self.pkg_pn[pn].append(file_name) - - self.build_all[file_name] = int(bb_cache.getVar('BUILD_ALL_DEPS', file_name, True) or "0") - self.stamp[file_name] = bb_cache.getVar('STAMP', file_name, True) - - # build FileName to PackageName lookup table - self.pkg_fn[file_name] = pn - self.pkg_pvpr[file_name] = (pv,pr) - self.pkg_dp[file_name] = dp - - # Build forward and reverse provider hashes - # Forward: virtual -> [filenames] - # Reverse: PN -> [virtuals] - if pn not in self.pn_provides: - self.pn_provides[pn] = Set() - self.pn_provides[pn] |= provides - - for provide in provides: - if provide not in self.providers: - self.providers[provide] = [] - self.providers[provide].append(file_name) - - self.deps[file_name] = Set() - for dep in depends: - self.all_depends.add(dep) - self.deps[file_name].add(dep) - - # Build reverse hash for PACKAGES, so runtime dependencies - # can be be resolved (RDEPENDS, RRECOMMENDS etc.) 
- for package in packages: - if not package in self.packages: - self.packages[package] = [] - self.packages[package].append(file_name) - rprovides += (bb_cache.getVar("RPROVIDES_%s" % package, file_name, 1) or "").split() - - for package in packages_dynamic: - if not package in self.packages_dynamic: - self.packages_dynamic[package] = [] - self.packages_dynamic[package].append(file_name) - - for rprovide in rprovides: - if not rprovide in self.rproviders: - self.rproviders[rprovide] = [] - self.rproviders[rprovide].append(file_name) - - # Build hash of runtime depends and rececommends - - def add_dep(deplist, deps): - for dep in deps: - if not dep in deplist: - deplist[dep] = "" - - if not file_name in self.rundeps: - self.rundeps[file_name] = {} - if not file_name in self.runrecs: - self.runrecs[file_name] = {} - - for package in packages + [pn]: - if not package in self.rundeps[file_name]: - self.rundeps[file_name][package] = {} - if not package in self.runrecs[file_name]: - self.runrecs[file_name][package] = {} - - add_dep(self.rundeps[file_name][package], bb.utils.explode_deps(bb_cache.getVar('RDEPENDS', file_name, True) or "")) - add_dep(self.runrecs[file_name][package], bb.utils.explode_deps(bb_cache.getVar('RRECOMMENDS', file_name, True) or "")) - add_dep(self.rundeps[file_name][package], bb.utils.explode_deps(bb_cache.getVar("RDEPENDS_%s" % package, file_name, True) or "")) - add_dep(self.runrecs[file_name][package], bb.utils.explode_deps(bb_cache.getVar("RRECOMMENDS_%s" % package, file_name, True) or "")) - - # Collect files we may need for possible world-dep - # calculations - if not bb_cache.getVar('BROKEN', file_name, True) and not bb_cache.getVar('EXCLUDE_FROM_WORLD', file_name, True): - self.possible_world.append(file_name) - - -#============================================================================# # BBStatistics #============================================================================# class BBStatistics: @@ -215,7 +76,6 @@ class 
BBCooker: Manages one bitbake build run """ - ParsingStatus = BBParsingStatus # make it visible from the shell Statistics = BBStatistics # make it visible from the shell def __init__( self ): @@ -581,7 +441,7 @@ class BBCooker: def myProgressCallback( self, x, y, f, bb_cache, from_cache ): # feed the status with new input - self.status.handle_bb_data(f, bb_cache, from_cache) + bb_cache.handle_data(f, self.status) if os.isatty(sys.stdout.fileno()): sys.stdout.write("\rNOTE: Handling BitBake files: %s (%04d/%04d) [%2d %%]" % ( parsespin.next(), x, y, x*100/y ) ) @@ -723,7 +583,7 @@ class BBCooker: sys.exit( self.stats.show() ) # initialise the parsing status now we know we will need deps - self.status = BBParsingStatus() + self.status = bb.cache.CacheData() ignore = bb.data.getVar("ASSUME_PROVIDED", self.configuration.data, 1) or "" self.status.ignored_dependencies = Set( ignore.split() ) diff --git a/lib/bb/cache.py b/lib/bb/cache.py index 774e74b86..42bb552c2 100644 --- a/lib/bb/cache.py +++ b/lib/bb/cache.py @@ -33,6 +33,7 @@ Place, Suite 330, Boston, MA 02111-1307 USA. 
import os, re import bb.data import bb.utils +from sets import Set try: import cPickle as pickle @@ -228,7 +229,7 @@ class Cache: def sync(self): """ Save the cache - Called from the parser when complete (or exitting) + Called from the parser when complete (or exiting) """ if not self.has_cache: @@ -247,6 +248,101 @@ class Cache: except OSError: return 0 + def handle_data(self, file_name, cacheData): + """ + Save data we need into the cache + """ + + pn = self.getVar('PN', file_name, True) + pv = self.getVar('PV', file_name, True) + pr = self.getVar('PR', file_name, True) + dp = int(self.getVar('DEFAULT_PREFERENCE', file_name, True) or "0") + provides = Set([pn] + (self.getVar("PROVIDES", file_name, True) or "").split()) + depends = (self.getVar("DEPENDS", file_name, True) or "").split() + packages = (self.getVar('PACKAGES', file_name, True) or "").split() + packages_dynamic = (self.getVar('PACKAGES_DYNAMIC', file_name, True) or "").split() + rprovides = (self.getVar("RPROVIDES", file_name, True) or "").split() + + cacheData.task_queues[file_name] = self.getVar("_task_graph", file_name, True) + cacheData.task_deps[file_name] = self.getVar("_task_deps", file_name, True) + + # build PackageName to FileName lookup table + if pn not in cacheData.pkg_pn: + cacheData.pkg_pn[pn] = [] + cacheData.pkg_pn[pn].append(file_name) + + cacheData.build_all[file_name] = int(self.getVar('BUILD_ALL_DEPS', file_name, True) or "0") + cacheData.stamp[file_name] = self.getVar('STAMP', file_name, True) + + # build FileName to PackageName lookup table + cacheData.pkg_fn[file_name] = pn + cacheData.pkg_pvpr[file_name] = (pv,pr) + cacheData.pkg_dp[file_name] = dp + + # Build forward and reverse provider hashes + # Forward: virtual -> [filenames] + # Reverse: PN -> [virtuals] + if pn not in cacheData.pn_provides: + cacheData.pn_provides[pn] = Set() + cacheData.pn_provides[pn] |= provides + + for provide in provides: + if provide not in cacheData.providers: + cacheData.providers[provide] = [] 
+ cacheData.providers[provide].append(file_name) + + cacheData.deps[file_name] = Set() + for dep in depends: + cacheData.all_depends.add(dep) + cacheData.deps[file_name].add(dep) + + # Build reverse hash for PACKAGES, so runtime dependencies + # can be resolved (RDEPENDS, RRECOMMENDS etc.) + for package in packages: + if not package in cacheData.packages: + cacheData.packages[package] = [] + cacheData.packages[package].append(file_name) + rprovides += (self.getVar("RPROVIDES_%s" % package, file_name, 1) or "").split() + + for package in packages_dynamic: + if not package in cacheData.packages_dynamic: + cacheData.packages_dynamic[package] = [] + cacheData.packages_dynamic[package].append(file_name) + + for rprovide in rprovides: + if not rprovide in cacheData.rproviders: + cacheData.rproviders[rprovide] = [] + cacheData.rproviders[rprovide].append(file_name) + + # Build hash of runtime depends and recommends + + def add_dep(deplist, deps): + for dep in deps: + if not dep in deplist: + deplist[dep] = "" + + if not file_name in cacheData.rundeps: + cacheData.rundeps[file_name] = {} + if not file_name in cacheData.runrecs: + cacheData.runrecs[file_name] = {} + + for package in packages + [pn]: + if not package in cacheData.rundeps[file_name]: + cacheData.rundeps[file_name][package] = {} + if not package in cacheData.runrecs[file_name]: + cacheData.runrecs[file_name][package] = {} + + add_dep(cacheData.rundeps[file_name][package], bb.utils.explode_deps(self.getVar('RDEPENDS', file_name, True) or "")) + add_dep(cacheData.runrecs[file_name][package], bb.utils.explode_deps(self.getVar('RRECOMMENDS', file_name, True) or "")) + add_dep(cacheData.rundeps[file_name][package], bb.utils.explode_deps(self.getVar("RDEPENDS_%s" % package, file_name, True) or "")) + add_dep(cacheData.runrecs[file_name][package], bb.utils.explode_deps(self.getVar("RRECOMMENDS_%s" % package, file_name, True) or "")) + + # Collect files we may need for possible world-dep + # calculations + if not 
self.getVar('BROKEN', file_name, True) and not self.getVar('EXCLUDE_FROM_WORLD', file_name, True): + cacheData.possible_world.append(file_name) + + def load_bbfile( self, bbfile , config): """ Load and parse one .bb build file @@ -293,3 +389,46 @@ def init(cooker): """ return Cache(cooker) + + +#============================================================================# +# CacheData +#============================================================================# +class CacheData: + """ + The data structures we compile from the cached data + """ + + def __init__(self): + """ + Direct cache variables + (from Cache.handle_data) + """ + self.providers = {} + self.rproviders = {} + self.packages = {} + self.packages_dynamic = {} + self.possible_world = [] + self.pkg_pn = {} + self.pkg_fn = {} + self.pkg_pvpr = {} + self.pkg_dp = {} + self.pn_provides = {} + self.all_depends = Set() + self.build_all = {} + self.deps = {} + self.rundeps = {} + self.runrecs = {} + self.task_queues = {} + self.task_deps = {} + self.stamp = {} + self.preferred = {} + + """ + Indirect Cache variables + (set elsewhere) + """ + self.ignored_dependencies = [] + self.world_target = Set() + self.bbfile_priority = {} + self.bbfile_config_priorities = [] diff --git a/lib/bb/shell.py b/lib/bb/shell.py index 889d90fb1..dbf7363d4 100644 --- a/lib/bb/shell.py +++ b/lib/bb/shell.py @@ -57,7 +57,7 @@ try: except NameError: from sets import Set as set import sys, os, readline, socket, httplib, urllib, commands, popen2, copy, shlex, Queue, fnmatch -from bb import data, parse, build, fatal +from bb import data, parse, build, fatal, cache __version__ = "0.5.3.1" __credits__ = """BitBake Shell Version %s (C) 2005 Michael 'Mickey' Lauer <mickey@Vanille.de> @@ -392,7 +392,7 @@ SRC_URI = "" def parse( self, params ): """(Re-)parse .bb files and calculate the dependency graph""" - cooker.status = cooker.ParsingStatus() + cooker.status = cache.CacheData() ignore = data.getVar("ASSUME_PROVIDED", 
cooker.configuration.data, 1) or "" cooker.status.ignored_dependencies = set( ignore.split() ) cooker.handleCollections( data.getVar("BBFILE_COLLECTIONS", cooker.configuration.data, 1) ) |