aboutsummaryrefslogtreecommitdiffstats
path: root/bin
diff options
context:
space:
mode:
authorRichard Purdie <rpurdie@linux.intel.com>2006-04-15 22:52:28 +0000
committerRichard Purdie <rpurdie@linux.intel.com>2006-04-15 22:52:28 +0000
commitb03a424879367fd4118f60abda7f78bc5f27b957 (patch)
tree22bb4fe4d2a558db0a78d86f2be8492b280c0e77 /bin
parent67e3bddeb1dc82ab58a856b9d763b21858b1c819 (diff)
downloadbitbake-b03a424879367fd4118f60abda7f78bc5f27b957.tar.gz
bitbake/lib/bb/data.py:
bitbake/lib/bb/__init__.py: bitbake/lib/bb/data_smart.py: bitbake/lib/bb/cache.py: bitbake/lib/bb/shell.py: bitbake/bin/bitbake: * Major cache refactoring. Change the cache to store only the data bitbake needs for dependency calculations instead of all the metadata. * Separate the cache code into its own file. * Update the rest of the code to work with the cache changes. * Temporarily break the shell's poke command.
Diffstat (limited to 'bin')
-rwxr-xr-xbin/bitbake213
1 files changed, 84 insertions, 129 deletions
diff --git a/bin/bitbake b/bin/bitbake
index 81bbb40ff..508d34c93 100755
--- a/bin/bitbake
+++ b/bin/bitbake
@@ -24,7 +24,7 @@
import sys, os, getopt, glob, copy, os.path, re, time
sys.path.insert(0,os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
import bb
-from bb import utils, data, parse, debug, event, fatal
+from bb import utils, data, parse, debug, event, fatal, cache
from sets import Set
import itertools, optparse
@@ -44,7 +44,6 @@ class BBParsingStatus:
"""
def __init__(self):
- self.cache_dirty = False
self.providers = {}
self.rproviders = {}
self.packages = {}
@@ -60,34 +59,35 @@ class BBParsingStatus:
self.pkg_dp = {}
self.pn_provides = {}
self.all_depends = Set()
+ self.build_all = {}
+ self.rundeps = {}
+ self.runrecs = {}
+ self.stamp = {}
- def handle_bb_data(self, file_name, bb_data, cached):
+ def handle_bb_data(self, file_name, bb_cache, cached):
"""
We will fill the dictionaries with the stuff we
need for building the tree more fast
"""
- if bb_data == None:
- return
-
- if not cached:
- self.cache_dirty = True
-
- pn = bb.data.getVar('PN', bb_data, True)
- pv = bb.data.getVar('PV', bb_data, True)
- pr = bb.data.getVar('PR', bb_data, True)
- dp = int(bb.data.getVar('DEFAULT_PREFERENCE', bb_data, True) or "0")
- provides = Set([pn] + (bb.data.getVar("PROVIDES", bb_data, 1) or "").split())
- depends = (bb.data.getVar("DEPENDS", bb_data, True) or "").split()
- packages = (bb.data.getVar('PACKAGES', bb_data, True) or "").split()
- packages_dynamic = (bb.data.getVar('PACKAGES_DYNAMIC', bb_data, True) or "").split()
- rprovides = (bb.data.getVar("RPROVIDES", bb_data, 1) or "").split()
+ pn = bb_cache.getVar('PN', file_name, True)
+ pv = bb_cache.getVar('PV', file_name, True)
+ pr = bb_cache.getVar('PR', file_name, True)
+ dp = int(bb_cache.getVar('DEFAULT_PREFERENCE', file_name, True) or "0")
+ provides = Set([pn] + (bb_cache.getVar("PROVIDES", file_name, True) or "").split())
+ depends = (bb_cache.getVar("DEPENDS", file_name, True) or "").split()
+ packages = (bb_cache.getVar('PACKAGES', file_name, True) or "").split()
+ packages_dynamic = (bb_cache.getVar('PACKAGES_DYNAMIC', file_name, True) or "").split()
+ rprovides = (bb_cache.getVar("RPROVIDES", file_name, True) or "").split()
# build PackageName to FileName lookup table
if pn not in self.pkg_pn:
self.pkg_pn[pn] = []
self.pkg_pn[pn].append(file_name)
+ self.build_all[file_name] = int(bb_cache.getVar('BUILD_ALL_DEPS', file_name, True) or "0")
+ self.stamp[file_name] = bb_cache.getVar('STAMP', file_name, True)
+
# build FileName to PackageName lookup table
self.pkg_fn[file_name] = pn
self.pkg_pvpr[file_name] = (pv,pr)
@@ -114,7 +114,7 @@ class BBParsingStatus:
if not package in self.packages:
self.packages[package] = []
self.packages[package].append(file_name)
- rprovides += (bb.data.getVar("RPROVIDES_%s" % package, bb_data, 1) or "").split()
+ rprovides += (bb_cache.getVar("RPROVIDES_%s" % package, file_name, 1) or "").split()
for package in packages_dynamic:
if not package in self.packages_dynamic:
@@ -126,9 +126,27 @@ class BBParsingStatus:
self.rproviders[rprovide] = []
self.rproviders[rprovide].append(file_name)
+        # Build hash of runtime depends and recommends
+
+ def add_dep(deplist, deps):
+ for dep in deps:
+ if not dep in deplist:
+ deplist[dep] = ""
+
+ for package in packages + [pn]:
+ if not package in self.rundeps:
+ self.rundeps[package] = {}
+ if not package in self.runrecs:
+ self.runrecs[package] = {}
+
+ add_dep(self.rundeps[package], bb.utils.explode_deps(bb_cache.getVar('RDEPENDS', file_name, True) or ""))
+ add_dep(self.runrecs[package], bb.utils.explode_deps(bb_cache.getVar('RRECOMMENDS', file_name, True) or ""))
+ add_dep(self.rundeps[package], bb.utils.explode_deps(bb_cache.getVar("RDEPENDS_%s" % package, file_name, True) or ""))
+ add_dep(self.runrecs[package], bb.utils.explode_deps(bb_cache.getVar("RRECOMMENDS_%s" % package, file_name, True) or ""))
+
# Collect files we may need for possible world-dep
# calculations
- if not bb.data.getVar('BROKEN', bb_data, True) and not bb.data.getVar('EXCLUDE_FROM_WORLD', bb_data, True):
+ if not bb_cache.getVar('BROKEN', file_name, True) and not bb_cache.getVar('EXCLUDE_FROM_WORLD', file_name, True):
self.possible_world.append(file_name)
@@ -190,8 +208,8 @@ class BBCooker:
self.stats = BBStatistics()
self.status = None
- self.pkgdata = None
self.cache = None
+ self.bb_cache = None
def tryBuildPackage( self, fn, item, the_data ):
"""Build one package"""
@@ -226,10 +244,10 @@ class BBCooker:
If build_depends is empty, we're dealing with a runtime depends
"""
- the_data = self.pkgdata[fn]
+ the_data, fromCache = self.bb_cache.loadDataFull(fn, self)
if not buildAllDeps:
- buildAllDeps = bb.data.getVar('BUILD_ALL_DEPS', the_data, True) or False
+ buildAllDeps = self.status.build_all[fn]
# Error on build time dependency loops
if build_depends and build_depends.count(fn) > 1:
@@ -402,12 +420,14 @@ class BBCooker:
print "%-30s %20s %20s" % (p, latest[0][0] + "-" + latest[0][1],
prefstr)
+
def showEnvironment( self ):
"""Show the outer or per-package environment"""
if self.configuration.buildfile:
+ self.bb_cache = bb.cache.init(self)
try:
- self.configuration.data, fromCache = self.load_bbfile( self.configuration.buildfile )
+ self.configuration.data, fromCache = self.bb_cache.loadDataFull(self.configuration.buildfile, self)
except IOError, e:
fatal("Unable to read %s: %s" % ( self.configuration.buildfile, e ))
except Exception, e:
@@ -457,11 +477,10 @@ class BBCooker:
# look to see if one of them is already staged, or marked as preferred.
# if so, bump it to the head of the queue
for p in providers:
- the_data = self.pkgdata[p]
- pn = bb.data.getVar('PN', the_data, 1)
- pv = bb.data.getVar('PV', the_data, 1)
- pr = bb.data.getVar('PR', the_data, 1)
- stamp = '%s.do_populate_staging' % bb.data.getVar('STAMP', the_data, 1)
+ pn = self.status.pkg_fn[p]
+ pv, pr = self.status.pkg_pvpr[p]
+
+ stamp = '%s.do_populate_staging' % self.status.stamp[p]
if os.path.exists(stamp):
(newvers, fn) = preferred_versions[pn]
if not fn in eligible:
@@ -656,20 +675,11 @@ class BBCooker:
rdepends = []
self.rbuild_cache.append(item)
- the_data = self.pkgdata[fn]
- pn = self.status.pkg_fn[fn]
-
- if (item == pn):
- rdepends += bb.utils.explode_deps(bb.data.getVar('RDEPENDS', the_data, True) or "")
- rdepends += bb.utils.explode_deps(bb.data.getVar('RRECOMMENDS', the_data, True) or "")
- rdepends += bb.utils.explode_deps(bb.data.getVar("RDEPENDS_%s" % pn, the_data, True) or "")
- rdepends += bb.utils.explode_deps(bb.data.getVar('RRECOMMENDS_%s' % pn, the_data, True) or "")
- else:
- packages = (bb.data.getVar('PACKAGES', the_data, 1).split() or "")
- for package in packages:
- if package == item:
- rdepends += bb.utils.explode_deps(bb.data.getVar("RDEPENDS_%s" % package, the_data, True) or "")
- rdepends += bb.utils.explode_deps(bb.data.getVar("RRECOMMENDS_%s" % package, the_data, True) or "")
+
+ if item in self.status.rundeps:
+ rdepends += self.status.rundeps[item].keys()
+ if item in self.status.runrecs:
+ rdepends += self.status.runrecs[item].keys()
bb.debug(2, "Additional runtime dependencies for %s are: %s" % (item, " ".join(rdepends)))
@@ -698,7 +708,7 @@ class BBCooker:
self.preferred[providee] = provider
# Calculate priorities for each file
- for p in self.pkgdata.keys():
+ for p in self.status.pkg_fn.keys():
self.status.bbfile_priority[p] = calc_bbfile_priority(p)
def buildWorldTargetList(self):
@@ -729,9 +739,10 @@ class BBCooker:
self.status.possible_world = None
self.status.all_depends = None
- def myProgressCallback( self, x, y, f, file_data, from_cache ):
+ def myProgressCallback( self, x, y, f, bb_cache, from_cache ):
# feed the status with new input
- self.status.handle_bb_data(f, file_data, from_cache)
+
+ self.status.handle_bb_data(f, bb_cache, from_cache)
if bbdebug > 0:
return
@@ -938,77 +949,12 @@ class BBCooker:
return []
return finddata.readlines()
- def deps_clean(self, d):
- depstr = data.getVar('__depends', d)
- if depstr:
- deps = depstr.split(" ")
- for dep in deps:
- (f,old_mtime_s) = dep.split("@")
- old_mtime = int(old_mtime_s)
- new_mtime = parse.cached_mtime(f)
- if (new_mtime > old_mtime):
- return False
- return True
-
- def load_bbfile( self, bbfile ):
- """Load and parse one .bb build file"""
-
- if not self.cache in [None, '']:
- # get the times
- cache_mtime = data.init_db_mtime(self.cache, bbfile)
- file_mtime = parse.cached_mtime(bbfile)
-
- if file_mtime > cache_mtime:
- #print " : '%s' dirty. reparsing..." % bbfile
- pass
- else:
- #print " : '%s' clean. loading from cache..." % bbfile
- cache_data = data.init_db( self.cache, bbfile, False )
- if self.deps_clean(cache_data):
- return cache_data, True
-
- topdir = data.getVar('TOPDIR', self.configuration.data)
- if not topdir:
- topdir = os.path.abspath(os.getcwd())
- # set topdir to here
- data.setVar('TOPDIR', topdir, self.configuration)
- bbfile = os.path.abspath(bbfile)
- bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
- # expand tmpdir to include this topdir
- data.setVar('TMPDIR', data.getVar('TMPDIR', self.configuration.data, 1) or "", self.configuration.data)
- # set topdir to location of .bb file
- topdir = bbfile_loc
- #data.setVar('TOPDIR', topdir, cfg)
- # go there
- oldpath = os.path.abspath(os.getcwd())
- os.chdir(topdir)
- bb = data.init_db(self.cache,bbfile, True, self.configuration.data)
- try:
- parse.handle(bbfile, bb) # read .bb data
- if not self.cache in [None, '']:
- bb.commit(parse.cached_mtime(bbfile)) # write cache
- os.chdir(oldpath)
- return bb, False
- finally:
- os.chdir(oldpath)
-
def collect_bbfiles( self, progressCallback ):
"""Collect all available .bb build files"""
self.cb = progressCallback
parsed, cached, skipped, masked = 0, 0, 0, 0
- self.cache = bb.data.getVar( "CACHE", self.configuration.data, 1 )
- self.pkgdata = data.pkgdata( not self.cache in [None, ''], self.cache, self.configuration.data )
+ self.bb_cache = bb.cache.init(self)
- if not self.cache in [None, '']:
- if self.cb is not None:
- print "NOTE: Using cache in '%s'" % self.cache
- try:
- os.stat( self.cache )
- except OSError:
- bb.mkdirhier( self.cache )
- else:
- if self.cb is not None:
- print "NOTE: Not using a cache. Set CACHE = <directory> to enable."
files = (data.getVar( "BBFILES", self.configuration.data, 1 ) or "").split()
data.setVar("BBFILES", " ".join(files), self.configuration.data)
@@ -1043,40 +989,49 @@ class BBCooker:
# read a file's metadata
try:
- bb_data, fromCache = self.load_bbfile(f)
- if fromCache: cached += 1
+ fromCache, skip = self.bb_cache.loadData(f, self)
+ if skip:
+ skipped += 1
+ #bb.note("Skipping %s" % f)
+ self.bb_cache.skip(f)
+ continue
+ elif fromCache: cached += 1
else: parsed += 1
deps = None
- if bb_data is not None:
- # allow metadata files to add items to BBFILES
- #data.update_data(self.pkgdata[f])
- addbbfiles = data.getVar('BBFILES', bb_data) or None
- if addbbfiles:
- for aof in addbbfiles.split():
- if not files.count(aof):
- if not os.path.isabs(aof):
- aof = os.path.join(os.path.dirname(f),aof)
- files.append(aof)
- self.pkgdata[f] = bb_data
+
+ # allow metadata files to add items to BBFILES
+ #data.update_data(self.pkgdata[f])
+ addbbfiles = self.bb_cache.getVar('BBFILES', f, False) or None
+ if addbbfiles:
+ for aof in addbbfiles.split():
+ if not files.count(aof):
+ if not os.path.isabs(aof):
+ aof = os.path.join(os.path.dirname(f),aof)
+ files.append(aof)
# now inform the caller
if self.cb is not None:
- self.cb( i + 1, len( newfiles ), f, bb_data, fromCache )
+ self.cb( i + 1, len( newfiles ), f, self.bb_cache, fromCache )
except IOError, e:
+ self.bb_cache.remove(f)
bb.error("opening %s: %s" % (f, e))
pass
- except bb.parse.SkipPackage:
- skipped += 1
- pass
except KeyboardInterrupt:
+ self.bb_cache.sync()
raise
except Exception, e:
+ self.bb_cache.remove(f)
bb.error("%s while parsing %s" % (e, f))
+ except:
+ self.bb_cache.remove(f)
+ raise
if self.cb is not None:
print "\rNOTE: Parsing finished. %d cached, %d parsed, %d skipped, %d masked." % ( cached, parsed, skipped, masked ),
+ self.bb_cache.sync()
+
#============================================================================#
# main
#============================================================================#