author      Richard Purdie <richard@openedhand.com>    2007-08-11 22:42:15 +0000
committer   Richard Purdie <richard@openedhand.com>    2007-08-11 22:42:15 +0000
commit      18026165c3086b77253663fb12d5b7470de8f2a1 (patch)
tree        c07368e40fa2d1ae1c39947b66474b45dd672130 /bitbake
parent      0197eb2d870263b983ba217aca69ffe9f7708eb5 (diff)
download    openembedded-core-18026165c3086b77253663fb12d5b7470de8f2a1.tar.gz
bitbake: Sync with upstream
git-svn-id: https://svn.o-hand.com/repos/poky/trunk@2480 311d38ba-8fff-0310-9ca6-ca027cbcb966
Diffstat (limited to 'bitbake')
-rw-r--r--   bitbake/ChangeLog                                6
-rwxr-xr-x   bitbake/bin/bitbake                             19
-rw-r--r--   bitbake/lib/bb/fetch/__init__.py               103
-rw-r--r--   bitbake/lib/bb/parse/parse_py/BBHandler.py      40
-rw-r--r--   bitbake/lib/bb/parse/parse_py/ConfHandler.py     8
5 files changed, 92 insertions, 84 deletions
diff --git a/bitbake/ChangeLog b/bitbake/ChangeLog
index fd998496ec..c5641836d1 100644
--- a/bitbake/ChangeLog
+++ b/bitbake/ChangeLog
@@ -7,6 +7,12 @@ Changes in Bitbake 1.8.x:
- Sync fetcher code with that in trunk, adding SRCREV support for svn
- Add ConfigParsed Event after configuration parsing is complete
- data.emit_var() - only call getVar if we need the variable
+ - Stop generating the A variable (seems to be legacy code)
+ - Make sure intertask depends get processed correctly in recursive depends
+ - Add pn-PN to overrides when evaluating PREFERRED_VERSION
+ - Improve the progress indicator by skipping tasks that have
+ already run before starting the build rather than during it
+ - Add profiling option (-P)
Changes in Bitbake 1.8.6:
- Correctly redirect stdin when forking
diff --git a/bitbake/bin/bitbake b/bitbake/bin/bitbake
index 4b212adc2d..8b69a0a33f 100755
--- a/bitbake/bin/bitbake
+++ b/bitbake/bin/bitbake
@@ -102,6 +102,8 @@ Default BBFILES are the .bb files in the current directory.""" )
parser.add_option( "-l", "--log-domains", help = """Show debug logging for the specified logging domains""",
action = "append", dest = "debug_domains", default = [] )
+ parser.add_option( "-P", "--profile", help = "profile the command and print a report",
+ action = "store_true", dest = "profile", default = False )
options, args = parser.parse_args(sys.argv)
@@ -110,8 +112,23 @@ Default BBFILES are the .bb files in the current directory.""" )
configuration.pkgs_to_build.extend(args[1:])
cooker = bb.cooker.BBCooker(configuration)
- cooker.cook()
+ if configuration.profile:
+ try:
+ import cProfile as profile
+ except:
+ import profile
+
+ profile.runctx("cooker.cook()", globals(), locals(), "profile.log")
+ import pstats
+ p = pstats.Stats('profile.log')
+ p.sort_stats('time')
+ p.print_stats()
+ p.print_callers()
+ p.sort_stats('cumulative')
+ p.print_stats()
+ else:
+ cooker.cook()
if __name__ == "__main__":
main()
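
For reference, a minimal self-contained sketch of the profiling pattern the hunk above introduces, assuming only the standard library: prefer cProfile, fall back to the pure-Python profile module, dump raw stats to a file, then print reports sorted by internal and cumulative time. The run_profiled helper and the "profile.log" filename are illustrative, not BitBake API.

try:
    import cProfile as profile   # C-accelerated profiler when available
except ImportError:
    import profile               # pure-Python fallback
import pstats

def run_profiled(func, logfile="profile.log"):
    # Run the callable under the profiler and write raw stats to logfile.
    profile.runctx("func()", globals(), locals(), logfile)
    # Hottest functions by internal time, plus who called them ...
    p = pstats.Stats(logfile)
    p.sort_stats('time')
    p.print_stats()
    p.print_callers()
    # ... then the same data ordered by cumulative time.
    p.sort_stats('cumulative')
    p.print_stats()

With this change a whole build can be profiled via the new -P option, which wraps cooker.cook() in exactly this way.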
diff --git a/bitbake/lib/bb/fetch/__init__.py b/bitbake/lib/bb/fetch/__init__.py
index f235526452..f739245bd1 100644
--- a/bitbake/lib/bb/fetch/__init__.py
+++ b/bitbake/lib/bb/fetch/__init__.py
@@ -80,6 +80,7 @@ def uri_replace(uri, uri_find, uri_replace, d):
return bb.encodeurl(result_decoded)
methods = []
+urldata_cache = {}
def fetcher_init(d):
"""
@@ -87,12 +88,16 @@ def fetcher_init(d):
Calls before this must not hit the cache.
"""
pd = persist_data.PersistData(d)
- # Clear any cached url data
- pd.delDomain("BB_URLDATA")
- # When to drop SCM head revisions should be controled by user policy
- pd.delDomain("BB_URI_HEADREVS")
+ # When to drop SCM head revisions controlled by user policy
+ srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear"
+ if srcrev_policy == "cache":
+ bb.msg.debug(1, bb.msg.domain.Fetcher, "Keeping SRCREV cache due to cache policy of: %s" % srcrev_policy)
+ elif srcrev_policy == "clear":
+ bb.msg.debug(1, bb.msg.domain.Fetcher, "Clearing SRCREV cache due to cache policy of: %s" % srcrev_policy)
+ pd.delDomain("BB_URI_HEADREVS")
+ else:
+ bb.msg.fatal(bb.msg.domain.Fetcher, "Invalid SRCREV cache policy of: %s" % srcrev_policy)
# Make sure our domains exist
- pd.addDomain("BB_URLDATA")
pd.addDomain("BB_URI_HEADREVS")
pd.addDomain("BB_URI_LOCALCOUNT")
@@ -102,45 +107,30 @@ def fetcher_init(d):
# 3. localpaths
# localpath can be called at any time
-def init(urls, d, cache = True):
+def init(urls, d, setup = True):
urldata = {}
-
- if cache:
- urldata = getdata(d)
+ fn = bb.data.getVar('FILE', d, 1)
+ if fn in urldata_cache:
+ urldata = urldata_cache[fn]
for url in urls:
if url not in urldata:
- ud = FetchData(url, d)
- for m in methods:
- if m.supports(url, ud, d):
- ud.init(m, d)
- ud.setup_localpath(d)
- break
- urldata[url] = ud
-
- if cache:
- fn = bb.data.getVar('FILE', d, 1)
- pd = persist_data.PersistData(d)
- pd.setValue("BB_URLDATA", fn, pickle.dumps(urldata, 0))
+ urldata[url] = FetchData(url, d)
- return urldata
-
-def getdata(d):
- urldata = {}
- fn = bb.data.getVar('FILE', d, 1)
- pd = persist_data.PersistData(d)
- encdata = pd.getValue("BB_URLDATA", fn)
- if encdata:
- urldata = pickle.loads(str(encdata))
+ if setup:
+ for url in urldata:
+ if not urldata[url].setup:
+ urldata[url].setup_localpath(d)
+ urldata_cache[fn] = urldata
return urldata
-def go(d, urldata = None):
+def go(d):
"""
Fetch all urls
+ init must have previously been called
"""
- if not urldata:
- urldata = getdata(d)
+ urldata = init([], d, True)
for u in urldata:
ud = urldata[u]
@@ -154,13 +144,12 @@ def go(d, urldata = None):
if ud.localfile and not m.forcefetch(u, ud, d):
Fetch.write_md5sum(u, ud, d)
-def localpaths(d, urldata = None):
+def localpaths(d):
"""
Return a list of the local filenames, assuming successful fetch
"""
local = []
- if not urldata:
- urldata = getdata(d)
+ urldata = init([], d, True)
for u in urldata:
ud = urldata[u]
@@ -177,25 +166,14 @@ def get_srcrev(d):
have been set.
"""
scms = []
- urldata = getdata(d)
- if len(urldata) == 0:
- src_uri = bb.data.getVar('SRC_URI', d, 1).split()
- for url in src_uri:
- if url not in urldata:
- ud = FetchData(url, d)
- for m in methods:
- if m.supports(url, ud, d):
- ud.init(m, d)
- break
- urldata[url] = ud
- if ud.method.suppports_srcrev():
- scms.append(url)
- ud.setup_localpath(d)
- else:
- for u in urldata:
- ud = urldata[u]
- if ud.method.suppports_srcrev():
- scms.append(u)
+ # Only call setup_localpath on URIs which suppports_srcrev()
+ urldata = init(bb.data.getVar('SRC_URI', d, 1).split(), d, False)
+ for u in urldata:
+ ud = urldata[u]
+ if ud.method.suppports_srcrev():
+ if not ud.setup:
+ ud.setup_localpath(d)
+ scms.append(u)
if len(scms) == 0:
bb.msg.error(bb.msg.domain.Fetcher, "SRCREV was used yet no valid SCM was found in SRC_URI")
@@ -212,7 +190,7 @@ def localpath(url, d, cache = True):
Called from the parser with cache=False since the cache isn't ready
at this point. Also called from classed in OE e.g. patch.bbclass
"""
- ud = init([url], d, cache)
+ ud = init([url], d)
if ud[url].method:
return ud[url].localpath
return url
@@ -252,17 +230,22 @@ def runfetchcmd(cmd, d, quiet = False):
return output
class FetchData(object):
- """Class for fetcher variable store"""
+ """
+ A class which represents the fetcher state for a given URI.
+ """
def __init__(self, url, d):
self.localfile = ""
(self.type, self.host, self.path, self.user, self.pswd, self.parm) = bb.decodeurl(data.expand(url, d))
self.date = Fetch.getSRCDate(self, d)
self.url = url
-
- def init(self, method, d):
- self.method = method
+ self.setup = False
+ for m in methods:
+ if m.supports(url, self, d):
+ self.method = m
+ break
def setup_localpath(self, d):
+ self.setup = True
if "localpath" in self.parm:
self.localpath = self.parm["localpath"]
else:
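
For context, a hedged usage sketch of the reworked fetcher entry points above: init() now memoizes FetchData objects per recipe in the module-level urldata_cache (keyed on the FILE variable) and defers setup_localpath() until it is actually needed, so callers such as go() and localpaths() simply re-call init() instead of passing urldata around or pickling it into the old BB_URLDATA domain. The fetch_src_uri helper below is illustrative only; d stands for a BitBake datastore.

from bb import data, fetch

def fetch_src_uri(d):
    # Parse SRC_URI once; the resulting FetchData objects are cached per recipe (FILE).
    src_uri = (data.getVar('SRC_URI', d, 1) or "").split()
    fetch.init(src_uri, d)
    # go() requires a prior init() and reuses the cached FetchData objects.
    fetch.go(d)
    # Local filenames for the same URLs, assuming the fetch succeeded.
    return fetch.localpaths(d)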
diff --git a/bitbake/lib/bb/parse/parse_py/BBHandler.py b/bitbake/lib/bb/parse/parse_py/BBHandler.py
index 1afbc6f294..aaa262d3e2 100644
--- a/bitbake/lib/bb/parse/parse_py/BBHandler.py
+++ b/bitbake/lib/bb/parse/parse_py/BBHandler.py
@@ -391,26 +391,26 @@ def set_additional_vars(file, d, include):
"""Deduce rest of variables, e.g. ${A} out of ${SRC_URI}"""
return
-
-# bb.msg.debug(2, bb.msg.domain.Parsing, "BB %s: set_additional_vars" % file)
-
-# src_uri = data.getVar('SRC_URI', d, 1)
-# if not src_uri:
-# return
-
-# a = (data.getVar('A', d, 1) or '').split()
-
-# from bb import fetch
-# try:
-# ud = fetch.init(src_uri.split(), d)
-# a += fetch.localpaths(d, ud)
-# except fetch.NoMethodError:
-# pass
-# except bb.MalformedUrl,e:
-# raise ParseError("Unable to generate local paths for SRC_URI due to malformed uri: %s" % e)
-# del fetch
-
-# data.setVar('A', " ".join(a), d)
+ # Nothing seems to use this variable
+ #bb.msg.debug(2, bb.msg.domain.Parsing, "BB %s: set_additional_vars" % file)
+
+ #src_uri = data.getVar('SRC_URI', d, 1)
+ #if not src_uri:
+ # return
+
+ #a = (data.getVar('A', d, 1) or '').split()
+
+ #from bb import fetch
+ #try:
+ # ud = fetch.init(src_uri.split(), d)
+ # a += fetch.localpaths(d, ud)
+ #except fetch.NoMethodError:
+ # pass
+ #except bb.MalformedUrl,e:
+ # raise ParseError("Unable to generate local paths for SRC_URI due to malformed uri: %s" % e)
+ #del fetch
+
+ #data.setVar('A', " ".join(a), d)
# Add us to the handlers list
diff --git a/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/bitbake/lib/bb/parse/parse_py/ConfHandler.py
index 6a44e28e90..6311e76902 100644
--- a/bitbake/lib/bb/parse/parse_py/ConfHandler.py
+++ b/bitbake/lib/bb/parse/parse_py/ConfHandler.py
@@ -70,14 +70,14 @@ def obtain(fn, data):
return localfn
bb.mkdirhier(dldir)
try:
- ud = bb.fetch.init([fn], data, False)
+ bb.fetch.init([fn], data)
except bb.fetch.NoMethodError:
(type, value, traceback) = sys.exc_info()
bb.msg.debug(1, bb.msg.domain.Parsing, "obtain: no method: %s" % value)
return localfn
try:
- bb.fetch.go(data, ud)
+ bb.fetch.go(data)
except bb.fetch.MissingParameterError:
(type, value, traceback) = sys.exc_info()
bb.msg.debug(1, bb.msg.domain.Parsing, "obtain: missing parameters: %s" % value)
@@ -181,7 +181,9 @@ def feeder(lineno, s, fn, data):
if val == None:
val = groupd["value"]
elif "colon" in groupd and groupd["colon"] != None:
- val = bb.data.expand(groupd["value"], data)
+ e = data.createCopy()
+ bb.data.update_data(e)
+ val = bb.data.expand(groupd["value"], e)
elif "append" in groupd and groupd["append"] != None:
val = "%s %s" % ((getFunc(groupd, key, data) or ""), groupd["value"])
elif "prepend" in groupd and groupd["prepend"] != None: