author    Chris Larson <chris_larson@mentor.com>  2011-02-11 09:42:38 -0700
committer Chris Larson <chris_larson@mentor.com>  2011-02-11 09:55:11 -0700
commit    34c7ff67d7f4c7dde2027e000def1a49f3286829 (patch)
tree      3537e0b1d7e65b0a9970f2962b4bca2cd96de3fb /lib/bb/fetch/__init__.py
parent    c197043717ce621c345800bde689b1231fe8b679 (diff)
download  bitbake-34c7ff67d7f4c7dde2027e000def1a49f3286829.tar.gz
persist_data: drop SQLData indirection
The common case (if not the only case) is to only use a single domain. The
only reason SQLData exists is to make it easier to delete a domain. Yet,
there's no need for us to delete a domain if SQLTable knows how to clear
itself out. So, add clear() to the table and pass the domain to persist().

Signed-off-by: Chris Larson <chris_larson@mentor.com>
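To illustrate the caller-side effect (a minimal sketch drawn from the hunks
below, not additional code from the commit): fetch code stops going through a
per-datastore SQLData object and instead asks persist() for the named domain's
table, clearing it in place.

    # Old API: persist(d) returned an SQLData object indexed by domain name.
    pd = persist_data.persist(d)
    revs = pd['BB_URI_HEADREVS']
    del pd['BB_URI_HEADREVS']    # resetting meant deleting the whole domain

    # New API: persist(domain, d) returns the domain's table directly,
    # and the table can empty itself via clear().
    revs = persist_data.persist('BB_URI_HEADREVS', d)
    revs.clear()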
Diffstat (limited to 'lib/bb/fetch/__init__.py')
-rw-r--r--  lib/bb/fetch/__init__.py  15
1 file changed, 6 insertions(+), 9 deletions(-)
diff --git a/lib/bb/fetch/__init__.py b/lib/bb/fetch/__init__.py
index 2f92d87d9..4cf78779e 100644
--- a/lib/bb/fetch/__init__.py
+++ b/lib/bb/fetch/__init__.py
@@ -153,18 +153,18 @@ def fetcher_init(d):
     Called to initialize the fetchers once the configuration data is known.
     Calls before this must not hit the cache.
     """
-    pd = persist_data.persist(d)
     # When to drop SCM head revisions controlled by user policy
     srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear"
     if srcrev_policy == "cache":
         logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
     elif srcrev_policy == "clear":
         logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
+        revs = persist_data.persist('BB_URI_HEADREVS', d)
         try:
-            bb.fetch.saved_headrevs = pd['BB_URI_HEADREVS'].items()
+            bb.fetch.saved_headrevs = revs.items()
         except:
             pass
-        del pd['BB_URI_HEADREVS']
+        revs.clear()
     else:
         raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
@@ -178,8 +178,7 @@ def fetcher_compare_revisions(d):
     return true/false on whether they've changed.
     """
-    pd = persist_data.persist(d)
-    data = pd['BB_URI_HEADREVS'].items()
+    data = persist_data.persist('BB_URI_HEADREVS', d).items()
     data2 = bb.fetch.saved_headrevs
     changed = False
@@ -756,8 +755,7 @@
         if not hasattr(self, "_latest_revision"):
             raise ParameterError
-        pd = persist_data.persist(d)
-        revs = pd['BB_URI_HEADREVS']
+        revs = persist_data.persist('BB_URI_HEADREVS', d)
         key = self.generate_revision_key(url, ud, d)
         rev = revs[key]
         if rev != None:
@@ -773,8 +771,7 @@
         if hasattr(self, "_sortable_revision"):
             return self._sortable_revision(url, ud, d)
-        pd = persist_data.persist(d)
-        localcounts = pd['BB_URI_LOCALCOUNT']
+        localcounts = persist_data.persist('BB_URI_LOCALCOUNT', d)
         key = self.generate_revision_key(url, ud, d)
         latest_rev = self._build_revision(url, ud, d)
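The persist_data side of the change is not part of this diff. The sketch below
is only a rough assumption of what a domain table with clear() and the
two-argument persist() could look like, using sqlite (which persist_data is
built on); the class name, schema, and CACHE-based location are illustrative,
not taken from the commit.

    import os
    import sqlite3

    import bb.data


    class SQLTable(object):
        """Hypothetical sketch of a sqlite-backed key/value table for one domain."""

        def __init__(self, cachefile, table):
            self.table = table
            self.db = sqlite3.connect(cachefile)
            self.db.execute("CREATE TABLE IF NOT EXISTS %s(key TEXT PRIMARY KEY NOT NULL, value TEXT)" % table)

        def __getitem__(self, key):
            row = self.db.execute("SELECT value FROM %s WHERE key=?" % self.table, [key]).fetchone()
            return row[0] if row else None

        def items(self):
            # Used by fetcher_init()/fetcher_compare_revisions() to snapshot the cache.
            return self.db.execute("SELECT key, value FROM %s" % self.table).fetchall()

        def clear(self):
            # Empty the table in place rather than deleting the whole domain.
            self.db.execute("DELETE FROM %s" % self.table)
            self.db.commit()


    def persist(domain, d):
        # New-style entry point: take the domain name and the datastore, and
        # return that domain's table. The CACHE-based path is an assumption.
        cachedir = bb.data.getVar('CACHE', d, True) or '.'
        return SQLTable(os.path.join(cachedir, 'bb_persist_data.sqlite3'), domain)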