author     Chris Larson <chris_larson@mentor.com>    2011-02-11 09:42:38 -0700
committer  Chris Larson <chris_larson@mentor.com>    2011-02-11 09:55:11 -0700
commit     34c7ff67d7f4c7dde2027e000def1a49f3286829 (patch)
tree       3537e0b1d7e65b0a9970f2962b4bca2cd96de3fb /lib/bb/fetch2/__init__.py
parent     c197043717ce621c345800bde689b1231fe8b679 (diff)
download   bitbake-34c7ff67d7f4c7dde2027e000def1a49f3286829.tar.gz
persist_data: drop SQLData indirection
The common case (if not the only case) is to only use a single domain. The only reason SQLData exists is to make it easier to delete a domain. Yet, there's no need for us to delete a domain if SQLTable knows how to clear itself out. So, add clear() to the table and pass the domain to persist().

Signed-off-by: Chris Larson <chris_larson@mentor.com>
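For reference, a minimal sketch of the resulting persist_data usage pattern. The helper names below are illustrative only (not part of the patch), and the table is assumed to support item assignment as it does elsewhere in the fetcher code; only items(), clear(), and indexed lookup appear in this diff.

    from bb import persist_data

    def save_and_clear_headrevs(d):
        # New style: persist() takes the domain name and returns the table
        # directly, so the intermediate SQLData/pd object is no longer needed.
        revs = persist_data.persist('BB_URI_HEADREVS', d)
        saved = revs.items()   # snapshot the old revisions, as fetcher_init() does
        revs.clear()           # replaces "del pd['BB_URI_HEADREVS']"
        return saved

    def lookup_headrev(d, key):
        # Per-URL head revisions are plain lookups on the same domain table.
        revs = persist_data.persist('BB_URI_HEADREVS', d)
        return revs[key]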
Diffstat (limited to 'lib/bb/fetch2/__init__.py')
-rw-r--r--  lib/bb/fetch2/__init__.py  15
1 file changed, 6 insertions(+), 9 deletions(-)
diff --git a/lib/bb/fetch2/__init__.py b/lib/bb/fetch2/__init__.py
index 1ec42717f..e36835195 100644
--- a/lib/bb/fetch2/__init__.py
+++ b/lib/bb/fetch2/__init__.py
@@ -206,18 +206,18 @@ def fetcher_init(d):
Called to initialize the fetchers once the configuration data is known.
Calls before this must not hit the cache.
"""
- pd = persist_data.persist(d)
# When to drop SCM head revisions controlled by user policy
srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, True) or "clear"
if srcrev_policy == "cache":
logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
elif srcrev_policy == "clear":
logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
+ revs = persist_data.persist('BB_URI_HEADREVS', d)
try:
- bb.fetch2.saved_headrevs = pd['BB_URI_HEADREVS'].items()
+ bb.fetch2.saved_headrevs = revs.items()
except:
pass
- del pd['BB_URI_HEADREVS']
+ revs.clear()
else:
raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
@@ -231,8 +231,7 @@ def fetcher_compare_revisions(d):
return true/false on whether they've changed.
"""
- pd = persist_data.persist(d)
- data = pd['BB_URI_HEADREVS'].items()
+ data = persist_data.persist('BB_URI_HEADREVS', d).items()
data2 = bb.fetch2.saved_headrevs
changed = False
@@ -762,8 +761,7 @@ class FetchMethod(object):
if not hasattr(self, "_latest_revision"):
raise ParameterError("The fetcher for this URL does not support _latest_revision", url)
- pd = persist_data.persist(d)
- revs = pd['BB_URI_HEADREVS']
+ revs = persist_data.persist('BB_URI_HEADREVS', d)
key = self.generate_revision_key(url, ud, d, name)
rev = revs[key]
if rev != None:
@@ -779,8 +777,7 @@ class FetchMethod(object):
if hasattr(self, "_sortable_revision"):
return self._sortable_revision(url, ud, d)
- pd = persist_data.persist(d)
- localcounts = pd['BB_URI_LOCALCOUNT']
+ localcounts = persist_data.persist('BB_URI_LOCALCOUNT', d)
key = self.generate_revision_key(url, ud, d, name)
latest_rev = self._build_revision(url, ud, d, name)