author    Chris Larson <chris_larson@mentor.com>    2010-11-18 19:51:51 -0700
committer Richard Purdie <rpurdie@linux.intel.com>  2011-01-05 00:58:23 +0000
commit    3069c0b2588c9e88a4fa2fd4d37356410d364410 (patch)
tree      e0f101e24b50feb9c014c8d3ed98baf62b915467 /bitbake
parent    30d27115ec38b8191d81504858d105b0d91277d8 (diff)
download  openembedded-core-contrib-3069c0b2588c9e88a4fa2fd4d37356410d364410.tar.gz
Revert "persist_data: cache connection and use cursor"
Caching the database connection can cause serious issues if it results in
multiple processes (e.g. multiple tasks) simultaneously using the same
connection.

This reverts commit 8a6876752b90efd81d92f0947bfc9527d8260969.

(Bitbake rev: 60b9b18eafad5ac46c7cf1048d749d673c2ee0ad)

Signed-off-by: Chris Larson <chris_larson@mentor.com>
Signed-off-by: Richard Purdie <rpurdie@linux.intel.com>
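To illustrate the failure mode, here is a minimal standalone sketch (not code from this patch; the file path and helper names are invented): a sqlite3 connection created before fork() is inherited by the child at the file-descriptor level, so parent and child can interleave traffic on one handle. The safe pattern this revert restores is to open a connection in whichever process actually uses it.

import os
import sqlite3

DB_PATH = "/tmp/bb_persist_example.sqlite3"  # example path, not the real cache file

def open_connection():
    # One connection per process/instance, matching the reverted behaviour.
    return sqlite3.connect(DB_PATH, timeout=5, isolation_level=None)

def record_pid(conn):
    conn.execute("CREATE TABLE IF NOT EXISTS pids (pid INTEGER)")
    conn.execute("INSERT INTO pids VALUES (?)", (os.getpid(),))

if __name__ == "__main__":
    pid = os.fork()                 # parent and child each continue from here
    conn = open_connection()        # opened after the fork, never shared
    record_pid(conn)
    conn.close()
    if pid:
        os.waitpid(pid, 0)          # parent waits for the child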
Diffstat (limited to 'bitbake')
-rw-r--r--  bitbake/lib/bb/fetch/__init__.py    9
-rw-r--r--  bitbake/lib/bb/persist_data.py     11
2 files changed, 7 insertions, 13 deletions
diff --git a/bitbake/lib/bb/fetch/__init__.py b/bitbake/lib/bb/fetch/__init__.py
index 668b788698..d8f5f167fc 100644
--- a/bitbake/lib/bb/fetch/__init__.py
+++ b/bitbake/lib/bb/fetch/__init__.py
@@ -144,14 +144,13 @@ def uri_replace(uri, uri_find, uri_replace, d):
 methods = []
 urldata_cache = {}
 saved_headrevs = {}
-persistent_database_connection = {}
 def fetcher_init(d):
     """
     Called to initialize the fetchers once the configuration data is known.
     Calls before this must not hit the cache.
     """
-    pd = persist_data.PersistData(d, persistent_database_connection)
+    pd = persist_data.PersistData(d)
     # When to drop SCM head revisions controlled by user policy
     srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear"
     if srcrev_policy == "cache":
@@ -180,7 +179,7 @@ def fetcher_compare_revisions(d):
     return true/false on whether they've changed.
     """
-    pd = persist_data.PersistData(d, persistent_database_connection)
+    pd = persist_data.PersistData(d)
     data = pd.getKeyValues("BB_URI_HEADREVS")
     data2 = bb.fetch.saved_headrevs
@@ -758,7 +757,7 @@ class Fetch(object):
         if not hasattr(self, "_latest_revision"):
             raise ParameterError
-        pd = persist_data.PersistData(d, persistent_database_connection)
+        pd = persist_data.PersistData(d)
         key = self.generate_revision_key(url, ud, d)
         rev = pd.getValue("BB_URI_HEADREVS", key)
         if rev != None:
@@ -775,7 +774,7 @@ class Fetch(object):
         if hasattr(self, "_sortable_revision"):
             return self._sortable_revision(url, ud, d)
-        pd = persist_data.PersistData(d, persistent_database_connection)
+        pd = persist_data.PersistData(d)
         key = self.generate_revision_key(url, ud, d)
         latest_rev = self._build_revision(url, ud, d)
diff --git a/bitbake/lib/bb/persist_data.py b/bitbake/lib/bb/persist_data.py
index 76bff16658..9558e71283 100644
--- a/bitbake/lib/bb/persist_data.py
+++ b/bitbake/lib/bb/persist_data.py
@@ -47,10 +47,7 @@ class PersistData:
     Why sqlite? It handles all the locking issues for us.
     """
-    def __init__(self, d, persistent_database_connection):
-        if "connection" in persistent_database_connection:
-            self.cursor = persistent_database_connection["connection"].cursor()
-            return
+    def __init__(self, d):
         self.cachedir = bb.data.getVar("PERSISTENT_DIR", d, True) or bb.data.getVar("CACHE", d, True)
         if self.cachedir in [None, '']:
             bb.msg.fatal(bb.msg.domain.PersistData, "Please set the 'PERSISTENT_DIR' or 'CACHE' variable.")
@@ -62,9 +59,7 @@ class PersistData:
         self.cachefile = os.path.join(self.cachedir, "bb_persist_data.sqlite3")
         logger.debug(1, "Using '%s' as the persistent data cache", self.cachefile)
-        connection = sqlite3.connect(self.cachefile, timeout=5, isolation_level=None)
-        persistent_database_connection["connection"] = connection
-        self.cursor = persistent_database_connection["connection"].cursor()
+        self.connection = sqlite3.connect(self.cachefile, timeout=5, isolation_level=None)
     def addDomain(self, domain):
         """
@@ -127,7 +122,7 @@ class PersistData:
         count = 0
         while True:
             try:
-                return self.cursor.execute(*query)
+                return self.connection.execute(*query)
             except sqlite3.OperationalError as e:
                 if 'database is locked' in str(e) and count < 500:
                     count = count + 1
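For reference, the retry loop this last hunk switches back onto the connection object can be sketched standalone as follows (a hedged approximation; the function name is invented for illustration, the retry logic mirrors the hunk above):

import sqlite3

def execute_with_retry(connection, *query):
    # Keep retrying while another process holds the database lock,
    # up to a bounded number of attempts, then re-raise.
    count = 0
    while True:
        try:
            return connection.execute(*query)
        except sqlite3.OperationalError as e:
            if 'database is locked' in str(e) and count < 500:
                count = count + 1
                continue
            raise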