author | Chris Larson <chris_larson@mentor.com> | 2010-09-21 18:11:54 -0700 |
---|---|---|
committer | Richard Purdie <rpurdie@linux.intel.com> | 2011-01-05 00:58:23 +0000 |
commit | 2671bb4197a06403e1cd3cedb0f6452046660d0a (patch) | |
tree | 317cba67a6a6d686598d7cb1aec735802f3e04da | |
parent | 3069c0b2588c9e88a4fa2fd4d37356410d364410 (diff) | |
download | openembedded-core-contrib-2671bb4197a06403e1cd3cedb0f6452046660d0a.tar.gz | |
Rework the persist_data API
Rather than having to run .addDomain() and then .getValue(domain, key),
.setValue(domain, key, value), etc., it now just works as mappings.
As an example:
setValue(domain, key, value) -> persist[domain][key] = value
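
A hedged before/after sketch of what that change looks like in practice; the
CACHE path and the domain/key strings below are placeholders chosen for
illustration, and `d` stands in for the usual BitBake metadata datastore:

```python
import bb.data
import bb.persist_data

# An empty datastore with CACHE set, purely for illustration; inside a real
# BitBake run the metadata datastore 'd' already exists and is configured.
d = bb.data.init()
d.setVar("CACHE", "/tmp/bb-cache-example")

# Old style, still available through the now-deprecated PersistData wrapper.
pd = bb.persist_data.PersistData(d)
pd.addDomain("SRCREVS")
pd.setValue("SRCREVS", "some-recipe", "1234abcd")
rev = pd.getValue("SRCREVS", "some-recipe")

# New style: persist() returns an SQLData object that behaves like a mapping
# of domains, each of which behaves like a mapping of keys to values.
persist = bb.persist_data.persist(d)
persist["SRCREVS"]["some-recipe"] = "1234abcd"
rev = persist["SRCREVS"]["some-recipe"]
```
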
It also arranges things so we can construct objects of this type with an
arbitrary filename/path for the sqlite3 database, rather than being so
tightly bound to the metadata.
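
Concretely, the new SQLData class can be pointed at any sqlite3 file; a
minimal sketch, where the path below is purely a placeholder:

```python
from bb.persist_data import SQLData

# Build a store against an arbitrary database file instead of the
# PERSISTENT_DIR/CACHE location derived from the metadata.
data = SQLData("/tmp/example-persist.sqlite3")
data["SRCREVS"]["some-recipe"] = "1234abcd"
print(data["SRCREVS"]["some-recipe"])
```
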
(Bitbake rev: d9e8b8af308ae871efdc8ef0782be30af8c1f894)
Signed-off-by: Chris Larson <chris_larson@mentor.com>
Signed-off-by: Richard Purdie <rpurdie@linux.intel.com>
-rw-r--r-- | bitbake/lib/bb/persist_data.py | 209 |
1 file changed, 137 insertions, 72 deletions
diff --git a/bitbake/lib/bb/persist_data.py b/bitbake/lib/bb/persist_data.py
index 9558e712833..4f87c37f2bb 100644
--- a/bitbake/lib/bb/persist_data.py
+++ b/bitbake/lib/bb/persist_data.py
@@ -1,6 +1,12 @@
-# BitBake Persistent Data Store
-#
+"""BitBake Persistent Data Store
+
+Used to store data in a central location such that other threads/tasks can
+access them at some future date. Acts as a convenience wrapper around sqlite,
+currently, providing a key/value store accessed by 'domain'.
+"""
+
 # Copyright (C) 2007 Richard Purdie
+# Copyright (C) 2010 Chris Larson <chris_larson@mentor.com>
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License version 2 as
@@ -15,116 +21,175 @@
 # with this program; if not, write to the Free Software Foundation, Inc.,
 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
-import os
+import collections
 import logging
-import bb
-import bb.utils
-
-logger = logging.getLogger("BitBake.PersistData")
+import os.path
+import sys
+import warnings
+import bb.msg, bb.data, bb.utils
 
 try:
     import sqlite3
 except ImportError:
-    try:
-        from pysqlite2 import dbapi2 as sqlite3
-    except ImportError:
-        bb.msg.fatal(bb.msg.domain.PersistData, "Importing sqlite3 and pysqlite2 failed, please install one of them. Python 2.5 or a 'python-pysqlite2' like package is likely to be what you need.")
+    from pysqlite2 import dbapi2 as sqlite3
 
 sqlversion = sqlite3.sqlite_version_info
 if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
-    bb.msg.fatal(bb.msg.domain.PersistData, "sqlite3 version 3.3.0 or later is required.")
+    raise Exception("sqlite3 version 3.3.0 or later is required.")
 
-class PersistData:
-    """
-    BitBake Persistent Data Store
 
-    Used to store data in a central location such that other threads/tasks can
-    access them at some future date.
+logger = logging.getLogger("BitBake.PersistData")
 
-    The "domain" is used as a key to isolate each data pool and in this
-    implementation corresponds to an SQL table. The SQL table consists of a
-    simple key and value pair.
 
-    Why sqlite? It handles all the locking issues for us.
-    """
-    def __init__(self, d):
-        self.cachedir = bb.data.getVar("PERSISTENT_DIR", d, True) or bb.data.getVar("CACHE", d, True)
-        if self.cachedir in [None, '']:
-            bb.msg.fatal(bb.msg.domain.PersistData, "Please set the 'PERSISTENT_DIR' or 'CACHE' variable.")
-        try:
-            os.stat(self.cachedir)
-        except OSError:
-            bb.utils.mkdirhier(self.cachedir)
+class SQLTable(collections.MutableMapping):
+    """Object representing a table/domain in the database"""
+    def __init__(self, cursor, table):
+        self.cursor = cursor
+        self.table = table
+
+        cursor.execute("CREATE TABLE IF NOT EXISTS %s(key TEXT, value TEXT);"
+                       % table)
+
+    def _execute(self, *query):
+        """Execute a query, waiting to acquire a lock if necessary"""
+        count = 0
+        while True:
+            try:
+                return self.cursor.execute(*query)
+                break
+            except sqlite3.OperationalError as exc:
+                if 'database is locked' in str(exc) and count < 500:
+                    count = count + 1
+                    continue
+                raise
+
+    def __getitem__(self, key):
+        data = self.cursor.execute("SELECT * from %s where key=?;" %
+                                   self.table, [key])
+        for row in data:
+            return row[1]
+
+    def __delitem__(self, key):
+        self._execute("DELETE from %s where key=?;" % self.table, [key])
+
+    def __setitem__(self, key, value):
+        data = self.cursor.execute("SELECT * from %s where key=?;" %
+                                   self.table, [key])
+        exists = len(list(data))
+        if exists:
+            self._execute("UPDATE %s SET value=? WHERE key=?;" % self.table,
+                          [value, key])
+        else:
+            self._execute("INSERT into %s(key, value) values (?, ?);" %
+                          self.table, [key, value])
+
+    def __contains__(self, key):
+        return key in set(self)
+
+    def __len__(self):
+        data = self.cursor.execute("SELECT COUNT(key) FROM %s;" % self.table)
+        for row in data:
+            return row[0]
+
+    def __iter__(self):
+        data = self.cursor.execute("SELECT key FROM %s;" % self.table)
+        for row in data:
+            yield row[0]
+
+    def iteritems(self):
+        data = self.cursor.execute("SELECT * FROM %s;" % self.table)
+        for row in data:
+            yield row[0], row[1]
+
+    def itervalues(self):
+        data = self.cursor.execute("SELECT value FROM %s;" % self.table)
+        for row in data:
+            yield row[0]
+
+
+class SQLData(object):
+    """Object representing the persistent data"""
+    def __init__(self, filename):
+        bb.utils.mkdirhier(os.path.dirname(filename))
+
+        self.filename = filename
+        self.connection = sqlite3.connect(filename, timeout=5,
+                                          isolation_level=None)
+        self.cursor = self.connection.cursor()
+        self._tables = {}
+
+    def __getitem__(self, table):
+        if not isinstance(table, basestring):
+            raise TypeError("table argument must be a string, not '%s'" %
+                            type(table))
 
-        self.cachefile = os.path.join(self.cachedir, "bb_persist_data.sqlite3")
-        logger.debug(1, "Using '%s' as the persistent data cache", self.cachefile)
+        if table in self._tables:
+            return self._tables[table]
+        else:
+            tableobj = self._tables[table] = SQLTable(self.cursor, table)
+            return tableobj
+
+    def __delitem__(self, table):
+        if table in self._tables:
+            del self._tables[table]
+        self.cursor.execute("DROP TABLE IF EXISTS %s;" % table)
 
-        self.connection = sqlite3.connect(self.cachefile, timeout=5, isolation_level=None)
+
+class PersistData(object):
+    """Deprecated representation of the bitbake persistent data store"""
+    def __init__(self, d):
+        warnings.warn("Use of PersistData will be deprecated in the future",
+                      category=PendingDeprecationWarning,
+                      stacklevel=2)
+
+        self.data = persist(d)
+        logger.debug(1, "Using '%s' as the persistent data cache",
+                     self.data.filename)
 
     def addDomain(self, domain):
         """
-        Should be called before any domain is used
-        Creates it if it doesn't exist.
+        Add a domain (pending deprecation)
         """
-        self._execute("CREATE TABLE IF NOT EXISTS %s(key TEXT, value TEXT);" % domain)
+        return self.data[domain]
 
     def delDomain(self, domain):
         """
         Removes a domain and all the data it contains
         """
-        self._execute("DROP TABLE IF EXISTS %s;" % domain)
+        del self.data[domain]
 
     def getKeyValues(self, domain):
         """
         Return a list of key + value pairs for a domain
         """
-        ret = {}
-        data = self._execute("SELECT key, value from %s;" % domain)
-        for row in data:
-            ret[str(row[0])] = str(row[1])
-
-        return ret
+        return self.data[domain].items()
 
     def getValue(self, domain, key):
         """
         Return the value of a key for a domain
         """
-        data = self._execute("SELECT * from %s where key=?;" % domain, [key])
-        for row in data:
-            return row[1]
+        return self.data[domain][key]
 
     def setValue(self, domain, key, value):
         """
         Sets the value of a key for a domain
         """
-        data = self._execute("SELECT * from %s where key=?;" % domain, [key])
-        rows = 0
-        for row in data:
-            rows = rows + 1
-        if rows:
-            self._execute("UPDATE %s SET value=? WHERE key=?;" % domain, [value, key])
-        else:
-            self._execute("INSERT into %s(key, value) values (?, ?);" % domain, [key, value])
+        self.data[domain][key] = value
 
     def delValue(self, domain, key):
         """
         Deletes a key/value pair
         """
-        self._execute("DELETE from %s where key=?;" % domain, [key])
-
-    #
-    # We wrap the sqlite execute calls as on contended machines or single threaded
-    # systems we can have multiple processes trying to access the DB at once and it seems
-    # sqlite sometimes doesn't wait for the timeout. We therefore loop but put in an
-    # emergency brake too
-    #
-    def _execute(self, *query):
-        count = 0
-        while True:
-            try:
-                return self.connection.execute(*query)
-            except sqlite3.OperationalError as e:
-                if 'database is locked' in str(e) and count < 500:
-                    count = count + 1
-                    continue
-                raise
+        del self.data[domain][key]
+
+
+def persist(d):
+    """Convenience factory for construction of SQLData based upon metadata"""
+    cachedir = (bb.data.getVar("PERSISTENT_DIR", d, True) or
+                bb.data.getVar("CACHE", d, True))
+    if not cachedir:
+        logger.critical("Please set the 'PERSISTENT_DIR' or 'CACHE' variable")
+        sys.exit(1)
+
+    cachefile = os.path.join(cachedir, "bb_persist_data.sqlite3")
+    return SQLData(cachefile)
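
Because SQLTable derives from collections.MutableMapping, each domain also
picks up the rest of the dict protocol (length, iteration, membership tests,
deletion) on top of the methods defined in the patch. A hedged sketch,
assuming a Python environment with BitBake's lib on the path; the file path
and domain/key names are placeholders:

```python
from bb.persist_data import SQLData

data = SQLData("/tmp/demo-persist.sqlite3")
srcrevs = data["SRCREVS"]        # table is created on first access

srcrevs["recipe-a"] = "1234abcd"
srcrevs["recipe-b"] = "cafef00d"

print(len(srcrevs))              # __len__ -> SELECT COUNT(key)
print("recipe-a" in srcrevs)     # __contains__ via the key set
for key in srcrevs:              # __iter__ yields the stored keys
    print(key)

del srcrevs["recipe-b"]          # __delitem__ deletes the row
del data["SRCREVS"]              # deleting a domain drops the table
```
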