author    Richard Purdie <rpurdie@linux.intel.com>    2007-07-28 22:44:43 +0000
committer Richard Purdie <rpurdie@linux.intel.com>    2007-07-28 22:44:43 +0000
commit    1027cd62c4669e89d89684f920743c3c161846cb (patch)
tree      08bbc926b93c82c9cd8aba5d1af3fe26f816e322
parent    fabea4d6a6e061b8144810324b0ebb9ffb7b9e54 (diff)
Add persistent data store from trunk, sync the fetcher changes to use the persistent store
-rw-r--r--  ChangeLog                 |   2
-rw-r--r--  lib/bb/cooker.py          |   2
-rw-r--r--  lib/bb/fetch/__init__.py  | 209
-rw-r--r--  lib/bb/fetch/git.py       |  45
-rw-r--r--  lib/bb/fetch/svn.py       |  44
-rw-r--r--  lib/bb/msg.py             |   1
-rw-r--r--  lib/bb/persist_data.py    |  92
7 files changed, 289 insertions(+), 106 deletions(-)
diff --git a/ChangeLog b/ChangeLog
index f94409761..f4fc943b6 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -3,6 +3,8 @@ Changes in Bitbake 1.8.x:
as part of future SRCDATE="now" fixes
(requires new FETCHCMD_svn definition in bitbake.conf)
- Change SVNDIR layout to be more unique (fixes #2644 and #2624)
+ - Import persistent data store from trunk
+ - Sync fetcher code with that in trunk
Changes in Bitbake 1.8.6:
- Correctly redirect stdin when forking
diff --git a/lib/bb/cooker.py b/lib/bb/cooker.py
index 77f71a716..12e2f427c 100644
--- a/lib/bb/cooker.py
+++ b/lib/bb/cooker.py
@@ -336,6 +336,8 @@ class BBCooker:
if bb.data.getVarFlag(var, 'handler', data):
bb.event.register(var,bb.data.getVar(var, data))
+ bb.fetch.fetcher_init(self.configuration.data)
+
except IOError:
bb.msg.fatal(bb.msg.domain.Parsing, "Unable to open %s" % afile )
except bb.parse.ParseError, details:
diff --git a/lib/bb/fetch/__init__.py b/lib/bb/fetch/__init__.py
index 31a4adccb..9d1ecfefa 100644
--- a/lib/bb/fetch/__init__.py
+++ b/lib/bb/fetch/__init__.py
@@ -27,6 +27,12 @@ BitBake build tools.
import os, re
import bb
from bb import data
+from bb import persist_data
+
+try:
+ import cPickle as pickle
+except ImportError:
+ import pickle
class FetchError(Exception):
"""Exception raised when a download fails"""
@@ -74,79 +80,150 @@ def uri_replace(uri, uri_find, uri_replace, d):
return bb.encodeurl(result_decoded)
methods = []
-urldata = {}
-
-def init(urls = [], d = None):
- if d == None:
- bb.msg.debug(2, bb.msg.domain.Fetcher, "BUG init called with None as data object!!!")
- return
-
- for m in methods:
- m.urls = []
- for u in urls:
- ud = initdata(u, d)
- if ud.method:
- ud.method.urls.append(u)
-
-def initdata(url, d):
- fn = bb.data.getVar('FILE', d, 1)
- if fn not in urldata:
- urldata[fn] = {}
- if url not in urldata[fn]:
- ud = FetchData()
- (ud.type, ud.host, ud.path, ud.user, ud.pswd, ud.parm) = bb.decodeurl(data.expand(url, d))
- ud.date = Fetch.getSRCDate(ud, d)
- for m in methods:
- if m.supports(url, ud, d):
- ud.localpath = m.localpath(url, ud, d)
- ud.md5 = ud.localpath + '.md5'
- # if user sets localpath for file, use it instead.
- if "localpath" in ud.parm:
- ud.localpath = ud.parm["localpath"]
- ud.method = m
- break
- urldata[fn][url] = ud
- return urldata[fn][url]
-
-def go(d):
- """Fetch all urls"""
+def fetcher_init(d):
+    """
+    Called to initialize the fetchers once the configuration data is known.
+    Calls before this must not hit the cache.
+    """
+ pd = persist_data.PersistData(d)
+ # Clear any cached data
+ pd.delDomain("BB_URLDATA")
+ # Make sure our domain exists
+ pd.addDomain("BB_URLDATA")
+
+# Function call order is usually:
+#   1. init
+#   2. go
+#   3. localpaths
+# localpath can be called at any time
+# (a usage sketch follows this file's diff)
+
+def init(urls, d, cache = True):
+ urldata = {}
+
+ if cache:
+ urldata, pd, fn = getdata(d)
+
+ for url in urls:
+ if url not in urldata:
+ ud = FetchData(url, d)
+ for m in methods:
+ if m.supports(url, ud, d):
+ ud.init(m, d)
+ break
+ urldata[url] = ud
+
+ if cache:
+ pd.setValue("BB_URLDATA", fn, pickle.dumps(urldata, 0))
+
+ return urldata
+
+def getdata(d):
+ urldata = {}
fn = bb.data.getVar('FILE', d, 1)
- for m in methods:
- for u in m.urls:
- ud = urldata[fn][u]
- if ud.localfile and not m.forcefetch(u, ud, d) and os.path.exists(urldata[fn][u].md5):
- # File already present along with md5 stamp file
- # Touch md5 file to show activity
- os.utime(ud.md5, None)
- continue
- # RP - is olddir needed?
- # olddir = os.path.abspath(os.getcwd())
- m.go(u, ud , d)
- # os.chdir(olddir)
- if ud.localfile and not m.forcefetch(u, ud, d):
- Fetch.write_md5sum(u, ud, d)
-
-def localpaths(d):
- """Return a list of the local filenames, assuming successful fetch"""
+ pd = persist_data.PersistData(d)
+ encdata = pd.getValue("BB_URLDATA", fn)
+ if encdata:
+ urldata = pickle.loads(str(encdata))
+
+ return urldata, pd, fn
+
+def go(d, urldata = None):
+ """
+ Fetch all urls
+ """
+ if not urldata:
+ urldata, pd, fn = getdata(d)
+
+ for u in urldata:
+ ud = urldata[u]
+ m = ud.method
+ if ud.localfile and not m.forcefetch(u, ud, d) and os.path.exists(ud.md5):
+ # File already present along with md5 stamp file
+ # Touch md5 file to show activity
+ os.utime(ud.md5, None)
+ continue
+ # RP - is olddir needed?
+ # olddir = os.path.abspath(os.getcwd())
+ m.go(u, ud, d)
+ # os.chdir(olddir)
+ if ud.localfile and not m.forcefetch(u, ud, d):
+ Fetch.write_md5sum(u, ud, d)
+
+def localpaths(d, urldata = None):
+ """
+ Return a list of the local filenames, assuming successful fetch
+ """
local = []
- fn = bb.data.getVar('FILE', d, 1)
- for m in methods:
- for u in m.urls:
- local.append(urldata[fn][u].localpath)
+ if not urldata:
+ urldata, pd, fn = getdata(d)
+
+ for u in urldata:
+ ud = urldata[u]
+ local.append(ud.localpath)
+
return local
-def localpath(url, d):
- ud = initdata(url, d)
- if ud.method:
- return ud.localpath
+def localpath(url, d, cache = True):
+ """
+ Called from the parser with cache=False since the cache isn't ready
+ at this point. Also called from classed in OE e.g. patch.bbclass
+ """
+ ud = init([url], d, cache)
+ if ud[url].method:
+ return ud[url].localpath
return url
+def runfetchcmd(cmd, d, quiet = False):
+ """
+ Run cmd returning the command output
+ Raise an error if interrupted or cmd fails
+ Optionally echo command output to stdout
+ """
+ bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cmd)
+
+ # Need to export PATH as binary could be in metadata paths
+ # rather than host provided
+ pathcmd = 'export PATH=%s; %s' % (data.expand('${PATH}', d), cmd)
+
+ stdout_handle = os.popen(pathcmd, "r")
+ output = ""
+
+ while 1:
+ line = stdout_handle.readline()
+ if not line:
+ break
+ if not quiet:
+ print line
+ output += line
+
+    status = stdout_handle.close() or 0
+    # close() returns a wait()-style status: signal number in the low
+    # byte, exit code in the high byte
+    signal = status & 0x7f
+    exitstatus = status >> 8
+
+    if signal:
+        raise FetchError("Fetch command %s failed with signal %s, output:\n%s" % (pathcmd, signal, output))
+    elif exitstatus != 0:
+        raise FetchError("Fetch command %s failed with exit code %s, output:\n%s" % (pathcmd, exitstatus, output))
+
+ return output
+
class FetchData(object):
"""Class for fetcher variable store"""
- def __init__(self):
+ def __init__(self, url, d):
self.localfile = ""
-
+ (self.type, self.host, self.path, self.user, self.pswd, self.parm) = bb.decodeurl(data.expand(url, d))
+ self.date = Fetch.getSRCDate(self, d)
+ self.url = url
+ self.force = False
+
+ def init(self, method, d):
+ self.method = method
+ self.localpath = method.localpath(self.url, self, d)
+ self.md5 = self.localpath + '.md5'
+ # if user sets localpath for file, use it instead.
+ if "localpath" in self.parm:
+ self.localpath = self.parm["localpath"]
class Fetch(object):
"""Base class for 'fetch'ing data"""
@@ -278,11 +355,11 @@ import svk
import ssh
import perforce
-methods.append(cvs.Cvs())
-methods.append(git.Git())
methods.append(local.Local())
-methods.append(svn.Svn())
methods.append(wget.Wget())
+methods.append(svn.Svn())
+methods.append(git.Git())
+methods.append(cvs.Cvs())
methods.append(svk.Svk())
methods.append(ssh.SSH())
methods.append(perforce.Perforce())
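To make the call-order comment above concrete, here is a minimal usage sketch of
the reworked entry points. It assumes `cfgdata` is the parsed configuration
store and `localdata` is a recipe's data store; both names are illustrative,
the real caller being cooker.py and the runtime task code:

    import bb, bb.fetch
    from bb import data

    # once, after configuration parsing -- resets the BB_URLDATA domain
    bb.fetch.fetcher_init(cfgdata)

    # per recipe: build (and cache) the urldata dict, fetch everything,
    # then resolve the downloaded files' local paths
    src_uri = data.getVar('SRC_URI', localdata, 1).split()
    urldata = bb.fetch.init(src_uri, localdata)
    bb.fetch.go(localdata, urldata)
    local_paths = bb.fetch.localpaths(localdata, urldata)

If urldata is not passed to go() or localpaths(), they fall back to the
pickled copy stored under BB_URLDATA by init().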
diff --git a/lib/bb/fetch/git.py b/lib/bb/fetch/git.py
index c0cd27df0..891fe1474 100644
--- a/lib/bb/fetch/git.py
+++ b/lib/bb/fetch/git.py
@@ -25,6 +25,7 @@ import bb
from bb import data
from bb.fetch import Fetch
from bb.fetch import FetchError
+from bb.fetch import runfetchcmd
def prunedir(topdir):
# Delete everything reachable from the directory named in 'topdir'.
@@ -35,19 +36,6 @@ def prunedir(topdir):
for name in dirs:
os.rmdir(os.path.join(root, name))
-def rungitcmd(cmd,d):
-
- bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cmd)
-
- # Need to export PATH as git is likely to be in metadata paths
- # rather than host provided
- pathcmd = 'export PATH=%s; %s' % (data.expand('${PATH}', d), cmd)
-
- myret = os.system(pathcmd)
-
- if myret != 0:
- raise FetchError("Git: %s failed" % pathcmd)
-
class Git(Fetch):
"""Class to fetch a module or modules from git repositories"""
def supports(self, url, ud, d):
@@ -65,6 +53,7 @@ class Git(Fetch):
ud.tag = "master"
if 'tag' in ud.parm:
ud.tag = ud.parm['tag']
+ # FIXME, set tag to latest revision so local filestash works
ud.localfile = data.expand('git_%s%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.tag), d)
@@ -96,32 +85,38 @@ class Git(Fetch):
if Fetch.try_mirror(d, repofilename):
bb.mkdirhier(repodir)
os.chdir(repodir)
- rungitcmd("tar -xzf %s" % (repofile),d)
+ runfetchcmd("tar -xzf %s" % (repofile), d)
else:
- rungitcmd("git clone -n %s://%s%s %s" % (ud.proto, ud.host, ud.path, repodir),d)
+ runfetchcmd("git clone -n %s://%s%s %s" % (ud.proto, ud.host, ud.path, repodir), d)
os.chdir(repodir)
- rungitcmd("git pull %s://%s%s" % (ud.proto, ud.host, ud.path),d)
- rungitcmd("git pull --tags %s://%s%s" % (ud.proto, ud.host, ud.path),d)
- rungitcmd("git prune-packed", d)
- rungitcmd("git pack-redundant --all | xargs -r rm", d)
+ runfetchcmd("git pull %s://%s%s" % (ud.proto, ud.host, ud.path), d)
+ runfetchcmd("git pull --tags %s://%s%s" % (ud.proto, ud.host, ud.path), d)
+ runfetchcmd("git prune-packed", d)
+ runfetchcmd("git pack-redundant --all | xargs -r rm", d)
# Remove all but the .git directory
- rungitcmd("rm * -Rf", d)
+ runfetchcmd("rm * -Rf", d)
# old method of downloading tags
- #rungitcmd("rsync -a --verbose --stats --progress rsync://%s%s/ %s" % (ud.host, ud.path, os.path.join(repodir, ".git", "")),d)
+ #runfetchcmd("rsync -a --verbose --stats --progress rsync://%s%s/ %s" % (ud.host, ud.path, os.path.join(repodir, ".git", "")), d)
os.chdir(repodir)
bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git repository")
- rungitcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ),d)
+ runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ), d)
if os.path.exists(codir):
prunedir(codir)
bb.mkdirhier(codir)
os.chdir(repodir)
- rungitcmd("git read-tree %s" % (ud.tag),d)
- rungitcmd("git checkout-index -q -f --prefix=%s -a" % (os.path.join(codir, "git", "")),d)
+ runfetchcmd("git read-tree %s" % (ud.tag), d)
+ runfetchcmd("git checkout-index -q -f --prefix=%s -a" % (os.path.join(codir, "git", "")), d)
os.chdir(codir)
bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git checkout")
- rungitcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*") ),d)
+ runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*") ), d)
+
+    def latest_revision(self, url, ud, d):
+        output = runfetchcmd("git ls-remote %s://%s%s %s" % (ud.proto, ud.host, ud.path, ud.tag), d, True)
+        return output.split()[0]
+
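For reference, latest_revision relies on git ls-remote printing one
"<sha>\t<ref>" line per matching ref; a standalone sketch of the same parsing,
with a hypothetical repository URL:

    import os

    # hypothetical repository; ls-remote emits "<sha>\t<ref>" per matching ref
    out = os.popen("git ls-remote git://example.org/repo.git master").read()
    sha = out.split()[0]    # first field of the first line is the commit id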
diff --git a/lib/bb/fetch/svn.py b/lib/bb/fetch/svn.py
index 23ca262bc..5229838c3 100644
--- a/lib/bb/fetch/svn.py
+++ b/lib/bb/fetch/svn.py
@@ -30,6 +30,7 @@ from bb import data
from bb.fetch import Fetch
from bb.fetch import FetchError
from bb.fetch import MissingParameterError
+from bb.fetch import runfetchcmd
class Svn(Fetch):
"""Class to fetch a module or modules from svn repositories"""
@@ -48,19 +49,19 @@ class Svn(Fetch):
ud.revision = ""
if 'rev' in ud.parm:
+ ud.date = ""
ud.revision = ud.parm['rev']
-
- if ud.revision:
+ elif ud.date == "now":
ud.date = ""
+ # FIXME caching
+ ud.revision = self.latest_revision(url, ud, d)
ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.moddir, ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)
return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
def forcefetch(self, url, ud, d):
- if (ud.date == "now"):
- return True
- return False
+ return ud.force
def _buildsvncommand(self, ud, d, command):
"""
@@ -80,11 +81,11 @@ class Svn(Fetch):
svnroot = ud.host + ud.path
- # either use the revision, or SRCDATE in braces, or nothing for SRCDATE = "now"
+    # either use the revision, or SRCDATE in braces
options = []
if ud.revision:
options.append("-r %s" % ud.revision)
- elif ud.date != "now":
+ elif ud.date:
options.append("-r {%s}" % ud.date)
if ud.user:
@@ -130,7 +131,7 @@ class Svn(Fetch):
# update sources there
os.chdir(moddir)
bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svnupdatecmd)
- myret = os.system(svnupdatecmd)
+ runfetchcmd(svnupdatecmd, d)
else:
svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
@@ -138,18 +139,31 @@ class Svn(Fetch):
bb.mkdirhier(pkgdir)
os.chdir(pkgdir)
bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svnfetchcmd)
- myret = os.system(svnfetchcmd)
-
- if myret != 0:
- raise FetchError(ud.module)
+ runfetchcmd(svnfetchcmd, d)
os.chdir(pkgdir)
# tar them up to a defined filename
- myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)))
- if myret != 0:
+        try:
+            runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)), d)
+        except:
+            # remove any partial tarball, then re-raise the original error
             try:
                 os.unlink(ud.localpath)
             except OSError:
                 pass
-            raise FetchError(ud.module)
+            raise
+
+ def latest_revision(self, url, ud, d):
+ output = runfetchcmd("LANG= LC_ALL= " + self._buildsvncommand(ud, d, "info"), d, True)
+
+ revision = None
+ for line in output.splitlines():
+ if "Last Changed Rev" in line:
+ revision = line.split(":")[1].strip()
+
+ return revision
+
+    def sortable_revision(self, url, ud, d):
+        return self.latest_revision(url, ud, d)
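The parsing in Svn.latest_revision can be exercised standalone; the sketch
below feeds it canned 'svn info' output (the URL and revision number are
made up):

    # mimic what 'svn info' prints for a checked-out module
    output = """Path: trunk
    URL: svn://example.org/repo/trunk
    Last Changed Rev: 12345
    Last Changed Date: 2007-07-28 22:44:43 +0000"""

    revision = None
    for line in output.splitlines():
        if "Last Changed Rev" in line:
            revision = line.split(":")[1].strip()
    assert revision == "12345"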
diff --git a/lib/bb/msg.py b/lib/bb/msg.py
index 71b0b05b7..98cb6e6bf 100644
--- a/lib/bb/msg.py
+++ b/lib/bb/msg.py
@@ -37,6 +37,7 @@ domain = bb.utils.Enum(
'Depends',
'Fetcher',
'Parsing',
+ 'PersistData',
'Provider',
'RunQueue',
'TaskData',
diff --git a/lib/bb/persist_data.py b/lib/bb/persist_data.py
new file mode 100644
index 000000000..a0263e453
--- /dev/null
+++ b/lib/bb/persist_data.py
@@ -0,0 +1,92 @@
+# BitBake Persistent Data Store
+#
+# Copyright (C) 2007 Richard Purdie
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import bb, os
+
+try:
+ import sqlite3
+except ImportError:
+ bb.msg.fatal(bb.msg.domain.PersistData, "Importing sqlite3 failed, please install it.")
+
+class PersistData:
+ """
+ BitBake Persistent Data Store
+
+ Used to store data in a central location such that other threads/tasks can
+ access them at some future date.
+
+ The "domain" is used as a key to isolate each data pool and in this
+ implementation corresponds to an SQL table. The SQL table consists of a
+ simple key and value pair.
+
+    Why sqlite? It handles all the locking issues for us.
+    (A usage sketch follows this diff.)
+    """
+    def __init__(self, d):
+        self.cachedir = bb.data.getVar("CACHE", d, True)
+        if self.cachedir in [None, '']:
+            bb.msg.fatal(bb.msg.domain.PersistData, "Please set the 'CACHE' variable.")
+ try:
+ os.stat(self.cachedir)
+ except OSError:
+ bb.mkdirhier(self.cachedir)
+
+ self.cachefile = os.path.join(self.cachedir,"bb_persist_data.sqlite3")
+ bb.msg.debug(1, bb.msg.domain.PersistData, "Using '%s' as the persistent data cache" % self.cachefile)
+
+ self.connection = sqlite3.connect(self.cachefile, isolation_level=None)
+
+    def addDomain(self, domain):
+        """
+        Should be called before any domain is used.
+        Creates the domain (table) if it doesn't exist.
+        """
+ self.connection.execute("CREATE TABLE IF NOT EXISTS %s(key TEXT, value TEXT);" % domain)
+
+ def delDomain(self, domain):
+ """
+ Removes a domain and all the data it contains
+ """
+ self.connection.execute("DROP TABLE IF EXISTS %s;" % domain)
+
+ def getValue(self, domain, key):
+ """
+ Return the value of a key for a domain
+ """
+ data = self.connection.execute("SELECT * from %s where key=?;" % domain, [key])
+ for row in data:
+ return row[1]
+
+ def setValue(self, domain, key, value):
+ """
+ Sets the value of a key for a domain
+ """
+ data = self.connection.execute("SELECT * from %s where key=?;" % domain, [key])
+ rows = 0
+ for row in data:
+ rows = rows + 1
+ if rows:
+ self.connection.execute("UPDATE %s SET value=? WHERE key=?;" % domain, [value, key])
+ else:
+ self.connection.execute("INSERT into %s(key, value) values (?, ?);" % domain, [key, value])
+
+ def delValue(self, domain, key):
+ """
+ Deletes a key/value pair
+ """
+ self.connection.execute("DELETE from %s where key=?;" % domain, [key])
+
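Taken together, the store is meant to be driven like this; a minimal sketch,
assuming `d` is a configuration data store with CACHE set (the domain name
matches the fetcher's, the key is illustrative):

    from bb import persist_data

    pd = persist_data.PersistData(d)   # opens ${CACHE}/bb_persist_data.sqlite3
    pd.addDomain("BB_URLDATA")         # one sqlite table per domain
    pd.setValue("BB_URLDATA", "recipe.bb", "pickled urldata string")
    print pd.getValue("BB_URLDATA", "recipe.bb")
    pd.delValue("BB_URLDATA", "recipe.bb")

Note that the connection is opened with isolation_level=None, so every
statement autocommits; combined with sqlite's own file locking, this is what
lets concurrent threads and tasks share the store, per the class docstring.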