path: root/lib/bb
author     Yu Ke <ke.yu@intel.com>                                 2011-01-26 20:14:06 +0800
committer  Richard Purdie <richard.purdie@linuxfoundation.org>     2011-02-10 23:36:48 +0000
commit     7b7a4517c60025c075dc8da973ceb2860d4c1e06 (patch)
tree       f57e68feb81d4af7afeabf25a37d85b58ca76372 /lib/bb
parent     26b9af80971dfe5f0d59c950ff92aff8d59569e0 (diff)
download   bitbake-7b7a4517c60025c075dc8da973ceb2860d4c1e06.tar.gz
fetch2: Allow multiple src rev to be used in one src_uri
* SRC_URI format: SRC_URI is extended to allow multiple source revisions per entry:
      name=<name1>,<name2>,...,<name-n>
      branch=<branch1>,<branch2>,...,<branch-n>
  SRCREV can also be defined per name:
      SRCREV_<name1> = xxxxx
      SRCREV_<name2> = xxxxx

* FetchData extension: to support multiple source revisions, several fields are added to FetchData:
  - FetchData.names: list of the names in SRC_URI, one per source revision; the name is the index into the revisions and branches below.
  - FetchData.revisions: dictionary of name -> revision.
  - FetchData.branches: dictionary of name -> branch.

  For example, the linux-yocto recipe becomes:
      SRC_URI = "git://git.pokylinux.org/linux-yocto-2.6.37;protocol=git;branch=${KBRANCH},meta;name=machine,meta"
      FetchData.names = ['machine', 'meta']
      FetchData.revisions = { 'machine':xxxxx, 'meta':xxxxxx }
      FetchData.branches = { 'machine':${KBRANCH}, 'meta':'meta' }

* Generic revision handling extension: the related revision handling code in fetch2/__init__.py is changed accordingly. The major change is the addition of a name parameter to indicate which source revision is being handled. Originally there was one source revision per FetchData, so passing the FetchData was enough; now that a FetchData can carry multiple source revisions, FetchData plus a name is needed to identify a revision.

* Git extension: the git fetcher is also revised to take advantage of the multiple source revisions in FetchData; in particular, the download() method is enhanced to fetch multiple source revisions.

* Other fetchers (svn, hg, ...) do not support multiple source revisions. They only sync their API to accept the name parameter and then ignore it; there is no functional change.

(From Poky rev: 0e837e6844be449659bb96a1498e54a8b9442d13)

Signed-off-by: Yu Ke <ke.yu@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
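As a concrete illustration of the name/branch/SRCREV mapping described above, here is a minimal standalone Python sketch (not BitBake's implementation; the parse helper, the 'standard/base' branch standing in for ${KBRANCH}, and the revision values are all hypothetical) that pairs comma-separated name= and branch= parameters with per-name SRCREV settings:

# Standalone sketch: pair comma-separated name= and branch= URI parameters with
# per-name SRCREV values, mirroring the FetchData fields described above.
# parse_params(), build_fetchdata() and the example data are illustrative only.

def parse_params(uri):
    """Split the ;key=value parameters off a SRC_URI entry."""
    parts = uri.split(';')
    params = dict(p.split('=', 1) for p in parts[1:])
    return parts[0], params

def build_fetchdata(uri, srcrevs):
    """Return (names, revisions, branches) the way the new FetchData fields hold them."""
    _, parm = parse_params(uri)
    names = parm.get('name', '').split(',')
    branches = parm.get('branch', 'master').split(',')
    if len(branches) != len(names):
        raise ValueError("name and branch count mismatch in %s" % uri)
    revisions = dict((n, srcrevs.get('SRCREV_%s' % n, srcrevs.get('SRCREV'))) for n in names)
    return names, revisions, dict(zip(names, branches))

# 'standard/base' stands in for ${KBRANCH}; the revisions are made up.
uri = "git://git.pokylinux.org/linux-yocto-2.6.37;protocol=git;branch=standard/base,meta;name=machine,meta"
srcrevs = {'SRCREV_machine': 'deadbeef', 'SRCREV_meta': 'cafebabe'}
print(build_fetchdata(uri, srcrevs))
# -> (['machine', 'meta'],
#     {'machine': 'deadbeef', 'meta': 'cafebabe'},
#     {'machine': 'standard/base', 'meta': 'meta'})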
Diffstat (limited to 'lib/bb')
-rw-r--r--  lib/bb/fetch2/__init__.py   59
-rw-r--r--  lib/bb/fetch2/bzr.py         4
-rw-r--r--  lib/bb/fetch2/git.py        66
-rw-r--r--  lib/bb/fetch2/hg.py          4
-rw-r--r--  lib/bb/fetch2/svn.py         4
5 files changed, 73 insertions, 64 deletions
diff --git a/lib/bb/fetch2/__init__.py b/lib/bb/fetch2/__init__.py
index d62ba81b3..41848edd2 100644
--- a/lib/bb/fetch2/__init__.py
+++ b/lib/bb/fetch2/__init__.py
@@ -394,8 +394,8 @@ def get_srcrev(d):
logger.error("SRCREV was used yet no valid SCM was found in SRC_URI")
raise ParameterError
- if len(scms) == 1:
- return urldata[scms[0]].method.sortable_revision(scms[0], urldata[scms[0]], d)
+ if len(scms) == 1 and len(urldata[scms[0]].names) == 1:
+ return urldata[scms[0]].method.sortable_revision(scms[0], urldata[scms[0]], d, urldata[scms[0]].names[0])
#
# Mutiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
@@ -406,9 +406,9 @@ def get_srcrev(d):
raise ParameterError
for scm in scms:
- if 'name' in urldata[scm].parm:
- name = urldata[scm].parm["name"]
- rev = urldata[scm].method.sortable_revision(scm, urldata[scm], d)
+ ud = urldata[scm]
+ for name in ud.names:
+ rev = ud.method.sortable_revision(scm, ud, d, name)
format = format.replace(name, rev)
return format
@@ -550,16 +550,29 @@ class FetchData(object):
self.md5_expected = bb.data.getVarFlag("SRC_URI", self.md5_name, d)
self.sha256_expected = bb.data.getVarFlag("SRC_URI", self.sha256_name, d)
+ self.names = self.parm.get("name",'').split(',')
for m in methods:
if m.supports(url, self, d):
self.method = m
if hasattr(m,"urldata_init"):
m.urldata_init(self, d)
if m.supports_srcrev():
- self.revision = Fetch.srcrev_internal_helper(self, d);
+ self.setup_srcrevs(d)
return
raise NoMethodError("Missing implementation for url %s" % url)
+ def setup_srcrevs(self, d):
+ if not self.method.supports_srcrev():
+ return
+
+ self.revisions = {}
+ for name in self.names:
+ self.revisions[name] = Fetch.srcrev_internal_helper(self, d, name)
+
+ # add compatibility code for non name specified case
+ if len(self.names) == 1:
+ self.revision = self.revisions[self.names[0]]
+
def setup_localpath(self, d):
self.setup = True
if "localpath" in self.parm:
@@ -757,7 +770,7 @@ class Fetch(object):
return data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)
getSRCDate = staticmethod(getSRCDate)
- def srcrev_internal_helper(ud, d):
+ def srcrev_internal_helper(ud, d, name):
"""
Return:
a) a source revision if specified
@@ -772,25 +785,25 @@ class Fetch(object):
return ud.parm['tag']
rev = None
- if 'name' in ud.parm:
+ if name != '':
pn = data.getVar("PN", d, 1)
- rev = data.getVar("SRCREV_%s_pn-%s" % (ud.parm['name'], pn), d, 1)
+ rev = data.getVar("SRCREV_%s_pn-%s" % (name, pn), d, 1)
if not rev:
- rev = data.getVar("SRCREV_pn-%s_%s" % (pn, ud.parm['name']), d, 1)
+ rev = data.getVar("SRCREV_pn-%s_%s" % (pn, name), d, 1)
if not rev:
- rev = data.getVar("SRCREV_%s" % (ud.parm['name']), d, 1)
+ rev = data.getVar("SRCREV_%s" % name, d, 1)
if not rev:
rev = data.getVar("SRCREV", d, 1)
if rev == "INVALID":
raise InvalidSRCREV("Please set SRCREV to a valid value")
if rev == "AUTOINC":
- rev = ud.method.latest_revision(ud.url, ud, d)
+ rev = ud.method.latest_revision(ud.url, ud, d, name)
return rev
srcrev_internal_helper = staticmethod(srcrev_internal_helper)
- def localcount_internal_helper(ud, d):
+ def localcount_internal_helper(ud, d, name):
"""
Return:
a) a locked localcount if specified
@@ -798,9 +811,9 @@ class Fetch(object):
"""
localcount = None
- if 'name' in ud.parm:
+ if name != '':
pn = data.getVar("PN", d, 1)
- localcount = data.getVar("LOCALCOUNT_" + ud.parm['name'], d, 1)
+ localcount = data.getVar("LOCALCOUNT_" + name, d, 1)
if not localcount:
localcount = data.getVar("LOCALCOUNT", d, 1)
return localcount
@@ -829,7 +842,7 @@ class Fetch(object):
md5out.close()
write_md5sum = staticmethod(write_md5sum)
- def latest_revision(self, url, ud, d):
+ def latest_revision(self, url, ud, d, name):
"""
Look in the cache for the latest revision, if not present ask the SCM.
"""
@@ -838,15 +851,15 @@ class Fetch(object):
pd = persist_data.persist(d)
revs = pd['BB_URI_HEADREVS']
- key = self.generate_revision_key(url, ud, d)
+ key = self.generate_revision_key(url, ud, d, name)
rev = revs[key]
if rev != None:
return str(rev)
- revs[key] = rev = self._latest_revision(url, ud, d)
+ revs[key] = rev = self._latest_revision(url, ud, d, name)
return rev
- def sortable_revision(self, url, ud, d):
+ def sortable_revision(self, url, ud, d, name):
"""
"""
@@ -855,9 +868,9 @@ class Fetch(object):
pd = persist_data.persist(d)
localcounts = pd['BB_URI_LOCALCOUNT']
- key = self.generate_revision_key(url, ud, d)
+ key = self.generate_revision_key(url, ud, d, name)
- latest_rev = self._build_revision(url, ud, d)
+ latest_rev = self._build_revision(url, ud, d, name)
last_rev = localcounts[key + '_rev']
uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False
count = None
@@ -885,8 +898,8 @@ class Fetch(object):
return str(count + "+" + latest_rev)
- def generate_revision_key(self, url, ud, d):
- key = self._revision_key(url, ud, d)
+ def generate_revision_key(self, url, ud, d, name):
+ key = self._revision_key(url, ud, d, name)
return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "")
from . import cvs
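The per-name SRCREV lookup order introduced in srcrev_internal_helper above can be summarised with a small standalone sketch; the plain dictionary stands in for the BitBake datastore and all values are hypothetical. The real helper additionally honours the tag parameter and the INVALID/AUTOINC sentinels, as the hunk above shows.

# Sketch of the per-name SRCREV lookup order used by srcrev_internal_helper.
# `varstore` is a plain dict standing in for the BitBake datastore; the real
# helper also checks the tag parameter and the INVALID/AUTOINC sentinels.
def lookup_srcrev(varstore, pn, name):
    if name:
        candidates = ["SRCREV_%s_pn-%s" % (name, pn),
                      "SRCREV_pn-%s_%s" % (pn, name),
                      "SRCREV_%s" % name,
                      "SRCREV"]
    else:
        candidates = ["SRCREV"]
    for key in candidates:
        rev = varstore.get(key)
        if rev:
            return rev
    return None

# Example with made-up values: the name-specific setting wins over plain SRCREV.
print(lookup_srcrev({"SRCREV_meta": "1234abcd", "SRCREV": "AUTOINC"},
                    "linux-yocto", "meta"))   # -> 1234abcd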
diff --git a/lib/bb/fetch2/bzr.py b/lib/bb/fetch2/bzr.py
index 6e1970b71..80c02f835 100644
--- a/lib/bb/fetch2/bzr.py
+++ b/lib/bb/fetch2/bzr.py
@@ -120,13 +120,13 @@ class Bzr(Fetch):
def supports_srcrev(self):
return True
- def _revision_key(self, url, ud, d):
+ def _revision_key(self, url, ud, d, name):
"""
Return a unique key for the url
"""
return "bzr:" + ud.pkgdir
- def _latest_revision(self, url, ud, d):
+ def _latest_revision(self, url, ud, d, name):
"""
Return the latest upstream revision number
"""
diff --git a/lib/bb/fetch2/git.py b/lib/bb/fetch2/git.py
index 07af02f06..c54d826a0 100644
--- a/lib/bb/fetch2/git.py
+++ b/lib/bb/fetch2/git.py
@@ -57,7 +57,13 @@ class Git(Fetch):
if 'nocheckout' in ud.parm:
ud.nocheckout = True
- ud.branch = ud.parm.get("branch", "master")
+ branches = ud.parm.get("branch", "master").split(',')
+ if len(branches) != len(ud.names):
raise bb.fetch2.ParameterError("SRC_URI (%s) name and branch number mismatch" % ud.url)
+ ud.branches = {}
+ for name in ud.names:
+ branch = branches[ud.names.index(name)]
+ ud.branches[name] = branch
gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.'))
ud.mirrortarball = 'git_%s.tar.gz' % (gitsrcname)
@@ -66,25 +72,18 @@ class Git(Fetch):
ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
def localpath(self, url, ud, d):
- ud.tag = ud.revision
- if not ud.tag or ud.tag == "master":
- ud.tag = self.latest_revision(url, ud, d)
+ for name in ud.names:
+ if not ud.revisions[name] or ud.revisions[name] == "master":
+ ud.revisions[name] = self.latest_revision(url, ud, d, name)
ud.localfile = ud.mirrortarball
- if 'noclone' in ud.parm:
- ud.localfile = None
- return None
-
return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
def forcefetch(self, url, ud, d):
- if 'fullclone' in ud.parm:
- return True
- if 'noclone' in ud.parm:
- return False
- if not self._contains_ref(ud.tag, d):
- return True
+ for name in ud.names:
+ if not self._contains_ref(ud.revisions[name], d):
+ return True
return False
def try_premirror(self, u, ud, d):
@@ -122,18 +121,15 @@ class Git(Fetch):
os.chdir(ud.clonedir)
# Update the checkout if needed
- if not self._contains_ref(ud.tag, d) or 'fullclone' in ud.parm:
- # Remove all but the .git directory
- bb.fetch2.check_network_access(d, "git fetch %s%s" %(ud.host, ud.path))
- runfetchcmd("rm * -Rf", d)
- if 'fullclone' in ud.parm:
- runfetchcmd("%s fetch --all" % (ud.basecmd), d)
- else:
- runfetchcmd("%s fetch %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.branch), d)
- runfetchcmd("%s fetch --tags %s://%s%s%s" % (ud.basecmd, ud.proto, username, ud.host, ud.path), d)
- runfetchcmd("%s prune-packed" % ud.basecmd, d)
- runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)
- ud.repochanged = True
+ for name in ud.names:
+ if not self._contains_ref(ud.revisions[name], d):
+ # Remove all but the .git directory
+ bb.fetch2.check_network_access(d, "git fetch %s%s" %(ud.host, ud.path))
+ runfetchcmd("%s fetch %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.branches[name]), d)
+ runfetchcmd("%s fetch --tags %s://%s%s%s" % (ud.basecmd, ud.proto, username, ud.host, ud.path), d)
+ runfetchcmd("%s prune-packed" % ud.basecmd, d)
+ runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)
+ ud.repochanged = True
def build_mirror_data(self, url, ud, d):
# Generate a mirror tarball if needed
@@ -141,7 +137,7 @@ class Git(Fetch):
os.chdir(ud.clonedir)
mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True)
- if (mirror_tarballs != "0" or 'fullclone' in ud.parm) and ud.repochanged:
+ if mirror_tarballs != "0" and ud.repochanged:
logger.info("Creating tarball of git repository")
runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ), d)
@@ -165,7 +161,7 @@ class Git(Fetch):
runfetchcmd("cp -af %s/.git/packed-refs %s/.git/" %(ud.clonedir, destdir), d)
if not ud.nocheckout:
os.chdir(destdir)
- runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.tag, readpathspec), d)
+ runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d)
runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d)
return True
@@ -177,13 +173,13 @@ class Git(Fetch):
output = runfetchcmd("%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (basecmd, tag), d, quiet=True)
return output.split()[0] != "0"
- def _revision_key(self, url, ud, d):
+ def _revision_key(self, url, ud, d, name):
"""
Return a unique key for the url
"""
- return "git:" + ud.host + ud.path.replace('/', '.') + ud.branch
+ return "git:" + ud.host + ud.path.replace('/', '.') + ud.branches[name]
- def _latest_revision(self, url, ud, d):
+ def _latest_revision(self, url, ud, d, name):
"""
Compute the HEAD revision for the url
"""
@@ -192,16 +188,16 @@ class Git(Fetch):
else:
username = ""
- bb.fetch2.check_network_access(d, "git ls-remote %s%s %s" % (ud.host, ud.path, ud.branch))
+ bb.fetch2.check_network_access(d, "git ls-remote %s%s %s" % (ud.host, ud.path, ud.branches[name]))
basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
- cmd = "%s ls-remote %s://%s%s%s %s" % (basecmd, ud.proto, username, ud.host, ud.path, ud.branch)
+ cmd = "%s ls-remote %s://%s%s%s %s" % (basecmd, ud.proto, username, ud.host, ud.path, ud.branches[name])
output = runfetchcmd(cmd, d, True)
if not output:
raise bb.fetch2.FetchError("Fetch command %s gave empty output\n" % (cmd))
return output.split()[0]
- def _build_revision(self, url, ud, d):
- return ud.tag
+ def _build_revision(self, url, ud, d, name):
+ return ud.revisions[name]
def _sortable_buildindex_disabled(self, url, ud, d, rev):
"""
diff --git a/lib/bb/fetch2/hg.py b/lib/bb/fetch2/hg.py
index 4ba28c710..13e9b8673 100644
--- a/lib/bb/fetch2/hg.py
+++ b/lib/bb/fetch2/hg.py
@@ -163,7 +163,7 @@ class Hg(Fetch):
def supports_srcrev(self):
return True
- def _latest_revision(self, url, ud, d):
+ def _latest_revision(self, url, ud, d, name):
"""
Compute tip revision for the url
"""
@@ -174,7 +174,7 @@ class Hg(Fetch):
def _build_revision(self, url, ud, d):
return ud.revision
- def _revision_key(self, url, ud, d):
+ def _revision_key(self, url, ud, d, name):
"""
Return a unique key for the url
"""
diff --git a/lib/bb/fetch2/svn.py b/lib/bb/fetch2/svn.py
index 8d768026b..96d5b1683 100644
--- a/lib/bb/fetch2/svn.py
+++ b/lib/bb/fetch2/svn.py
@@ -172,13 +172,13 @@ class Svn(Fetch):
def supports_srcrev(self):
return True
- def _revision_key(self, url, ud, d):
+ def _revision_key(self, url, ud, d, name):
"""
Return a unique key for the url
"""
return "svn:" + ud.moddir
- def _latest_revision(self, url, ud, d):
+ def _latest_revision(self, url, ud, d, name):
"""
Return the latest upstream revision number
"""