diff options
author | Richard Purdie <rpurdie@linux.intel.com> | 2006-11-12 00:12:32 +0000 |
---|---|---|
committer | Richard Purdie <rpurdie@linux.intel.com> | 2006-11-12 00:12:32 +0000 |
commit | e8c28acecb269b3f115f5a548f5f3897f38d9296 (patch) | |
tree | b0a1369d22336b29ccb19c8faa4f8a545956da2e /lib/bb/fetch | |
parent | e9cd560307a02c33452f13f4cd3ab0280af28b69 (diff) | |
download | bitbake-e8c28acecb269b3f115f5a548f5f3897f38d9296.tar.gz |
fetchers: Refactor iterating through urls into the fetcher core (preparing for common md5/tarball stash code)
Diffstat (limited to 'lib/bb/fetch')
-rw-r--r-- | lib/bb/fetch/__init__.py | 6 | ||||
-rw-r--r-- | lib/bb/fetch/cvs.py | 219 | ||||
-rw-r--r-- | lib/bb/fetch/git.py | 117 | ||||
-rw-r--r-- | lib/bb/fetch/local.py | 2 | ||||
-rw-r--r-- | lib/bb/fetch/ssh.py | 88 | ||||
-rw-r--r-- | lib/bb/fetch/svk.py | 147 | ||||
-rw-r--r-- | lib/bb/fetch/svn.py | 197 | ||||
-rw-r--r-- | lib/bb/fetch/wget.py | 90 |
8 files changed, 421 insertions, 445 deletions
diff --git a/lib/bb/fetch/__init__.py b/lib/bb/fetch/__init__.py index 946011640..00747d7dc 100644 --- a/lib/bb/fetch/__init__.py +++ b/lib/bb/fetch/__init__.py @@ -93,8 +93,8 @@ def init(urls = [], d = None): def go(d): """Fetch all urls""" for m in methods: - if m.urls: - m.go(d) + for u in m.urls: + m.go(d, u) def localpaths(d): """Return a list of the local filenames, assuming successful fetch""" @@ -148,7 +148,7 @@ class Fetch(object): data = property(getData, setData, None, "Data property") - def go(self, urls = []): + def go(self, d, url): """Fetch urls""" raise NoMethodError("Missing implementation for url") diff --git a/lib/bb/fetch/cvs.py b/lib/bb/fetch/cvs.py index 45d40c163..4a467bef1 100644 --- a/lib/bb/fetch/cvs.py +++ b/lib/bb/fetch/cvs.py @@ -67,132 +67,127 @@ class Cvs(Fetch): return os.path.join(data.getVar("DL_DIR", d, 1),data.expand('%s_%s_%s_%s.tar.gz' % ( module.replace('/', '.'), host, tag, date), d)) localpath = staticmethod(localpath) - def go(self, d, urls = []): - """Fetch urls""" - if not urls: - urls = self.urls + def go(self, d, loc): localdata = data.createCopy(d) data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata) data.update_data(localdata) - for loc in urls: - (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, localdata)) - if not "module" in parm: - raise MissingParameterError("cvs method needs a 'module' parameter") - else: - module = parm["module"] - - dlfile = self.localpath(loc, localdata) - dldir = data.getVar('DL_DIR', localdata, 1) -# if local path contains the cvs -# module, consider the dir above it to be the -# download directory -# pos = dlfile.find(module) -# if pos: -# dldir = dlfile[:pos] -# else: -# dldir = os.path.dirname(dlfile) - -# setup cvs options - options = [] - if 'tag' in parm: - tag = parm['tag'] - else: - tag = "" + (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, localdata)) + if not "module" in parm: + raise 
MissingParameterError("cvs method needs a 'module' parameter") + else: + module = parm["module"] - if 'date' in parm: - date = parm['date'] - else: - if not tag: - date = Fetch.getSRCDate(d) - else: - date = "" + dlfile = self.localpath(loc, localdata) + dldir = data.getVar('DL_DIR', localdata, 1) +# if local path contains the cvs +# module, consider the dir above it to be the +# download directory +# pos = dlfile.find(module) +# if pos: +# dldir = dlfile[:pos] +# else: +# dldir = os.path.dirname(dlfile) + +# setup cvs options + options = [] + if 'tag' in parm: + tag = parm['tag'] + else: + tag = "" - if "method" in parm: - method = parm["method"] + if 'date' in parm: + date = parm['date'] + else: + if not tag: + date = Fetch.getSRCDate(d) else: - method = "pserver" + date = "" - if "localdir" in parm: - localdir = parm["localdir"] - else: - localdir = module + if "method" in parm: + method = parm["method"] + else: + method = "pserver" - cvs_rsh = None - if method == "ext": - if "rsh" in parm: - cvs_rsh = parm["rsh"] + if "localdir" in parm: + localdir = parm["localdir"] + else: + localdir = module - tarfn = data.expand('%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host, tag, date), localdata) - data.setVar('TARFILES', dlfile, localdata) - data.setVar('TARFN', tarfn, localdata) + cvs_rsh = None + if method == "ext": + if "rsh" in parm: + cvs_rsh = parm["rsh"] - # try to use the tarball stash - if Fetch.check_for_tarball(d, tarfn, dldir, date): - bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping cvs checkout." 
% tarfn) - continue + tarfn = data.expand('%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host, tag, date), localdata) + data.setVar('TARFILES', dlfile, localdata) + data.setVar('TARFN', tarfn, localdata) - if date: - options.append("-D %s" % date) - if tag: - options.append("-r %s" % tag) + # try to use the tarball stash + if Fetch.check_for_tarball(d, tarfn, dldir, date): + bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping cvs checkout." % tarfn) + return - olddir = os.path.abspath(os.getcwd()) - os.chdir(data.expand(dldir, localdata)) + if date: + options.append("-D %s" % date) + if tag: + options.append("-r %s" % tag) -# setup cvsroot - if method == "dir": - cvsroot = path - else: - cvsroot = ":" + method + ":" + user - if pswd: - cvsroot += ":" + pswd - cvsroot += "@" + host + ":" + path - - data.setVar('CVSROOT', cvsroot, localdata) - data.setVar('CVSCOOPTS', " ".join(options), localdata) - data.setVar('CVSMODULE', module, localdata) - cvscmd = data.getVar('FETCHCOMMAND', localdata, 1) - cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, 1) - - if cvs_rsh: - cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd) - cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd) - -# create module directory - bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory") - pkg=data.expand('${PN}', d) - pkgdir=os.path.join(data.expand('${CVSDIR}', localdata), pkg) - moddir=os.path.join(pkgdir,localdir) - if os.access(os.path.join(moddir,'CVS'), os.R_OK): - bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc) -# update sources there - os.chdir(moddir) - myret = os.system(cvsupdatecmd) - else: - bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc) -# check out sources there - bb.mkdirhier(pkgdir) - os.chdir(pkgdir) - bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cvscmd) - myret = os.system(cvscmd) - - if myret != 0 or not os.access(moddir, os.R_OK): - try: - os.rmdir(moddir) - except OSError: - pass - raise 
FetchError(module) + olddir = os.path.abspath(os.getcwd()) + os.chdir(data.expand(dldir, localdata)) +# setup cvsroot + if method == "dir": + cvsroot = path + else: + cvsroot = ":" + method + ":" + user + if pswd: + cvsroot += ":" + pswd + cvsroot += "@" + host + ":" + path + + data.setVar('CVSROOT', cvsroot, localdata) + data.setVar('CVSCOOPTS', " ".join(options), localdata) + data.setVar('CVSMODULE', module, localdata) + cvscmd = data.getVar('FETCHCOMMAND', localdata, 1) + cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, 1) + + if cvs_rsh: + cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd) + cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd) + +# create module directory + bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory") + pkg=data.expand('${PN}', d) + pkgdir=os.path.join(data.expand('${CVSDIR}', localdata), pkg) + moddir=os.path.join(pkgdir,localdir) + if os.access(os.path.join(moddir,'CVS'), os.R_OK): + bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc) +# update sources there os.chdir(moddir) - os.chdir('..') -# tar them up to a defined filename - myret = os.system("tar -czf %s %s" % (os.path.join(dldir,tarfn), os.path.basename(moddir))) - if myret != 0: - try: - os.unlink(tarfn) - except OSError: - pass - os.chdir(olddir) - del localdata + myret = os.system(cvsupdatecmd) + else: + bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc) +# check out sources there + bb.mkdirhier(pkgdir) + os.chdir(pkgdir) + bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cvscmd) + myret = os.system(cvscmd) + + if myret != 0 or not os.access(moddir, os.R_OK): + try: + os.rmdir(moddir) + except OSError: + pass + raise FetchError(module) + + os.chdir(moddir) + os.chdir('..') +# tar them up to a defined filename + myret = os.system("tar -czf %s %s" % (os.path.join(dldir,tarfn), os.path.basename(moddir))) + if myret != 0: + try: + os.unlink(tarfn) + except OSError: + pass + os.chdir(olddir) diff --git a/lib/bb/fetch/git.py 
b/lib/bb/fetch/git.py index fe88ee058..b511b450e 100644 --- a/lib/bb/fetch/git.py +++ b/lib/bb/fetch/git.py @@ -96,63 +96,60 @@ class Git(Fetch): localpath = staticmethod(localpath) - def go(self, d, urls = []): - """Fetch urls""" - if not urls: - urls = self.urls - - for loc in urls: - (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, d)) - - tag = gettag(parm) - proto = getprotocol(parm) - - gitsrcname = '%s%s' % (host, path.replace('/', '.')) - - repofilename = 'git_%s.tar.gz' % (gitsrcname) - repofile = os.path.join(data.getVar("DL_DIR", d, 1), repofilename) - repodir = os.path.join(data.expand('${GITDIR}', d), gitsrcname) - - coname = '%s' % (tag) - codir = os.path.join(repodir, coname) - - cofile = self.localpath(loc, d) - - # tag=="master" must always update - if (tag != "master") and Fetch.try_mirror(d, localfile(loc, d)): - bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists (or was stashed). Skipping git checkout." % cofile) - continue - - if not os.path.exists(repodir): - if Fetch.try_mirror(d, repofilename): - bb.mkdirhier(repodir) - os.chdir(repodir) - rungitcmd("tar -xzf %s" % (repofile),d) - else: - rungitcmd("git clone -n %s://%s%s %s" % (proto, host, path, repodir),d) - - os.chdir(repodir) - rungitcmd("git pull %s://%s%s" % (proto, host, path),d) - rungitcmd("git pull --tags %s://%s%s" % (proto, host, path),d) - rungitcmd("git prune-packed", d) - rungitcmd("git pack-redundant --all | xargs -r rm", d) - # Remove all but the .git directory - rungitcmd("rm * -Rf", d) - # old method of downloading tags - #rungitcmd("rsync -a --verbose --stats --progress rsync://%s%s/ %s" % (host, path, os.path.join(repodir, ".git", "")),d) - - os.chdir(repodir) - bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git repository") - rungitcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ),d) - - if os.path.exists(codir): - prunedir(codir) - - bb.mkdirhier(codir) - os.chdir(repodir) - rungitcmd("git read-tree %s" % 
(tag),d) - rungitcmd("git checkout-index -q -f --prefix=%s -a" % (os.path.join(codir, "git", "")),d) - - os.chdir(codir) - bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git checkout") - rungitcmd("tar -czf %s %s" % (cofile, os.path.join(".", "*") ),d) + def go(self, d, loc): + """Fetch url""" + + (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, d)) + + tag = gettag(parm) + proto = getprotocol(parm) + + gitsrcname = '%s%s' % (host, path.replace('/', '.')) + + repofilename = 'git_%s.tar.gz' % (gitsrcname) + repofile = os.path.join(data.getVar("DL_DIR", d, 1), repofilename) + repodir = os.path.join(data.expand('${GITDIR}', d), gitsrcname) + + coname = '%s' % (tag) + codir = os.path.join(repodir, coname) + + cofile = self.localpath(loc, d) + + # tag=="master" must always update + if (tag != "master") and Fetch.try_mirror(d, localfile(loc, d)): + bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists (or was stashed). Skipping git checkout." % cofile) + return + + if not os.path.exists(repodir): + if Fetch.try_mirror(d, repofilename): + bb.mkdirhier(repodir) + os.chdir(repodir) + rungitcmd("tar -xzf %s" % (repofile),d) + else: + rungitcmd("git clone -n %s://%s%s %s" % (proto, host, path, repodir),d) + + os.chdir(repodir) + rungitcmd("git pull %s://%s%s" % (proto, host, path),d) + rungitcmd("git pull --tags %s://%s%s" % (proto, host, path),d) + rungitcmd("git prune-packed", d) + rungitcmd("git pack-redundant --all | xargs -r rm", d) + # Remove all but the .git directory + rungitcmd("rm * -Rf", d) + # old method of downloading tags + #rungitcmd("rsync -a --verbose --stats --progress rsync://%s%s/ %s" % (host, path, os.path.join(repodir, ".git", "")),d) + + os.chdir(repodir) + bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git repository") + rungitcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ),d) + + if os.path.exists(codir): + prunedir(codir) + + bb.mkdirhier(codir) + os.chdir(repodir) + rungitcmd("git 
read-tree %s" % (tag),d) + rungitcmd("git checkout-index -q -f --prefix=%s -a" % (os.path.join(codir, "git", "")),d) + + os.chdir(codir) + bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git checkout") + rungitcmd("tar -czf %s %s" % (cofile, os.path.join(".", "*") ),d) diff --git a/lib/bb/fetch/local.py b/lib/bb/fetch/local.py index 51938f823..68a63fac7 100644 --- a/lib/bb/fetch/local.py +++ b/lib/bb/fetch/local.py @@ -55,7 +55,7 @@ class Local(Fetch): return newpath localpath = staticmethod(localpath) - def go(self, urls = []): + def go(self, d, url): """Fetch urls (no-op for Local method)""" # no need to fetch local files, we'll deal with them in place. return 1 diff --git a/lib/bb/fetch/ssh.py b/lib/bb/fetch/ssh.py index 57874d5ba..4197229da 100644 --- a/lib/bb/fetch/ssh.py +++ b/lib/bb/fetch/ssh.py @@ -74,49 +74,45 @@ class SSH(Fetch): lpath = os.path.join(data.getVar('DL_DIR', d, 1), host, os.path.basename(path)) return lpath - def go(self, d, urls = []): - if not urls: - urls = self.urls - - for url in urls: - dldir = data.getVar('DL_DIR', d, 1) - - m = __pattern__.match(url) - path = m.group('path') - host = m.group('host') - port = m.group('port') - user = m.group('user') - password = m.group('pass') - - ldir = os.path.join(dldir, host) - lpath = os.path.join(ldir, os.path.basename(path)) - - if not os.path.exists(ldir): - os.makedirs(ldir) - - if port: - port = '-P %s' % port - else: - port = '' - - if user: - fr = user - if password: - fr += ':%s' % password - fr += '@%s' % host - else: - fr = host - fr += ':%s' % path - - - import commands - cmd = 'scp -B -r %s %s %s/' % ( - port, - commands.mkarg(fr), - commands.mkarg(ldir) - ) - - (exitstatus, output) = commands.getstatusoutput(cmd) - if exitstatus != 0: - print output - raise FetchError('Unable to fetch %s' % url) + def go(self, d, url): + dldir = data.getVar('DL_DIR', d, 1) + + m = __pattern__.match(url) + path = m.group('path') + host = m.group('host') + port = m.group('port') + user = 
m.group('user') + password = m.group('pass') + + ldir = os.path.join(dldir, host) + lpath = os.path.join(ldir, os.path.basename(path)) + + if not os.path.exists(ldir): + os.makedirs(ldir) + + if port: + port = '-P %s' % port + else: + port = '' + + if user: + fr = user + if password: + fr += ':%s' % password + fr += '@%s' % host + else: + fr = host + fr += ':%s' % path + + + import commands + cmd = 'scp -B -r %s %s %s/' % ( + port, + commands.mkarg(fr), + commands.mkarg(ldir) + ) + + (exitstatus, output) = commands.getstatusoutput(cmd) + if exitstatus != 0: + print output + raise FetchError('Unable to fetch %s' % url) diff --git a/lib/bb/fetch/svk.py b/lib/bb/fetch/svk.py index 3c9ccba10..46366c85a 100644 --- a/lib/bb/fetch/svk.py +++ b/lib/bb/fetch/svk.py @@ -70,84 +70,81 @@ class Svk(Fetch): return os.path.join(data.getVar("DL_DIR", d, 1),data.expand('%s_%s_%s_%s_%s.tar.gz' % ( module.replace('/', '.'), host, path.replace('/', '.'), revision, date), d)) localpath = staticmethod(localpath) - def go(self, d, urls = []): + def go(self, d, loc): """Fetch urls""" - if not urls: - urls = self.urls localdata = data.createCopy(d) data.setVar('OVERRIDES', "svk:%s" % data.getVar('OVERRIDES', localdata), localdata) data.update_data(localdata) - for loc in urls: - (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, localdata)) - if not "module" in parm: - raise MissingParameterError("svk method needs a 'module' parameter") - else: - module = parm["module"] - - dlfile = self.localpath(loc, localdata) - dldir = data.getVar('DL_DIR', localdata, 1) - -# setup svk options - options = [] - if 'rev' in parm: - revision = parm['rev'] - else: - revision = "" - - date = Fetch.getSRCDate(d) - tarfn = data.expand('%s_%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host, path.replace('/', '.'), revision, date), localdata) - data.setVar('TARFILES', dlfile, localdata) - data.setVar('TARFN', tarfn, localdata) - - if Fetch.check_for_tarball(d, tarfn, dldir, date): - 
continue - - olddir = os.path.abspath(os.getcwd()) - os.chdir(data.expand(dldir, localdata)) - - svkroot = host + path - - data.setVar('SVKROOT', svkroot, localdata) - data.setVar('SVKCOOPTS', " ".join(options), localdata) - data.setVar('SVKMODULE', module, localdata) - svkcmd = "svk co -r {%s} %s/%s" % (date, svkroot, module) - - if revision: - svkcmd = "svk co -r %s/%s" % (revision, svkroot, module) - -# create temp directory - bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: creating temporary directory") - bb.mkdirhier(data.expand('${WORKDIR}', localdata)) - data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata) - tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false") - tmpfile = tmppipe.readline().strip() - if not tmpfile: - bb.msg.error(bb.msg.domain.Fetcher, "Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.") - raise FetchError(module) - -# check out sources there - os.chdir(tmpfile) - bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc) - bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svkcmd) - myret = os.system(svkcmd) - if myret != 0: - try: - os.rmdir(tmpfile) - except OSError: - pass - raise FetchError(module) - - os.chdir(os.path.join(tmpfile, os.path.dirname(module))) -# tar them up to a defined filename - myret = os.system("tar -czf %s %s" % (os.path.join(dldir,tarfn), os.path.basename(module))) - if myret != 0: - try: - os.unlink(tarfn) - except OSError: - pass -# cleanup - os.system('rm -rf %s' % tmpfile) - os.chdir(olddir) - del localdata + (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, localdata)) + if not "module" in parm: + raise MissingParameterError("svk method needs a 'module' parameter") + else: + module = parm["module"] + + dlfile = self.localpath(loc, localdata) + dldir = data.getVar('DL_DIR', localdata, 1) + +# setup svk options + options = [] + if 'rev' in parm: + revision = parm['rev'] + else: + revision = "" + + date = 
Fetch.getSRCDate(d) + tarfn = data.expand('%s_%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host, path.replace('/', '.'), revision, date), localdata) + data.setVar('TARFILES', dlfile, localdata) + data.setVar('TARFN', tarfn, localdata) + + if Fetch.check_for_tarball(d, tarfn, dldir, date): + return + + olddir = os.path.abspath(os.getcwd()) + os.chdir(data.expand(dldir, localdata)) + + svkroot = host + path + + data.setVar('SVKROOT', svkroot, localdata) + data.setVar('SVKCOOPTS', " ".join(options), localdata) + data.setVar('SVKMODULE', module, localdata) + svkcmd = "svk co -r {%s} %s/%s" % (date, svkroot, module) + + if revision: + svkcmd = "svk co -r %s/%s" % (revision, svkroot, module) + +# create temp directory + bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: creating temporary directory") + bb.mkdirhier(data.expand('${WORKDIR}', localdata)) + data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata) + tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false") + tmpfile = tmppipe.readline().strip() + if not tmpfile: + bb.msg.error(bb.msg.domain.Fetcher, "Fetch: unable to create temporary directory.. 
make sure 'mktemp' is in the PATH.") + raise FetchError(module) + +# check out sources there + os.chdir(tmpfile) + bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc) + bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svkcmd) + myret = os.system(svkcmd) + if myret != 0: + try: + os.rmdir(tmpfile) + except OSError: + pass + raise FetchError(module) + + os.chdir(os.path.join(tmpfile, os.path.dirname(module))) +# tar them up to a defined filename + myret = os.system("tar -czf %s %s" % (os.path.join(dldir,tarfn), os.path.basename(module))) + if myret != 0: + try: + os.unlink(tarfn) + except OSError: + pass +# cleanup + os.system('rm -rf %s' % tmpfile) + os.chdir(olddir) + diff --git a/lib/bb/fetch/svn.py b/lib/bb/fetch/svn.py index ad36b017f..79d7d3d29 100644 --- a/lib/bb/fetch/svn.py +++ b/lib/bb/fetch/svn.py @@ -63,111 +63,106 @@ class Svn(Fetch): return os.path.join(data.getVar("DL_DIR", d, 1),data.expand('%s_%s_%s_%s_%s.tar.gz' % ( module.replace('/', '.'), host, path.replace('/', '.'), revision, date), d)) localpath = staticmethod(localpath) - def go(self, d, urls = []): - """Fetch urls""" - if not urls: - urls = self.urls + def go(self, d, loc): + """Fetch url""" localdata = data.createCopy(d) data.setVar('OVERRIDES', "svn:%s" % data.getVar('OVERRIDES', localdata), localdata) data.update_data(localdata) - for loc in urls: - (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, localdata)) - if not "module" in parm: - raise MissingParameterError("svn method needs a 'module' parameter") - else: - module = parm["module"] - - dlfile = self.localpath(loc, localdata) - dldir = data.getVar('DL_DIR', localdata, 1) -# if local path contains the svn -# module, consider the dir above it to be the -# download directory -# pos = dlfile.find(module) -# if pos: -# dldir = dlfile[:pos] -# else: -# dldir = os.path.dirname(dlfile) - -# setup svn options - options = [] - if 'rev' in parm: - revision = parm['rev'] - else: - revision = "" - - date = 
Fetch.getSRCDate(d) - - if "proto" in parm: - proto = parm["proto"] - else: - proto = "svn" - - svn_rsh = None - if proto == "svn+ssh" and "rsh" in parm: - svn_rsh = parm["rsh"] - - tarfn = data.expand('%s_%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host, path.replace('/', '.'), revision, date), localdata) - data.setVar('TARFILES', dlfile, localdata) - data.setVar('TARFN', tarfn, localdata) - - # try to use the tarball stash - if Fetch.check_for_tarball(d, tarfn, dldir, date): - bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping svn checkout." % tarfn) - continue - - olddir = os.path.abspath(os.getcwd()) - os.chdir(data.expand(dldir, localdata)) - - svnroot = host + path - - # either use the revision, or SRCDATE in braces, or nothing for SRCDATE = "now" - if revision: - options.append("-r %s" % revision) - elif date != "now": - options.append("-r {%s}" % date) - - data.setVar('SVNROOT', "%s://%s/%s" % (proto, svnroot, module), localdata) - data.setVar('SVNCOOPTS', " ".join(options), localdata) - data.setVar('SVNMODULE', module, localdata) - svncmd = data.getVar('FETCHCOMMAND', localdata, 1) - svnupcmd = data.getVar('UPDATECOMMAND', localdata, 1) - - if svn_rsh: - svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd) - svnupcmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svnupcmd) - - pkg=data.expand('${PN}', d) - pkgdir=os.path.join(data.expand('${SVNDIR}', localdata), pkg) - moddir=os.path.join(pkgdir, module) - bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + moddir + "'") - - if os.access(os.path.join(moddir,'.svn'), os.R_OK): - bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc) - # update sources there - os.chdir(moddir) - bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svnupcmd) - myret = os.system(svnupcmd) - else: - bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc) - # check out sources there - bb.mkdirhier(pkgdir) - os.chdir(pkgdir) - bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % 
svncmd) - myret = os.system(svncmd) - - if myret != 0: - raise FetchError(module) + (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, localdata)) + if not "module" in parm: + raise MissingParameterError("svn method needs a 'module' parameter") + else: + module = parm["module"] + dlfile = self.localpath(loc, localdata) + dldir = data.getVar('DL_DIR', localdata, 1) +# if local path contains the svn +# module, consider the dir above it to be the +# download directory +# pos = dlfile.find(module) +# if pos: +# dldir = dlfile[:pos] +# else: +# dldir = os.path.dirname(dlfile) + +# setup svn options + options = [] + if 'rev' in parm: + revision = parm['rev'] + else: + revision = "" + + date = Fetch.getSRCDate(d) + + if "proto" in parm: + proto = parm["proto"] + else: + proto = "svn" + + svn_rsh = None + if proto == "svn+ssh" and "rsh" in parm: + svn_rsh = parm["rsh"] + + tarfn = data.expand('%s_%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host, path.replace('/', '.'), revision, date), localdata) + data.setVar('TARFILES', dlfile, localdata) + data.setVar('TARFN', tarfn, localdata) + + # try to use the tarball stash + if Fetch.check_for_tarball(d, tarfn, dldir, date): + bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping svn checkout." 
% tarfn) + return + + olddir = os.path.abspath(os.getcwd()) + os.chdir(data.expand(dldir, localdata)) + + svnroot = host + path + + # either use the revision, or SRCDATE in braces, or nothing for SRCDATE = "now" + if revision: + options.append("-r %s" % revision) + elif date != "now": + options.append("-r {%s}" % date) + + data.setVar('SVNROOT', "%s://%s/%s" % (proto, svnroot, module), localdata) + data.setVar('SVNCOOPTS', " ".join(options), localdata) + data.setVar('SVNMODULE', module, localdata) + svncmd = data.getVar('FETCHCOMMAND', localdata, 1) + svnupcmd = data.getVar('UPDATECOMMAND', localdata, 1) + + if svn_rsh: + svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd) + svnupcmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svnupcmd) + + pkg=data.expand('${PN}', d) + pkgdir=os.path.join(data.expand('${SVNDIR}', localdata), pkg) + moddir=os.path.join(pkgdir, module) + bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + moddir + "'") + + if os.access(os.path.join(moddir,'.svn'), os.R_OK): + bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc) + # update sources there + os.chdir(moddir) + bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svnupcmd) + myret = os.system(svnupcmd) + else: + bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc) + # check out sources there + bb.mkdirhier(pkgdir) os.chdir(pkgdir) - # tar them up to a defined filename - myret = os.system("tar -czf %s %s" % (os.path.join(dldir,tarfn), os.path.basename(module))) - if myret != 0: - try: - os.unlink(tarfn) - except OSError: - pass - os.chdir(olddir) - - del localdata + bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svncmd) + myret = os.system(svncmd) + + if myret != 0: + raise FetchError(module) + + os.chdir(pkgdir) + # tar them up to a defined filename + myret = os.system("tar -czf %s %s" % (os.path.join(dldir,tarfn), os.path.basename(module))) + if myret != 0: + try: + os.unlink(tarfn) + except OSError: + pass + os.chdir(olddir) diff --git a/lib/bb/fetch/wget.py 
b/lib/bb/fetch/wget.py index c0097408f..1defb8e93 100644 --- a/lib/bb/fetch/wget.py +++ b/lib/bb/fetch/wget.py @@ -44,17 +44,17 @@ class Wget(Fetch): supports = staticmethod(supports) def localpath(url, d): -# strip off parameters + # strip off parameters (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d)) if "localpath" in parm: -# if user overrides local path, use it. + # if user overrides local path, use it. return parm["localpath"] url = bb.encodeurl([type, host, path, user, pswd, {}]) return os.path.join(data.getVar("DL_DIR", d), os.path.basename(url)) localpath = staticmethod(localpath) - def go(self, d, urls = []): + def go(self, d, uri): """Fetch urls""" def md5_sum(parm, d): @@ -81,7 +81,7 @@ class Wget(Fetch): # the MD5 sum we want to verify wanted_md5sum = md5_sum(parm, d) if os.path.exists(dl): -# file exists, but we didnt complete it.. trying again.. + # file exists, but we didnt complete it.. trying again.. fetchcmd = data.getVar("RESUMECOMMAND", d, 1) else: fetchcmd = data.getVar("FETCHCOMMAND", d, 1) @@ -101,7 +101,7 @@ class Wget(Fetch): bb.msg.debug(2, bb.msg.domain.Fetcher, "sourceforge.net send us to the mirror on %s" % basename) return False -# supposedly complete.. write out md5sum + # supposedly complete.. write out md5sum if bb.which(data.getVar('PATH', d), 'md5sum'): try: md5pipe = os.popen('md5sum ' + dl) @@ -119,49 +119,45 @@ class Wget(Fetch): md5out.close() return True - if not urls: - urls = self.urls - localdata = data.createCopy(d) data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata) data.update_data(localdata) - for uri in urls: - completed = 0 - (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(uri, localdata)) - basename = os.path.basename(path) - dl = self.localpath(uri, d) - dl = data.expand(dl, localdata) - md5 = dl + '.md5' - - if os.path.exists(md5): -# complete, nothing to see here.. 
- continue - - premirrors = [ i.split() for i in (data.getVar('PREMIRRORS', localdata, 1) or "").split('\n') if i ] - for (find, replace) in premirrors: - newuri = uri_replace(uri, find, replace, d) - if newuri != uri: - if fetch_uri(newuri, basename, dl, md5, parm, localdata): - completed = 1 - break - - if completed: - continue - - if fetch_uri(uri, basename, dl, md5, parm, localdata): - continue - -# try mirrors - mirrors = [ i.split() for i in (data.getVar('MIRRORS', localdata, 1) or "").split('\n') if i ] - for (find, replace) in mirrors: - newuri = uri_replace(uri, find, replace, d) - if newuri != uri: - if fetch_uri(newuri, basename, dl, md5, parm, localdata): - completed = 1 - break - - if not completed: - raise FetchError(uri) - - del localdata + completed = 0 + (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(uri, localdata)) + basename = os.path.basename(path) + dl = self.localpath(uri, d) + dl = data.expand(dl, localdata) + md5 = dl + '.md5' + + if os.path.exists(md5): + #complete, nothing to see here.. + #touch md5 file to show activity + os.utime(md5, None) + return + + premirrors = [ i.split() for i in (data.getVar('PREMIRRORS', localdata, 1) or "").split('\n') if i ] + for (find, replace) in premirrors: + newuri = uri_replace(uri, find, replace, d) + if newuri != uri: + if fetch_uri(newuri, basename, dl, md5, parm, localdata): + completed = 1 + break + + if completed: + return + + if fetch_uri(uri, basename, dl, md5, parm, localdata): + return + + # try mirrors + mirrors = [ i.split() for i in (data.getVar('MIRRORS', localdata, 1) or "").split('\n') if i ] + for (find, replace) in mirrors: + newuri = uri_replace(uri, find, replace, d) + if newuri != uri: + if fetch_uri(newuri, basename, dl, md5, parm, localdata): + completed = 1 + break + + if not completed: + raise FetchError(uri) |