author     Richard Purdie <richard.purdie@linuxfoundation.org>    2011-02-04 14:40:41 +0000
committer  Richard Purdie <richard.purdie@linuxfoundation.org>    2011-02-10 23:37:07 +0000
commit     b5dd674a96736c78f5fb3ad787e568eef07be23b (patch)
tree       8207cbd5558d40f69208e61e500c6881fb466d91 /lib
parent     a6ea08e7ab08772144abc65836dc272c4012afa3 (diff)
download   bitbake-b5dd674a96736c78f5fb3ad787e568eef07be23b.tar.gz
bitbake/fetch2: Use True instead of integer values
(From Poky rev: 7202a77134029cb37540c785ce0161a4dd574853)

Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
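The change is mechanical: the third argument to bb.data.getVar() / data.getVar() is the expand flag, which the older code passed as the integer 1. Since 1 and True are both truthy, behaviour is unchanged; the boolean simply makes the intent explicit. A minimal before/after sketch, taken from the first hunk below:

    # old style: expand flag passed as a bare integer
    srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear"

    # this commit: expand flag passed as an explicit boolean
    srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, True) or "clear"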
Diffstat (limited to 'lib')
-rw-r--r--   lib/bb/fetch2/__init__.py   30
-rw-r--r--   lib/bb/fetch2/cvs.py         4
-rw-r--r--   lib/bb/fetch2/git.py         4
-rw-r--r--   lib/bb/fetch2/local.py       4
-rw-r--r--   lib/bb/fetch2/perforce.py   10
-rw-r--r--   lib/bb/fetch2/ssh.py         2
-rw-r--r--   lib/bb/fetch2/svk.py         2
-rw-r--r--   lib/bb/fetch2/wget.py        6
8 files changed, 31 insertions, 31 deletions
diff --git a/lib/bb/fetch2/__init__.py b/lib/bb/fetch2/__init__.py
index c7e058d0d..cf9456b75 100644
--- a/lib/bb/fetch2/__init__.py
+++ b/lib/bb/fetch2/__init__.py
@@ -201,7 +201,7 @@ def fetcher_init(d):
"""
pd = persist_data.persist(d)
# When to drop SCM head revisions controlled by user policy
- srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear"
+ srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, True) or "clear"
if srcrev_policy == "cache":
logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
elif srcrev_policy == "clear":
@@ -322,7 +322,7 @@ def get_srcrev(d):
#
# Mutiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
#
- format = bb.data.getVar('SRCREV_FORMAT', d, 1)
+ format = bb.data.getVar('SRCREV_FORMAT', d, True)
if not format:
raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
@@ -410,7 +410,7 @@ def try_mirrors(d, uri, mirrors, check = False, force = False):
uri is the original uri we're trying to download
mirrors is the list of mirrors we're going to try
"""
- fpath = os.path.join(data.getVar("DL_DIR", d, 1), os.path.basename(uri))
+ fpath = os.path.join(data.getVar("DL_DIR", d, True), os.path.basename(uri))
if not check and os.access(fpath, os.R_OK) and not force:
logger.debug(1, "%s already exists, skipping checkout.", fpath)
return fpath
@@ -463,12 +463,12 @@ def srcrev_internal_helper(ud, d, name):
rev = None
if name != '':
- pn = data.getVar("PN", d, 1)
- rev = data.getVar("SRCREV_%s_pn-%s" % (name, pn), d, 1)
+ pn = data.getVar("PN", d, True)
+ rev = data.getVar("SRCREV_%s_pn-%s" % (name, pn), d, True)
if not rev:
- rev = data.getVar("SRCREV_%s" % name, d, 1)
+ rev = data.getVar("SRCREV_%s" % name, d, True)
if not rev:
- rev = data.getVar("SRCREV", d, 1)
+ rev = data.getVar("SRCREV", d, True)
if rev == "INVALID":
raise FetchError("Please set SRCREV to a valid value", ud.url)
if rev == "AUTOINC":
@@ -618,7 +618,7 @@ class FetchMethod(object):
file = urldata.localpath
dots = file.split(".")
if dots[-1] in ['gz', 'bz2', 'Z']:
- efile = os.path.join(bb.data.getVar('WORKDIR', data, 1),os.path.basename('.'.join(dots[0:-1])))
+ efile = os.path.join(bb.data.getVar('WORKDIR', data, True),os.path.basename('.'.join(dots[0:-1])))
else:
efile = file
cmd = None
@@ -642,7 +642,7 @@ class FetchMethod(object):
cmd = '%s -a' % cmd
cmd = "%s '%s'" % (cmd, file)
elif os.path.isdir(file):
- filesdir = os.path.realpath(bb.data.getVar("FILESDIR", data, 1))
+ filesdir = os.path.realpath(bb.data.getVar("FILESDIR", data, True))
destdir = "."
if file[0:len(filesdir)] == filesdir:
destdir = file[len(filesdir):file.rfind('/')]
@@ -679,7 +679,7 @@ class FetchMethod(object):
bb.mkdirhier(newdir)
os.chdir(newdir)
- cmd = "PATH=\"%s\" %s" % (bb.data.getVar('PATH', data, 1), cmd)
+ cmd = "PATH=\"%s\" %s" % (bb.data.getVar('PATH', data, True), cmd)
bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)
@@ -718,10 +718,10 @@ class FetchMethod(object):
localcount = None
if name != '':
- pn = data.getVar("PN", d, 1)
- localcount = data.getVar("LOCALCOUNT_" + name, d, 1)
+ pn = data.getVar("PN", d, True)
+ localcount = data.getVar("LOCALCOUNT_" + name, d, True)
if not localcount:
- localcount = data.getVar("LOCALCOUNT", d, 1)
+ localcount = data.getVar("LOCALCOUNT", d, True)
return localcount
localcount_internal_helper = staticmethod(localcount_internal_helper)
@@ -789,12 +789,12 @@ class FetchMethod(object):
class Fetch(object):
def __init__(self, urls, d):
if len(urls) == 0:
- urls = d.getVar("SRC_URI", 1).split()
+ urls = d.getVar("SRC_URI", True).split()
self.urls = urls
self.d = d
self.ud = {}
- fn = bb.data.getVar('FILE', d, 1)
+ fn = bb.data.getVar('FILE', d, True)
if fn in urldata_cache:
self.ud = urldata_cache[fn]
diff --git a/lib/bb/fetch2/cvs.py b/lib/bb/fetch2/cvs.py
index b440ed7b1..b77e742c3 100644
--- a/lib/bb/fetch2/cvs.py
+++ b/lib/bb/fetch2/cvs.py
@@ -115,8 +115,8 @@ class Cvs(FetchMethod):
data.setVar('CVSROOT', cvsroot, localdata)
data.setVar('CVSCOOPTS', " ".join(options), localdata)
data.setVar('CVSMODULE', ud.module, localdata)
- cvscmd = data.getVar('FETCHCOMMAND', localdata, 1)
- cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, 1)
+ cvscmd = data.getVar('FETCHCOMMAND', localdata, True)
+ cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, True)
if cvs_rsh:
cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
diff --git a/lib/bb/fetch2/git.py b/lib/bb/fetch2/git.py
index 38e2c93be..35031d28c 100644
--- a/lib/bb/fetch2/git.py
+++ b/lib/bb/fetch2/git.py
@@ -101,7 +101,7 @@ class Git(FetchMethod):
else:
username = ""
- repofile = os.path.join(data.getVar("DL_DIR", d, 1), ud.mirrortarball)
+ repofile = os.path.join(data.getVar("DL_DIR", d, True), ud.mirrortarball)
ud.repochanged = not os.path.exists(repofile)
@@ -138,7 +138,7 @@ class Git(FetchMethod):
def build_mirror_data(self, url, ud, d):
# Generate a mirror tarball if needed
- repofile = os.path.join(data.getVar("DL_DIR", d, 1), ud.mirrortarball)
+ repofile = os.path.join(data.getVar("DL_DIR", d, True), ud.mirrortarball)
os.chdir(ud.clonedir)
mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True)
diff --git a/lib/bb/fetch2/local.py b/lib/bb/fetch2/local.py
index 7b840a46d..d77d39375 100644
--- a/lib/bb/fetch2/local.py
+++ b/lib/bb/fetch2/local.py
@@ -50,11 +50,11 @@ class Local(FetchMethod):
path = path.split(";")[0]
newpath = path
if path[0] != "/":
- filespath = data.getVar('FILESPATH', d, 1)
+ filespath = data.getVar('FILESPATH', d, True)
if filespath:
newpath = bb.utils.which(filespath, path)
if not newpath:
- filesdir = data.getVar('FILESDIR', d, 1)
+ filesdir = data.getVar('FILESDIR', d, True)
if filesdir:
newpath = os.path.join(filesdir, path)
return newpath
diff --git a/lib/bb/fetch2/perforce.py b/lib/bb/fetch2/perforce.py
index 583dfb93b..e3e9c71da 100644
--- a/lib/bb/fetch2/perforce.py
+++ b/lib/bb/fetch2/perforce.py
@@ -79,7 +79,7 @@ class Perforce(FetchMethod):
if host:
p4opt += " -p %s" % (host)
- p4date = data.getVar("P4DATE", d, 1)
+ p4date = data.getVar("P4DATE", d, True)
if "revision" in parm:
depot += "#%s" % (parm["revision"])
elif "label" in parm:
@@ -87,7 +87,7 @@ class Perforce(FetchMethod):
elif p4date:
depot += "@%s" % (p4date)
- p4cmd = data.getVar('FETCHCOMMAND_p4', d, 1)
+ p4cmd = data.getVar('FETCHCOMMAND_p4', d, True)
logger.debug(1, "Running %s%s changes -m 1 %s", p4cmd, p4opt, depot)
p4file = os.popen("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
cset = p4file.readline().strip()
@@ -105,7 +105,7 @@ class Perforce(FetchMethod):
if "label" in parm:
ud.localfile = "%s.tar.gz" % (parm["label"])
- return os.path.join(data.getVar("DL_DIR", d, 1), ud.localfile)
+ return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
base = path
which = path.find('/...')
@@ -147,13 +147,13 @@ class Perforce(FetchMethod):
if host:
p4opt += " -p %s" % (host)
- p4cmd = data.getVar('FETCHCOMMAND', localdata, 1)
+ p4cmd = data.getVar('FETCHCOMMAND', localdata, True)
# create temp directory
logger.debug(2, "Fetch: creating temporary directory")
bb.mkdirhier(data.expand('${WORKDIR}', localdata))
data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata)
- tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
+ tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
tmpfile = tmppipe.readline().strip()
if not tmpfile:
raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", loc)
diff --git a/lib/bb/fetch2/ssh.py b/lib/bb/fetch2/ssh.py
index 8b07b3a71..081fe1335 100644
--- a/lib/bb/fetch2/ssh.py
+++ b/lib/bb/fetch2/ssh.py
@@ -75,7 +75,7 @@ class SSH(FetchMethod):
return lpath
def download(self, url, urldata, d):
- dldir = data.getVar('DL_DIR', d, 1)
+ dldir = data.getVar('DL_DIR', d, True)
m = __pattern__.match(url)
path = m.group('path')
diff --git a/lib/bb/fetch2/svk.py b/lib/bb/fetch2/svk.py
index 213d0d3ec..70f72c80a 100644
--- a/lib/bb/fetch2/svk.py
+++ b/lib/bb/fetch2/svk.py
@@ -72,7 +72,7 @@ class Svk(FetchMethod):
logger.debug(2, "Fetch: creating temporary directory")
bb.mkdirhier(data.expand('${WORKDIR}', localdata))
data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
- tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
+ tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
tmpfile = tmppipe.readline().strip()
if not tmpfile:
logger.error()
diff --git a/lib/bb/fetch2/wget.py b/lib/bb/fetch2/wget.py
index 8e34b0c2b..5a15147e6 100644
--- a/lib/bb/fetch2/wget.py
+++ b/lib/bb/fetch2/wget.py
@@ -50,12 +50,12 @@ class Wget(FetchMethod):
def fetch_uri(uri, ud, d):
if checkonly:
- fetchcmd = data.getVar("CHECKCOMMAND", d, 1)
+ fetchcmd = data.getVar("CHECKCOMMAND", d, True)
elif os.path.exists(ud.localpath):
# file exists, but we didnt complete it.. trying again..
- fetchcmd = data.getVar("RESUMECOMMAND", d, 1)
+ fetchcmd = data.getVar("RESUMECOMMAND", d, True)
else:
- fetchcmd = data.getVar("FETCHCOMMAND", d, 1)
+ fetchcmd = data.getVar("FETCHCOMMAND", d, True)
uri = uri.split(";")[0]
uri_decoded = list(decodeurl(uri))