author     Joshua Watt <JPEWhacker@gmail.com>                    2021-02-09 09:50:21 -0600
committer  Richard Purdie <richard.purdie@linuxfoundation.org>   2021-02-10 23:47:40 +0000
commit     f68682a79d83e6399eb403f30a1f113516575f51 (patch)
tree       9b6810d8893aac3fd24c90e62e1659c4a13de0f9 /lib/bb/fetch2
parent     ceddb5b3d229b83c172656053cd29aeb521fcce0 (diff)
download   bitbake-contrib-f68682a79d83e6399eb403f30a1f113516575f51.tar.gz
logging: Make bitbake logger compatible with python logger
The bitbake logger overrode the definition of the debug() logging call
to include a debug level, but this causes problems with code that may
be using standard python logging, since the extra argument is
interpreted differently.
Instead, change the bitbake logger's debug() call to match the python
logger call and add debug2() and debug3() APIs to replace calls that
were logging at a different debug level.
[RP: Small fix to ensure bb.debug calls bbdebug()]
Signed-off-by: Joshua Watt <JPEWhacker@gmail.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
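
To illustrate the API change at the call sites touched below, a minimal
before/after sketch using calls taken from this patch (the surrounding
variables are assumed to be in scope):

    # Before: bitbake-specific signature, numeric debug level as first argument
    logger.debug(1, "Running %s", cmd)
    logger.debug(2, "For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))

    # After: standard python logging signature; finer levels move to new helpers
    logger.debug("Running %s", cmd)
    logger.debug2("For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))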
Diffstat (limited to 'lib/bb/fetch2')
-rw-r--r--   lib/bb/fetch2/__init__.py    | 38
-rw-r--r--   lib/bb/fetch2/bzr.py         |  8
-rw-r--r--   lib/bb/fetch2/clearcase.py   |  2
-rw-r--r--   lib/bb/fetch2/cvs.py         |  4
-rw-r--r--   lib/bb/fetch2/gitsm.py       |  4
-rw-r--r--   lib/bb/fetch2/hg.py          | 16
-rw-r--r--   lib/bb/fetch2/local.py       |  4
-rw-r--r--   lib/bb/fetch2/osc.py         |  6
-rw-r--r--   lib/bb/fetch2/perforce.py    | 10
-rw-r--r--   lib/bb/fetch2/repo.py        |  2
-rw-r--r--   lib/bb/fetch2/svn.py         |  6
-rw-r--r--   lib/bb/fetch2/wget.py        |  6
12 files changed, 53 insertions, 53 deletions
diff --git a/lib/bb/fetch2/__init__.py b/lib/bb/fetch2/__init__.py
index ee3d7b167..19169d780 100644
--- a/lib/bb/fetch2/__init__.py
+++ b/lib/bb/fetch2/__init__.py
@@ -428,7 +428,7 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
     uri_decoded = list(decodeurl(ud.url))
     uri_find_decoded = list(decodeurl(uri_find))
     uri_replace_decoded = list(decodeurl(uri_replace))
-    logger.debug(2, "For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
+    logger.debug2("For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
     result_decoded = ['', '', '', '', '', {}]
     for loc, i in enumerate(uri_find_decoded):
         result_decoded[loc] = uri_decoded[loc]
@@ -474,7 +474,7 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
     result = encodeurl(result_decoded)
     if result == ud.url:
         return None
-    logger.debug(2, "For url %s returning %s" % (ud.url, result))
+    logger.debug2("For url %s returning %s" % (ud.url, result))
     return result

 methods = []
@@ -499,9 +499,9 @@ def fetcher_init(d):
     # When to drop SCM head revisions controlled by user policy
     srcrev_policy = d.getVar('BB_SRCREV_POLICY') or "clear"
     if srcrev_policy == "cache":
-        logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
+        logger.debug("Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
     elif srcrev_policy == "clear":
-        logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
+        logger.debug("Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
         revs.clear()
     else:
         raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
@@ -857,9 +857,9 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
     cmd = 'export PSEUDO_DISABLED=1; ' + cmd

     if workdir:
-        logger.debug(1, "Running '%s' in %s" % (cmd, workdir))
+        logger.debug("Running '%s' in %s" % (cmd, workdir))
     else:
-        logger.debug(1, "Running %s", cmd)
+        logger.debug("Running %s", cmd)

     success = False
     error_message = ""
@@ -900,7 +900,7 @@ def check_network_access(d, info, url):
     elif not trusted_network(d, url):
         raise UntrustedUrl(url, info)
     else:
-        logger.debug(1, "Fetcher accessed the network with the command %s" % info)
+        logger.debug("Fetcher accessed the network with the command %s" % info)

 def build_mirroruris(origud, mirrors, ld):
     uris = []
@@ -926,7 +926,7 @@ def build_mirroruris(origud, mirrors, ld):
                     continue

                 if not trusted_network(ld, newuri):
-                    logger.debug(1, "Mirror %s not in the list of trusted networks, skipping" % (newuri))
+                    logger.debug("Mirror %s not in the list of trusted networks, skipping" % (newuri))
                     continue

                 # Create a local copy of the mirrors minus the current line
@@ -939,8 +939,8 @@
                     newud = FetchData(newuri, ld)
                     newud.setup_localpath(ld)
                 except bb.fetch2.BBFetchException as e:
-                    logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
-                    logger.debug(1, str(e))
+                    logger.debug("Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
+                    logger.debug(str(e))
                     try:
                         # setup_localpath of file:// urls may fail, we should still see
                         # if mirrors of the url exist
@@ -1043,8 +1043,8 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
         elif isinstance(e, NoChecksumError):
             raise
         else:
-            logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url))
-            logger.debug(1, str(e))
+            logger.debug("Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url))
+            logger.debug(str(e))
         try:
             ud.method.clean(ud, ld)
         except UnboundLocalError:
@@ -1688,7 +1688,7 @@ class Fetch(object):
                 if m.verify_donestamp(ud, self.d) and not m.need_update(ud, self.d):
                     done = True
                 elif m.try_premirror(ud, self.d):
-                    logger.debug(1, "Trying PREMIRRORS")
+                    logger.debug("Trying PREMIRRORS")
                     mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
                     done = m.try_mirrors(self, ud, self.d, mirrors)
                     if done:
@@ -1698,7 +1698,7 @@ class Fetch(object):
                             m.update_donestamp(ud, self.d)
                         except ChecksumError as e:
                             logger.warning("Checksum failure encountered with premirror download of %s - will attempt other sources." % u)
-                            logger.debug(1, str(e))
+                            logger.debug(str(e))
                             done = False

                 if premirroronly:
@@ -1710,7 +1710,7 @@ class Fetch(object):
                     try:
                         if not trusted_network(self.d, ud.url):
                             raise UntrustedUrl(ud.url)
-                        logger.debug(1, "Trying Upstream")
+                        logger.debug("Trying Upstream")
                         m.download(ud, self.d)
                         if hasattr(m, "build_mirror_data"):
                             m.build_mirror_data(ud, self.d)
@@ -1725,19 +1725,19 @@ class Fetch(object):
                     except BBFetchException as e:
                         if isinstance(e, ChecksumError):
                             logger.warning("Checksum failure encountered with download of %s - will attempt other sources if available" % u)
-                            logger.debug(1, str(e))
+                            logger.debug(str(e))
                             if os.path.exists(ud.localpath):
                                 rename_bad_checksum(ud, e.checksum)
                         elif isinstance(e, NoChecksumError):
                             raise
                         else:
                             logger.warning('Failed to fetch URL %s, attempting MIRRORS if available' % u)
-                            logger.debug(1, str(e))
+                            logger.debug(str(e))
                         firsterr = e
                         # Remove any incomplete fetch
                         if not verified_stamp:
                             m.clean(ud, self.d)
-                        logger.debug(1, "Trying MIRRORS")
+                        logger.debug("Trying MIRRORS")
                         mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
                         done = m.try_mirrors(self, ud, self.d, mirrors)

@@ -1774,7 +1774,7 @@ class Fetch(object):
             ud = self.ud[u]
             ud.setup_localpath(self.d)
             m = ud.method
-            logger.debug(1, "Testing URL %s", u)
+            logger.debug("Testing URL %s", u)
             # First try checking uri, u, from PREMIRRORS
             mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
             ret = m.try_mirrors(self, ud, self.d, mirrors, True)
diff --git a/lib/bb/fetch2/bzr.py b/lib/bb/fetch2/bzr.py
index 566ace9f0..fc558f50b 100644
--- a/lib/bb/fetch2/bzr.py
+++ b/lib/bb/fetch2/bzr.py
@@ -74,16 +74,16 @@ class Bzr(FetchMethod):

         if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK):
             bzrcmd = self._buildbzrcommand(ud, d, "update")
-            logger.debug(1, "BZR Update %s", ud.url)
+            logger.debug("BZR Update %s", ud.url)
             bb.fetch2.check_network_access(d, bzrcmd, ud.url)
             runfetchcmd(bzrcmd, d, workdir=os.path.join(ud.pkgdir, os.path.basename(ud.path)))
         else:
             bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True)
             bzrcmd = self._buildbzrcommand(ud, d, "fetch")
             bb.fetch2.check_network_access(d, bzrcmd, ud.url)
-            logger.debug(1, "BZR Checkout %s", ud.url)
+            logger.debug("BZR Checkout %s", ud.url)
             bb.utils.mkdirhier(ud.pkgdir)
-            logger.debug(1, "Running %s", bzrcmd)
+            logger.debug("Running %s", bzrcmd)
             runfetchcmd(bzrcmd, d, workdir=ud.pkgdir)

         scmdata = ud.parm.get("scmdata", "")
@@ -109,7 +109,7 @@ class Bzr(FetchMethod):
         """
         Return the latest upstream revision number
         """
-        logger.debug(2, "BZR fetcher hitting network for %s", ud.url)
+        logger.debug2("BZR fetcher hitting network for %s", ud.url)

         bb.fetch2.check_network_access(d, self._buildbzrcommand(ud, d, "revno"), ud.url)

diff --git a/lib/bb/fetch2/clearcase.py b/lib/bb/fetch2/clearcase.py
index 49d7ae1b0..1a9c86376 100644
--- a/lib/bb/fetch2/clearcase.py
+++ b/lib/bb/fetch2/clearcase.py
@@ -70,7 +70,7 @@ class ClearCase(FetchMethod):
         return ud.type in ['ccrc']

     def debug(self, msg):
-        logger.debug(1, "ClearCase: %s", msg)
+        logger.debug("ClearCase: %s", msg)

     def urldata_init(self, ud, d):
         """
diff --git a/lib/bb/fetch2/cvs.py b/lib/bb/fetch2/cvs.py
index 22abdef79..01de5ff4c 100644
--- a/lib/bb/fetch2/cvs.py
+++ b/lib/bb/fetch2/cvs.py
@@ -109,7 +109,7 @@ class Cvs(FetchMethod):
             cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)

         # create module directory
-        logger.debug(2, "Fetch: checking for module directory")
+        logger.debug2("Fetch: checking for module directory")
         moddir = os.path.join(ud.pkgdir, localdir)
         workdir = None
         if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
@@ -123,7 +123,7 @@ class Cvs(FetchMethod):
             # check out sources there
             bb.utils.mkdirhier(ud.pkgdir)
             workdir = ud.pkgdir
-            logger.debug(1, "Running %s", cvscmd)
+            logger.debug("Running %s", cvscmd)
             bb.fetch2.check_network_access(d, cvscmd, ud.url)
             cmd = cvscmd

diff --git a/lib/bb/fetch2/gitsm.py b/lib/bb/fetch2/gitsm.py
index d6e5c5c05..a4527bf36 100644
--- a/lib/bb/fetch2/gitsm.py
+++ b/lib/bb/fetch2/gitsm.py
@@ -78,7 +78,7 @@ class GitSM(Git):
                     module_hash = ""

                 if not module_hash:
-                    logger.debug(1, "submodule %s is defined, but is not initialized in the repository. Skipping", m)
+                    logger.debug("submodule %s is defined, but is not initialized in the repository. Skipping", m)
                     continue

                 submodules.append(m)
@@ -179,7 +179,7 @@ class GitSM(Git):
                     (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=ud.clonedir)

         if len(need_update_list) > 0:
-            logger.debug(1, 'gitsm: Submodules requiring update: %s' % (' '.join(need_update_list)))
+            logger.debug('gitsm: Submodules requiring update: %s' % (' '.join(need_update_list)))
             return True

         return False
diff --git a/lib/bb/fetch2/hg.py b/lib/bb/fetch2/hg.py
index 8f503701e..063e13008 100644
--- a/lib/bb/fetch2/hg.py
+++ b/lib/bb/fetch2/hg.py
@@ -150,7 +150,7 @@ class Hg(FetchMethod):
     def download(self, ud, d):
         """Fetch url"""

-        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
+        logger.debug2("Fetch: checking for module directory '" + ud.moddir + "'")

         # If the checkout doesn't exist and the mirror tarball does, extract it
         if not os.path.exists(ud.pkgdir) and os.path.exists(ud.fullmirror):
@@ -160,7 +160,7 @@ class Hg(FetchMethod):
         if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
             # Found the source, check whether need pull
             updatecmd = self._buildhgcommand(ud, d, "update")
-            logger.debug(1, "Running %s", updatecmd)
+            logger.debug("Running %s", updatecmd)
             try:
                 runfetchcmd(updatecmd, d, workdir=ud.moddir)
             except bb.fetch2.FetchError:
@@ -168,7 +168,7 @@ class Hg(FetchMethod):
                 pullcmd = self._buildhgcommand(ud, d, "pull")
                 logger.info("Pulling " + ud.url)
                 # update sources there
-                logger.debug(1, "Running %s", pullcmd)
+                logger.debug("Running %s", pullcmd)
                 bb.fetch2.check_network_access(d, pullcmd, ud.url)
                 runfetchcmd(pullcmd, d, workdir=ud.moddir)
                 try:
@@ -183,14 +183,14 @@ class Hg(FetchMethod):
             logger.info("Fetch " + ud.url)
             # check out sources there
             bb.utils.mkdirhier(ud.pkgdir)
-            logger.debug(1, "Running %s", fetchcmd)
+            logger.debug("Running %s", fetchcmd)
             bb.fetch2.check_network_access(d, fetchcmd, ud.url)
             runfetchcmd(fetchcmd, d, workdir=ud.pkgdir)

         # Even when we clone (fetch), we still need to update as hg's clone
         # won't checkout the specified revision if its on a branch
         updatecmd = self._buildhgcommand(ud, d, "update")
-        logger.debug(1, "Running %s", updatecmd)
+        logger.debug("Running %s", updatecmd)
         runfetchcmd(updatecmd, d, workdir=ud.moddir)

     def clean(self, ud, d):
@@ -247,9 +247,9 @@ class Hg(FetchMethod):
         if scmdata != "nokeep":
             proto = ud.parm.get('protocol', 'http')
             if not os.access(os.path.join(codir, '.hg'), os.R_OK):
-                logger.debug(2, "Unpack: creating new hg repository in '" + codir + "'")
+                logger.debug2("Unpack: creating new hg repository in '" + codir + "'")
                 runfetchcmd("%s init %s" % (ud.basecmd, codir), d)
-            logger.debug(2, "Unpack: updating source in '" + codir + "'")
+            logger.debug2("Unpack: updating source in '" + codir + "'")
             if ud.user and ud.pswd:
                 runfetchcmd("%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" pull %s" % (ud.basecmd, ud.user, ud.pswd, proto, ud.moddir), d, workdir=codir)
             else:
@@ -259,5 +259,5 @@ class Hg(FetchMethod):
             else:
                 runfetchcmd("%s up -C %s" % (ud.basecmd, revflag), d, workdir=codir)
         else:
-            logger.debug(2, "Unpack: extracting source to '" + codir + "'")
+            logger.debug2("Unpack: extracting source to '" + codir + "'")
             runfetchcmd("%s archive -t files %s %s" % (ud.basecmd, revflag, codir), d, workdir=ud.moddir)
diff --git a/lib/bb/fetch2/local.py b/lib/bb/fetch2/local.py
index 25d4557db..e7d1c8c58 100644
--- a/lib/bb/fetch2/local.py
+++ b/lib/bb/fetch2/local.py
@@ -54,12 +54,12 @@ class Local(FetchMethod):
             return [path]
         filespath = d.getVar('FILESPATH')
         if filespath:
-            logger.debug(2, "Searching for %s in paths:\n    %s" % (path, "\n    ".join(filespath.split(":"))))
+            logger.debug2("Searching for %s in paths:\n    %s" % (path, "\n    ".join(filespath.split(":"))))
             newpath, hist = bb.utils.which(filespath, path, history=True)
             searched.extend(hist)
         if not os.path.exists(newpath):
             dldirfile = os.path.join(d.getVar("DL_DIR"), path)
-            logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path))
+            logger.debug2("Defaulting to %s for %s" % (dldirfile, path))
             bb.utils.mkdirhier(os.path.dirname(dldirfile))
             searched.append(dldirfile)
             return searched
diff --git a/lib/bb/fetch2/osc.py b/lib/bb/fetch2/osc.py
index 3a6cd2951..d9ce44390 100644
--- a/lib/bb/fetch2/osc.py
+++ b/lib/bb/fetch2/osc.py
@@ -84,13 +84,13 @@ class Osc(FetchMethod):
         Fetch url
         """

-        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
+        logger.debug2("Fetch: checking for module directory '" + ud.moddir + "'")

         if os.access(os.path.join(d.getVar('OSCDIR'), ud.path, ud.module), os.R_OK):
             oscupdatecmd = self._buildosccommand(ud, d, "update")
             logger.info("Update "+ ud.url)
             # update sources there
-            logger.debug(1, "Running %s", oscupdatecmd)
+            logger.debug("Running %s", oscupdatecmd)
             bb.fetch2.check_network_access(d, oscupdatecmd, ud.url)
             runfetchcmd(oscupdatecmd, d, workdir=ud.moddir)
         else:
@@ -98,7 +98,7 @@ class Osc(FetchMethod):
             logger.info("Fetch " + ud.url)
             # check out sources there
             bb.utils.mkdirhier(ud.pkgdir)
-            logger.debug(1, "Running %s", oscfetchcmd)
+            logger.debug("Running %s", oscfetchcmd)
             bb.fetch2.check_network_access(d, oscfetchcmd, ud.url)
             runfetchcmd(oscfetchcmd, d, workdir=ud.pkgdir)

diff --git a/lib/bb/fetch2/perforce.py b/lib/bb/fetch2/perforce.py
index da6d33746..e2a41a4a1 100644
--- a/lib/bb/fetch2/perforce.py
+++ b/lib/bb/fetch2/perforce.py
@@ -90,16 +90,16 @@ class Perforce(FetchMethod):
         p4port = d.getVar('P4PORT')

         if p4port:
-            logger.debug(1, 'Using recipe provided P4PORT: %s' % p4port)
+            logger.debug('Using recipe provided P4PORT: %s' % p4port)
             ud.host = p4port
         else:
-            logger.debug(1, 'Trying to use P4CONFIG to automatically set P4PORT...')
+            logger.debug('Trying to use P4CONFIG to automatically set P4PORT...')
             ud.usingp4config = True
             p4cmd = '%s info | grep "Server address"' % ud.basecmd
             bb.fetch2.check_network_access(d, p4cmd, ud.url)
             ud.host = runfetchcmd(p4cmd, d, True)
             ud.host = ud.host.split(': ')[1].strip()
-            logger.debug(1, 'Determined P4PORT to be: %s' % ud.host)
+            logger.debug('Determined P4PORT to be: %s' % ud.host)
             if not ud.host:
                 raise FetchError('Could not determine P4PORT from P4CONFIG')

@@ -208,7 +208,7 @@ class Perforce(FetchMethod):
         for filename in p4fileslist:
             item = filename.split(' - ')
             lastaction = item[1].split()
-            logger.debug(1, 'File: %s Last Action: %s' % (item[0], lastaction[0]))
+            logger.debug('File: %s Last Action: %s' % (item[0], lastaction[0]))
             if lastaction[0] == 'delete':
                 continue
             filelist.append(item[0])
@@ -255,7 +255,7 @@ class Perforce(FetchMethod):
             raise FetchError('Could not determine the latest perforce changelist')

         tipcset = tip.split(' ')[1]
-        logger.debug(1, 'p4 tip found to be changelist %s' % tipcset)
+        logger.debug('p4 tip found to be changelist %s' % tipcset)
         return tipcset

     def sortable_revision(self, ud, d, name):
diff --git a/lib/bb/fetch2/repo.py b/lib/bb/fetch2/repo.py
index 2bdbbd409..fa4cb8149 100644
--- a/lib/bb/fetch2/repo.py
+++ b/lib/bb/fetch2/repo.py
@@ -47,7 +47,7 @@ class Repo(FetchMethod):
         """Fetch url"""

         if os.access(os.path.join(d.getVar("DL_DIR"), ud.localfile), os.R_OK):
-            logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
+            logger.debug("%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
             return

         repodir = d.getVar("REPODIR") or (d.getVar("DL_DIR") + "/repo")
diff --git a/lib/bb/fetch2/svn.py b/lib/bb/fetch2/svn.py
index 971a5add4..8856ef1c6 100644
--- a/lib/bb/fetch2/svn.py
+++ b/lib/bb/fetch2/svn.py
@@ -116,7 +116,7 @@ class Svn(FetchMethod):
     def download(self, ud, d):
         """Fetch url"""

-        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
+        logger.debug2("Fetch: checking for module directory '" + ud.moddir + "'")

         lf = bb.utils.lockfile(ud.svnlock)

@@ -129,7 +129,7 @@ class Svn(FetchMethod):
                     runfetchcmd(ud.basecmd + " upgrade", d, workdir=ud.moddir)
                 except FetchError:
                     pass
-                logger.debug(1, "Running %s", svncmd)
+                logger.debug("Running %s", svncmd)
                 bb.fetch2.check_network_access(d, svncmd, ud.url)
                 runfetchcmd(svncmd, d, workdir=ud.moddir)
             else:
@@ -137,7 +137,7 @@ class Svn(FetchMethod):
                 logger.info("Fetch " + ud.url)
                 # check out sources there
                 bb.utils.mkdirhier(ud.pkgdir)
-                logger.debug(1, "Running %s", svncmd)
+                logger.debug("Running %s", svncmd)
                 bb.fetch2.check_network_access(d, svncmd, ud.url)
                 runfetchcmd(svncmd, d, workdir=ud.pkgdir)

diff --git a/lib/bb/fetch2/wget.py b/lib/bb/fetch2/wget.py
index e952f411c..78a49676f 100644
--- a/lib/bb/fetch2/wget.py
+++ b/lib/bb/fetch2/wget.py
@@ -88,7 +88,7 @@ class Wget(FetchMethod):

         progresshandler = WgetProgressHandler(d)

-        logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command))
+        logger.debug2("Fetching %s using command '%s'" % (ud.url, command))
         bb.fetch2.check_network_access(d, command, ud.url)
         runfetchcmd(command + ' --progress=dot -v', d, quiet, log=progresshandler, workdir=workdir)

@@ -326,11 +326,11 @@ class Wget(FetchMethod):
                 pass
             except urllib.error.URLError as e:
                 if try_again:
-                    logger.debug(2, "checkstatus: trying again")
+                    logger.debug2("checkstatus: trying again")
                     return self.checkstatus(fetch, ud, d, False)
                 else:
                     # debug for now to avoid spamming the logs in e.g. remote sstate searches
-                    logger.debug(2, "checkstatus() urlopen failed: %s" % e)
+                    logger.debug2("checkstatus() urlopen failed: %s" % e)
                     return False

         return True
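
The logger-side definitions of debug2() and debug3() fall outside this diffstat (it is
limited to lib/bb/fetch2), so they are not shown above. As a rough sketch of how extra
debug levels can be layered onto a standard python logger while keeping debug()
stdlib-compatible -- the level values, class name and logger name below are illustrative
assumptions, not necessarily what bitbake itself uses:

    import logging

    # Assumed level values: one and two steps below logging.DEBUG.
    DEBUG2 = logging.DEBUG - 1
    DEBUG3 = logging.DEBUG - 2
    logging.addLevelName(DEBUG2, "DEBUG2")
    logging.addLevelName(DEBUG3, "DEBUG3")

    class ExtraDebugLogger(logging.Logger):
        """Logger whose debug() keeps the stdlib signature, plus debug2()/debug3() helpers."""

        def debug2(self, msg, *args, **kwargs):
            if self.isEnabledFor(DEBUG2):
                self._log(DEBUG2, msg, args, **kwargs)

        def debug3(self, msg, *args, **kwargs):
            if self.isEnabledFor(DEBUG3):
                self._log(DEBUG3, msg, args, **kwargs)

    logging.setLoggerClass(ExtraDebugLogger)
    logger = logging.getLogger("BitBake.Fetcher")  # illustrative name

    # Call sites then read exactly like the ones in this patch:
    logger.debug("Running %s", "<some fetch command>")
    logger.debug2("checkstatus: trying again")

With this shape, code that only knows the standard logging API can call
logger.debug(msg, *args) without tripping over an extra level argument, which is the
compatibility problem the commit message describes.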