-rw-r--r--  SECURITY.md                                               |  24
-rwxr-xr-x  bin/bitbake-getvar                                        |  48
-rwxr-xr-x  bin/bitbake-worker                                        |  14
-rw-r--r--  doc/bitbake-user-manual/bitbake-user-manual-fetching.rst  |   4
-rw-r--r--  lib/bb/__init__.py                                        |   7
-rw-r--r--  lib/bb/build.py                                           |  19
-rw-r--r--  lib/bb/command.py                                         |  28
-rw-r--r--  lib/bb/compat.py                                          |  10
-rw-r--r--  lib/bb/cooker.py                                          |  42
-rw-r--r--  lib/bb/cookerdata.py                                      |  14
-rw-r--r--  lib/bb/data.py                                            |   1
-rw-r--r--  lib/bb/data_smart.py                                      |  20
-rw-r--r--  lib/bb/event.py                                           |  16
-rw-r--r--  lib/bb/fetch2/__init__.py                                 |   5
-rw-r--r--  lib/bb/fetch2/git.py                                      |  54
-rw-r--r--  lib/bb/fetch2/wget.py                                     |  24
-rw-r--r--  lib/bb/monitordisk.py                                     |   7
-rw-r--r--  lib/bb/msg.py                                             |   6
-rw-r--r--  lib/bb/parse/ast.py                                       |   2
-rw-r--r--  lib/bb/parse/parse_py/BBHandler.py                        |   2
-rw-r--r--  lib/bb/parse/parse_py/ConfHandler.py                      |   2
-rw-r--r--  lib/bb/persist_data.py                                    |  13
-rw-r--r--  lib/bb/process.py                                         |   3
-rw-r--r--  lib/bb/providers.py                                       |   4
-rw-r--r--  lib/bb/runqueue.py                                        | 178
-rw-r--r--  lib/bb/server/process.py                                  |  16
-rw-r--r--  lib/bb/siggen.py                                          |   3
-rw-r--r--  lib/bb/tests/codeparser.py                                |  30
-rw-r--r--  lib/bb/tests/event.py                                     |  17
-rw-r--r--  lib/bb/tests/fetch.py                                     | 117
-rw-r--r--  lib/bb/tinfoil.py                                         |  17
-rw-r--r--  lib/bb/ui/knotty.py                                       |  32
-rw-r--r--  lib/bb/ui/taskexp.py                                      |   5
-rw-r--r--  lib/bb/utils.py                                           |  52
-rw-r--r--  lib/bblayers/action.py                                    |   4
-rw-r--r--  lib/bblayers/layerindex.py                                |   1
-rw-r--r--  lib/bblayers/query.py                                     |   8
-rw-r--r--  lib/hashserv/server.py                                    |  23
-rw-r--r--  lib/layerindexlib/__init__.py                             |   1
-rw-r--r--  lib/toaster/toastergui/api.py                             |  26
40 files changed, 689 insertions(+), 210 deletions(-)
diff --git a/SECURITY.md b/SECURITY.md
new file mode 100644
index 000000000..7d2ce1f63
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1,24 @@
+How to Report a Potential Vulnerability?
+========================================
+
+If you would like to report a public issue (for example, one with a released
+CVE number), please report it using the
+[Security Bugzilla](https://bugzilla.yoctoproject.org/enter_bug.cgi?product=Security).
+If you have a patch ready, submit it following the same procedure as any other
+patch as described in README.md.
+
+If you are dealing with a not-yet released or urgent issue, please send a
+message to security AT yoctoproject DOT org, including as many details as
+possible: the layer or software module affected, the recipe and its version,
+and any example code, if available.
+
+Branches maintained with security fixes
+---------------------------------------
+
+See [Stable release and LTS](https://wiki.yoctoproject.org/wiki/Stable_Release_and_LTS)
+for detailed info regarding the policies and maintenance of Stable branches.
+
+The [Release page](https://wiki.yoctoproject.org/wiki/Releases) contains a list of all
+releases of the Yocto Project. Versions in grey are no longer actively maintained with
+security patches, but well-tested patches may still be accepted for them for
+significant issues.
diff --git a/bin/bitbake-getvar b/bin/bitbake-getvar
new file mode 100755
index 000000000..942321925
--- /dev/null
+++ b/bin/bitbake-getvar
@@ -0,0 +1,48 @@
+#! /usr/bin/env python3
+#
+# Copyright (C) 2021 Richard Purdie
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import argparse
+import io
+import os
+import sys
+
+bindir = os.path.dirname(__file__)
+topdir = os.path.dirname(bindir)
+sys.path[0:0] = [os.path.join(topdir, 'lib')]
+
+import bb.tinfoil
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(description="Bitbake Query Variable")
+ parser.add_argument("variable", help="variable name to query")
+ parser.add_argument("-r", "--recipe", help="Recipe name to query", default=None, required=False)
+ parser.add_argument('-u', '--unexpand', help='Do not expand the value (with --value)', action="store_true")
+ parser.add_argument('-f', '--flag', help='Specify a variable flag to query (with --value)', default=None)
+ parser.add_argument('--value', help='Only report the value, no history and no variable name', action="store_true")
+ args = parser.parse_args()
+
+ if args.unexpand and not args.value:
+ print("--unexpand only makes sense with --value")
+ sys.exit(1)
+
+ if args.flag and not args.value:
+ print("--flag only makes sense with --value")
+ sys.exit(1)
+
+ with bb.tinfoil.Tinfoil(tracking=True) as tinfoil:
+ if args.recipe:
+ tinfoil.prepare(quiet=2)
+ d = tinfoil.parse_recipe(args.recipe)
+ else:
+ tinfoil.prepare(quiet=2, config_only=True)
+ d = tinfoil.config_data
+ if args.flag:
+ print(str(d.getVarFlag(args.variable, args.flag, expand=(not args.unexpand))))
+ elif args.value:
+ print(str(d.getVar(args.variable, expand=(not args.unexpand))))
+ else:
+ bb.data.emit_var(args.variable, d=d, all=True)
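
As a usage sketch for the new helper (hypothetical variable and recipe names, assuming an initialized build environment):

    $ bitbake-getvar MACHINE                  # full variable history via bb.data.emit_var
    $ bitbake-getvar --value MACHINE          # only the expanded value
    $ bitbake-getvar --value --unexpand MACHINE
    $ bitbake-getvar -r busybox --value PV    # parse a recipe first, then query it
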
diff --git a/bin/bitbake-worker b/bin/bitbake-worker
index 97cc0fd60..e3ce01eec 100755
--- a/bin/bitbake-worker
+++ b/bin/bitbake-worker
@@ -413,9 +413,9 @@ class BitbakeWorker(object):
def handle_workerdata(self, data):
self.workerdata = pickle.loads(data)
+ bb.build.verboseShellLogging = self.workerdata["build_verbose_shell"]
+ bb.build.verboseStdoutLogging = self.workerdata["build_verbose_stdout"]
bb.msg.loggerDefaultLogLevel = self.workerdata["logdefaultlevel"]
- bb.msg.loggerDefaultVerbose = self.workerdata["logdefaultverbose"]
- bb.msg.loggerVerboseLogs = self.workerdata["logdefaultverboselogs"]
bb.msg.loggerDefaultDomains = self.workerdata["logdefaultdomain"]
for mc in self.databuilder.mcdata:
self.databuilder.mcdata[mc].setVar("PRSERV_HOST", self.workerdata["prhost"])
@@ -505,9 +505,11 @@ except BaseException as e:
import traceback
sys.stderr.write(traceback.format_exc())
sys.stderr.write(str(e))
+finally:
+ worker_thread_exit = True
+ worker_thread.join()
-worker_thread_exit = True
-worker_thread.join()
-
-workerlog_write("exitting")
+workerlog_write("exiting")
+if not normalexit:
+ sys.exit(1)
sys.exit(0)
diff --git a/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst b/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst
index 93ac18b78..75e8dd69d 100644
--- a/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst
+++ b/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst
@@ -405,8 +405,8 @@ This fetcher supports the following parameters:
- *"nobranch":* Tells the fetcher to not check the SHA validation for
the branch when set to "1". The default is "0". Set this option for
- the recipe that refers to the commit that is valid for a tag instead
- of the branch.
+ the recipe that refers to the commit that is valid for any namespace
+ (branch, tag, ...) instead of the branch.
- *"bareclone":* Tells the fetcher to clone a bare clone into the
destination directory without checking out a working tree. Only the
diff --git a/lib/bb/__init__.py b/lib/bb/__init__.py
index c98e23ce3..ba8039497 100644
--- a/lib/bb/__init__.py
+++ b/lib/bb/__init__.py
@@ -15,6 +15,13 @@ import sys
if sys.version_info < (3, 5, 0):
raise RuntimeError("Sorry, python 3.5.0 or later is required for this version of bitbake")
+if sys.version_info < (3, 10, 0):
+ # With python 3.8 and 3.9, we see errors of "libgcc_s.so.1 must be installed for pthread_cancel to work"
+ # https://stackoverflow.com/questions/64797838/libgcc-s-so-1-must-be-installed-for-pthread-cancel-to-work
+ # https://bugs.ams1.psf.io/issue42888
+ # so ensure libgcc_s is loaded early on
+ import ctypes
+ libgcc_s = ctypes.CDLL('libgcc_s.so.1')
class BBHandledException(Exception):
"""
diff --git a/lib/bb/build.py b/lib/bb/build.py
index 23b6ee455..aaada8a18 100644
--- a/lib/bb/build.py
+++ b/lib/bb/build.py
@@ -27,6 +27,9 @@ from bb import data, event, utils
bblogger = logging.getLogger('BitBake')
logger = logging.getLogger('BitBake.Build')
+verboseShellLogging = False
+verboseStdoutLogging = False
+
__mtime_cache = {}
def cached_mtime_noerror(f):
@@ -290,8 +293,8 @@ def exec_func_python(func, d, runfile, cwd=None):
lineno = int(d.getVarFlag(func, "lineno", False))
bb.methodpool.insert_method(func, text, fn, lineno - 1)
- comp = utils.better_compile(code, func, "exec_python_func() autogenerated")
- utils.better_exec(comp, {"d": d}, code, "exec_python_func() autogenerated")
+ comp = utils.better_compile(code, func, "exec_func_python() autogenerated")
+ utils.better_exec(comp, {"d": d}, code, "exec_func_python() autogenerated")
finally:
bb.debug(2, "Python function %s finished" % func)
@@ -371,7 +374,7 @@ def exec_func_shell(func, d, runfile, cwd=None):
bb.data.emit_func(func, script, d)
- if bb.msg.loggerVerboseLogs:
+ if verboseShellLogging or bb.utils.to_boolean(d.getVar("BB_VERBOSE_LOGS", False)):
script.write("set -x\n")
if cwd:
script.write("cd '%s'\n" % cwd)
@@ -391,7 +394,7 @@ exit $ret
if fakerootcmd:
cmd = [fakerootcmd, runfile]
- if bb.msg.loggerDefaultVerbose:
+ if verboseStdoutLogging:
logfile = LogTee(logger, StdoutNoopContextManager())
else:
logfile = StdoutNoopContextManager()
@@ -587,11 +590,15 @@ def _exec_task(fn, task, d, quieterr):
except bb.BBHandledException:
event.fire(TaskFailed(task, fn, logfn, localdata, True), localdata)
return 1
- except Exception as exc:
+ except (Exception, SystemExit) as exc:
if quieterr:
event.fire(TaskFailedSilent(task, fn, logfn, localdata), localdata)
else:
errprinted = errchk.triggered
+ # If the output is already on stdout, we've printed the information in the
+ # logs once already so don't duplicate
+ if verboseStdoutLogging:
+ errprinted = True
logger.error(str(exc))
event.fire(TaskFailed(task, fn, logfn, localdata, errprinted), localdata)
return 1
@@ -901,6 +908,8 @@ def tasksbetween(task_start, task_end, d):
def follow_chain(task, endtask, chain=None):
if not chain:
chain = []
+ if task in chain:
+ bb.fatal("Circular task dependencies as %s depends on itself via the chain %s" % (task, " -> ".join(chain)))
chain.append(task)
for othertask in tasks:
if othertask == task:
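
The new guard in follow_chain() turns what used to be unbounded recursion into an immediate, explained failure. A minimal standalone sketch of the same cycle check (the deps table and task names are illustrative, not BitBake API):

    import sys

    def follow_chain(task, deps, chain=None):
        # Abort if a task depends on itself through its dependency chain
        if chain is None:
            chain = []
        if task in chain:
            sys.exit("Circular task dependencies: %s via %s" % (task, " -> ".join(chain)))
        chain.append(task)
        for dep in deps.get(task, []):
            follow_chain(dep, deps, chain)
        chain.pop()

    deps = {"do_build": ["do_compile"], "do_compile": ["do_build"]}  # hypothetical cycle
    follow_chain("do_build", deps)
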
diff --git a/lib/bb/command.py b/lib/bb/command.py
index 6abf38668..b8429b277 100644
--- a/lib/bb/command.py
+++ b/lib/bb/command.py
@@ -20,6 +20,7 @@ Commands are queued in a CommandQueue
from collections import OrderedDict, defaultdict
+import io
import bb.event
import bb.cooker
import bb.remotedata
@@ -74,8 +75,12 @@ class Command:
result = command_method(self, commandline)
except CommandError as exc:
return None, exc.args[0]
- except (Exception, SystemExit):
+ except (Exception, SystemExit) as exc:
import traceback
+ if isinstance(exc, bb.BBHandledException):
+ # We need to start returning real exceptions here. Until we do, we can't
+ # tell if an exception is an instance of bb.BBHandledException
+ return None, "bb.BBHandledException()\n" + traceback.format_exc()
return None, traceback.format_exc()
else:
return result, None
@@ -474,6 +479,17 @@ class CommandsSync:
d = command.remotedatastores[dsindex].varhistory
return getattr(d, method)(*args, **kwargs)
+ def dataStoreConnectorVarHistCmdEmit(self, command, params):
+ dsindex = params[0]
+ var = params[1]
+ oval = params[2]
+ val = params[3]
+ d = command.remotedatastores[params[4]]
+
+ o = io.StringIO()
+ command.remotedatastores[dsindex].varhistory.emit(var, oval, val, o, d)
+ return o.getvalue()
+
def dataStoreConnectorIncHistCmd(self, command, params):
dsindex = params[0]
method = params[1]
@@ -620,6 +636,16 @@ class CommandsAsync:
command.finishAsyncCommand()
findFilesMatchingInDir.needcache = False
+ def testCookerCommandEvent(self, command, params):
+ """
+ Dummy command used by OEQA selftest to test tinfoil without IO
+ """
+ pattern = params[0]
+
+ command.cooker.testCookerCommandEvent(pattern)
+ command.finishAsyncCommand()
+ testCookerCommandEvent.needcache = False
+
def findConfigFilePath(self, command, params):
"""
Find the path of the requested configuration file
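
dataStoreConnectorVarHistCmdEmit works by pointing varhistory.emit() at an in-memory stream and returning the rendered text over the command channel. The capture idiom in isolation (emit_report is a stand-in, not the BitBake API):

    import io

    def emit_report(history, var, stream):
        # stand-in for varhistory.emit(var, oval, val, o, d): writes a report into 'stream'
        for entry in history.get(var, []):
            stream.write("# %s\n" % entry)

    buf = io.StringIO()
    emit_report({"MACHINE": ['set "qemux86" conf/local.conf:37']}, "MACHINE", buf)
    print(buf.getvalue())  # this string is what the sync command hands back to the client
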
diff --git a/lib/bb/compat.py b/lib/bb/compat.py
deleted file mode 100644
index 49356681a..000000000
--- a/lib/bb/compat.py
+++ /dev/null
@@ -1,10 +0,0 @@
-#
-# SPDX-License-Identifier: GPL-2.0-only
-#
-
-"""Code pulled from future python versions, here for compatibility"""
-
-from collections import MutableMapping, KeysView, ValuesView, ItemsView, OrderedDict
-from functools import total_ordering
-
-
diff --git a/lib/bb/cooker.py b/lib/bb/cooker.py
index 11cc2b954..6743bce58 100644
--- a/lib/bb/cooker.py
+++ b/lib/bb/cooker.py
@@ -13,7 +13,6 @@ import sys, os, glob, os.path, re, time
import itertools
import logging
import multiprocessing
-import sre_constants
import threading
from io import StringIO, UnsupportedOperation
from contextlib import closing
@@ -411,10 +410,7 @@ class BBCooker:
self.data.disableTracking()
def parseConfiguration(self):
- # Set log file verbosity
- verboselogs = bb.utils.to_boolean(self.data.getVar("BB_VERBOSE_LOGS", False))
- if verboselogs:
- bb.msg.loggerVerboseLogs = True
+ self.updateCacheSync()
# Change nice level if we're asked to
nice = self.data.getVar("BB_NICE_LEVEL")
@@ -1022,6 +1018,11 @@ class BBCooker:
if matches:
bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
+ def testCookerCommandEvent(self, filepattern):
+ # Dummy command used by OEQA selftest to test tinfoil without IO
+ matches = ["A", "B"]
+ bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
+
def findProviders(self, mc=''):
return bb.providers.findProviders(self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
@@ -1636,6 +1637,7 @@ class BBCooker:
return
def post_serve(self):
+ self.shutdown(force=True)
prserv.serv.auto_shutdown()
if self.hashserv:
self.hashserv.process.terminate()
@@ -1650,6 +1652,7 @@ class BBCooker:
if self.parser:
self.parser.shutdown(clean=not force, force=force)
+ self.parser.final_cleanup()
def finishcommand(self):
self.state = state.initial
@@ -1791,7 +1794,7 @@ class CookerCollectFiles(object):
try:
re.compile(mask)
bbmasks.append(mask)
- except sre_constants.error:
+ except re.error:
collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)
# Then validate the combined regular expressions. This should never
@@ -1799,7 +1802,7 @@ class CookerCollectFiles(object):
bbmask = "|".join(bbmasks)
try:
bbmask_compiled = re.compile(bbmask)
- except sre_constants.error:
+ except re.error:
collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
bbmask = None
@@ -1931,7 +1934,8 @@ class Parser(multiprocessing.Process):
except queue.Empty:
pass
else:
- self.results.cancel_join_thread()
+ self.results.close()
+ self.results.join_thread()
break
if pending:
@@ -1940,6 +1944,8 @@ class Parser(multiprocessing.Process):
try:
job = self.jobs.pop()
except IndexError:
+ self.results.close()
+ self.results.join_thread()
break
result = self.parse(*job)
# Clear the siggen cache after parsing to control memory usage, its huge
@@ -2015,6 +2021,7 @@ class CookerParser(object):
self.start()
self.haveshutdown = False
+ self.syncthread = None
def start(self):
self.results = self.load_cached()
@@ -2056,12 +2063,9 @@ class CookerParser(object):
self.total)
bb.event.fire(event, self.cfgdata)
- for process in self.processes:
- self.parser_quit.put(None)
- else:
- self.parser_quit.cancel_join_thread()
- for process in self.processes:
- self.parser_quit.put(None)
+
+ for process in self.processes:
+ self.parser_quit.put(None)
# Cleanup the queue before call process.join(), otherwise there might be
# deadlocks.
@@ -2078,9 +2082,13 @@ class CookerParser(object):
else:
process.join()
+ self.parser_quit.close()
+ # Allow data left in the cancel queue to be discarded
+ self.parser_quit.cancel_join_thread()
+
sync = threading.Thread(target=self.bb_cache.sync)
+ self.syncthread = sync
sync.start()
- multiprocessing.util.Finalize(None, sync.join, exitpriority=-100)
bb.codeparser.parser_cache_savemerge()
bb.fetch.fetcher_parse_done()
if self.cooker.configuration.profile:
@@ -2094,6 +2102,10 @@ class CookerParser(object):
bb.utils.process_profilelog(profiles, pout = pout)
print("Processed parsing statistics saved to %s" % (pout))
+ def final_cleanup(self):
+ if self.syncthread:
+ self.syncthread.join()
+
def load_cached(self):
for filename, appends in self.fromcache:
cached, infos = self.bb_cache.load(filename, appends)
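
The queue shutdown changes above swap cancel_join_thread() for close() plus join_thread() on the results queue, so parsed results are flushed rather than discarded; cancel_join_thread() is kept only for the quit queue, where dropping leftover items is acceptable. The two idioms side by side (a minimal sketch, not cooker code):

    import multiprocessing

    q = multiprocessing.Queue()
    q.put("parsed result")
    # Graceful: close() stops new puts, join_thread() waits for the feeder
    # thread to flush buffered items into the pipe, so nothing is lost.
    q.close()
    q.join_thread()
    # The alternative, q.cancel_join_thread(), lets the process exit without
    # flushing and may silently drop queued data - acceptable only once the
    # queue's remaining contents no longer matter.
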
diff --git a/lib/bb/cookerdata.py b/lib/bb/cookerdata.py
index 472423fdc..30727bf2e 100644
--- a/lib/bb/cookerdata.py
+++ b/lib/bb/cookerdata.py
@@ -58,11 +58,14 @@ class ConfigParameters(object):
def updateToServer(self, server, environment):
options = {}
for o in ["abort", "force", "invalidate_stamp",
- "verbose", "debug", "dry_run", "dump_signatures",
+ "debug", "dry_run", "dump_signatures",
"debug_domains", "extra_assume_provided", "profile",
"prefile", "postfile", "server_timeout"]:
options[o] = getattr(self.options, o)
+ options['build_verbose_shell'] = self.options.verbose
+ options['build_verbose_stdout'] = self.options.verbose
+
ret, error = server.runCommand(["updateConfig", options, environment, sys.argv])
if error:
raise Exception("Unable to update the server configuration with local parameters: %s" % error)
@@ -125,6 +128,8 @@ class CookerConfiguration(object):
self.skipsetscene = False
self.invalidate_stamp = False
self.dump_signatures = []
+ self.build_verbose_shell = False
+ self.build_verbose_stdout = False
self.dry_run = False
self.tracking = False
self.xmlrpcinterface = []
@@ -297,6 +302,8 @@ class CookerDataBuilder(object):
multiconfig = (self.data.getVar("BBMULTICONFIG") or "").split()
for config in multiconfig:
+ if config[0].isdigit():
+ bb.fatal("Multiconfig name '%s' is invalid as multiconfigs cannot start with a digit" % config)
mcdata = self.parseConfigurationFiles(self.prefiles, self.postfiles, config)
bb.event.fire(bb.event.ConfigParsed(), mcdata)
self.mcdata[config] = mcdata
@@ -348,6 +355,9 @@ class CookerDataBuilder(object):
layers = (data.getVar('BBLAYERS') or "").split()
broken_layers = []
+ if not layers:
+ bb.fatal("The bblayers.conf file doesn't contain any BBLAYERS definition")
+
data = bb.data.createCopy(data)
approved = bb.utils.approved_variables()
@@ -399,6 +409,8 @@ class CookerDataBuilder(object):
if c in collections_tmp:
bb.fatal("Found duplicated BBFILE_COLLECTIONS '%s', check bblayers.conf or layer.conf to fix it." % c)
compat = set((data.getVar("LAYERSERIES_COMPAT_%s" % c) or "").split())
+ if compat and not layerseries:
+ bb.fatal("No core layer found to work with layer '%s'. Missing entry in bblayers.conf?" % c)
if compat and not (compat & layerseries):
bb.fatal("Layer %s is not compatible with the core layer which only supports these series: %s (layer is compatible with %s)"
% (c, " ".join(layerseries), " ".join(compat)))
diff --git a/lib/bb/data.py b/lib/bb/data.py
index b0683c518..1d21e00a1 100644
--- a/lib/bb/data.py
+++ b/lib/bb/data.py
@@ -301,6 +301,7 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
value += "\n_remove of %s" % r
deps |= r2.references
deps = deps | (keys & r2.execs)
+ value = handle_contains(value, r2.contains, d)
return value
if "vardepvalue" in varflags:
diff --git a/lib/bb/data_smart.py b/lib/bb/data_smart.py
index 1d8774ee5..c46d3f0a0 100644
--- a/lib/bb/data_smart.py
+++ b/lib/bb/data_smart.py
@@ -17,7 +17,7 @@ BitBake build tools.
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
import copy, re, sys, traceback
-from collections import MutableMapping
+from collections.abc import MutableMapping
import logging
import hashlib
import bb, bb.codeparser
@@ -28,7 +28,7 @@ logger = logging.getLogger("BitBake.Data")
__setvar_keyword__ = ["_append", "_prepend", "_remove"]
__setvar_regexp__ = re.compile(r'(?P<base>.*?)(?P<keyword>_append|_prepend|_remove)(_(?P<add>[^A-Z]*))?$')
-__expand_var_regexp__ = re.compile(r"\${[a-zA-Z0-9\-_+./~]+?}")
+__expand_var_regexp__ = re.compile(r"\${[a-zA-Z0-9\-_+./~:]+?}")
__expand_python_regexp__ = re.compile(r"\${@.+?}")
__whitespace_split__ = re.compile(r'(\s)')
__override_regexp__ = re.compile(r'[a-z0-9]+')
@@ -403,7 +403,7 @@ class DataSmart(MutableMapping):
s = __expand_python_regexp__.sub(varparse.python_sub, s)
except SyntaxError as e:
# Likely unmatched brackets, just don't expand the expression
- if e.msg != "EOL while scanning string literal":
+ if e.msg != "EOL while scanning string literal" and not e.msg.startswith("unterminated string literal"):
raise
if s == olds:
break
@@ -411,6 +411,8 @@ class DataSmart(MutableMapping):
raise
except bb.parse.SkipRecipe:
raise
+ except bb.BBHandledException:
+ raise
except Exception as exc:
tb = sys.exc_info()[2]
raise ExpansionError(varname, s, exc).with_traceback(tb) from exc
@@ -481,6 +483,7 @@ class DataSmart(MutableMapping):
def setVar(self, var, value, **loginfo):
#print("var=" + str(var) + " val=" + str(value))
+ var = var.replace(":", "_")
self.expand_cache = {}
parsing=False
if 'parsing' in loginfo:
@@ -589,6 +592,8 @@ class DataSmart(MutableMapping):
"""
Rename the variable key to newkey
"""
+ key = key.replace(":", "_")
+ newkey = newkey.replace(":", "_")
if key == newkey:
bb.warn("Calling renameVar with equivalent keys (%s) is invalid" % key)
return
@@ -637,6 +642,7 @@ class DataSmart(MutableMapping):
self.setVar(var + "_prepend", value, ignore=True, parsing=True)
def delVar(self, var, **loginfo):
+ var = var.replace(":", "_")
self.expand_cache = {}
loginfo['detail'] = ""
@@ -664,6 +670,7 @@ class DataSmart(MutableMapping):
override = None
def setVarFlag(self, var, flag, value, **loginfo):
+ var = var.replace(":", "_")
self.expand_cache = {}
if 'op' not in loginfo:
@@ -687,6 +694,7 @@ class DataSmart(MutableMapping):
self.dict["__exportlist"]["_content"].add(var)
def getVarFlag(self, var, flag, expand=True, noweakdefault=False, parsing=False, retparser=False):
+ var = var.replace(":", "_")
if flag == "_content":
cachename = var
else:
@@ -814,6 +822,7 @@ class DataSmart(MutableMapping):
return value
def delVarFlag(self, var, flag, **loginfo):
+ var = var.replace(":", "_")
self.expand_cache = {}
local_var, _ = self._findVar(var)
@@ -831,6 +840,7 @@ class DataSmart(MutableMapping):
del self.dict[var][flag]
def appendVarFlag(self, var, flag, value, **loginfo):
+ var = var.replace(":", "_")
loginfo['op'] = 'append'
loginfo['flag'] = flag
self.varhistory.record(**loginfo)
@@ -838,6 +848,7 @@ class DataSmart(MutableMapping):
self.setVarFlag(var, flag, newvalue, ignore=True)
def prependVarFlag(self, var, flag, value, **loginfo):
+ var = var.replace(":", "_")
loginfo['op'] = 'prepend'
loginfo['flag'] = flag
self.varhistory.record(**loginfo)
@@ -845,6 +856,7 @@ class DataSmart(MutableMapping):
self.setVarFlag(var, flag, newvalue, ignore=True)
def setVarFlags(self, var, flags, **loginfo):
+ var = var.replace(":", "_")
self.expand_cache = {}
infer_caller_details(loginfo)
if not var in self.dict:
@@ -859,6 +871,7 @@ class DataSmart(MutableMapping):
self.dict[var][i] = flags[i]
def getVarFlags(self, var, expand = False, internalflags=False):
+ var = var.replace(":", "_")
local_var, _ = self._findVar(var)
flags = {}
@@ -875,6 +888,7 @@ class DataSmart(MutableMapping):
def delVarFlags(self, var, **loginfo):
+ var = var.replace(":", "_")
self.expand_cache = {}
if not var in self.dict:
self._makeShadowCopy(var)
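
The repeated var.replace(":", "_") calls make the datastore treat ':' in a variable name as an alias for '_' (groundwork for the newer override syntax, matching the widened regexps above). A rough illustration, assuming bitbake's lib directory is importable:

    from bb.data_smart import DataSmart

    d = DataSmart()
    d.setVar("FOO:x86", "value")   # name is normalized to FOO_x86 on the way in
    print(d.getVar("FOO_x86"))     # -> "value"
    print(d.getVar("FOO:x86"))     # same variable; ':' is normalized on lookup too
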
diff --git a/lib/bb/event.py b/lib/bb/event.py
index d1359f010..cb0b3b334 100644
--- a/lib/bb/event.py
+++ b/lib/bb/event.py
@@ -10,17 +10,17 @@ BitBake build tools.
# SPDX-License-Identifier: GPL-2.0-only
#
-import sys
-import pickle
-import logging
-import atexit
-import traceback
import ast
+import atexit
+import collections
+import logging
+import pickle
+import sys
import threading
+import traceback
-import bb.utils
-import bb.compat
import bb.exceptions
+import bb.utils
# This is the pid for which we should generate the event. This is set when
# the runqueue forks off.
@@ -56,7 +56,7 @@ def set_class_handlers(h):
_handlers = h
def clean_class_handlers():
- return bb.compat.OrderedDict()
+ return collections.OrderedDict()
# Internal
_handlers = clean_class_handlers()
diff --git a/lib/bb/fetch2/__init__.py b/lib/bb/fetch2/__init__.py
index dc99914cd..3e6555bd6 100644
--- a/lib/bb/fetch2/__init__.py
+++ b/lib/bb/fetch2/__init__.py
@@ -562,6 +562,9 @@ def verify_checksum(ud, d, precomputed={}):
checksum_expected = getattr(ud, "%s_expected" % checksum_id)
+ if checksum_expected == '':
+ checksum_expected = None
+
return {
"id": checksum_id,
"name": checksum_name,
@@ -612,7 +615,7 @@ def verify_checksum(ud, d, precomputed={}):
for ci in checksum_infos:
if ci["expected"] and ci["expected"] != ci["data"]:
- messages.append("File: '%s' has %s checksum %s when %s was " \
+ messages.append("File: '%s' has %s checksum '%s' when '%s' was " \
"expected" % (ud.localpath, ci["id"], ci["data"], ci["expected"]))
bad_checksum = ci["data"]
diff --git a/lib/bb/fetch2/git.py b/lib/bb/fetch2/git.py
index 8740e9c05..cad1ae820 100644
--- a/lib/bb/fetch2/git.py
+++ b/lib/bb/fetch2/git.py
@@ -44,7 +44,8 @@ Supported SRC_URI options are:
- nobranch
Don't check the SHA validation for branch. set this option for the recipe
- referring to commit which is valid in tag instead of branch.
+ referring to a commit that is valid in any namespace (branch, tag, ...)
+ instead of a branch.
The default is "0", set nobranch=1 if needed.
- usehead
@@ -63,10 +64,12 @@ import errno
import fnmatch
import os
import re
+import shlex
import subprocess
import tempfile
import bb
import bb.progress
+from contextlib import contextmanager
from bb.fetch2 import FetchMethod
from bb.fetch2 import runfetchcmd
from bb.fetch2 import logger
@@ -140,6 +143,10 @@ class Git(FetchMethod):
ud.proto = 'file'
else:
ud.proto = "git"
+ if ud.host == "github.com" and ud.proto == "git":
+ # github stopped supporting git protocol
+ # https://github.blog/2021-09-01-improving-git-protocol-security-github/#no-more-unauthenticated-git
+ ud.proto = "https"
if not ud.proto in ('git', 'file', 'ssh', 'http', 'https', 'rsync'):
raise bb.fetch2.ParameterError("Invalid protocol type", ud.url)
@@ -219,7 +226,12 @@ class Git(FetchMethod):
ud.shallow = False
if ud.usehead:
- ud.unresolvedrev['default'] = 'HEAD'
+ # When usehead is set let's associate 'HEAD' with the unresolved
+ # rev of this repository. This will get resolved into a revision
+ # later. If an actual revision happens to have also been provided
+ # then this setting will be overridden.
+ for name in ud.names:
+ ud.unresolvedrev[name] = 'HEAD'
ud.basecmd = d.getVar("FETCHCMD_git") or "git -c core.fsyncobjectfiles=0"
@@ -342,7 +354,7 @@ class Git(FetchMethod):
# We do this since git will use a "-l" option automatically for local urls where possible
if repourl.startswith("file://"):
repourl = repourl[7:]
- clone_cmd = "LANG=C %s clone --bare --mirror \"%s\" %s --progress" % (ud.basecmd, repourl, ud.clonedir)
+ clone_cmd = "LANG=C %s clone --bare --mirror %s %s --progress" % (ud.basecmd, shlex.quote(repourl), ud.clonedir)
if ud.proto.lower() != 'file':
bb.fetch2.check_network_access(d, clone_cmd, ud.url)
progresshandler = GitProgressHandler(d)
@@ -354,8 +366,12 @@ class Git(FetchMethod):
if "origin" in output:
runfetchcmd("%s remote rm origin" % ud.basecmd, d, workdir=ud.clonedir)
- runfetchcmd("%s remote add --mirror=fetch origin \"%s\"" % (ud.basecmd, repourl), d, workdir=ud.clonedir)
- fetch_cmd = "LANG=C %s fetch -f --progress \"%s\" refs/*:refs/*" % (ud.basecmd, repourl)
+ runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, shlex.quote(repourl)), d, workdir=ud.clonedir)
+
+ if ud.nobranch:
+ fetch_cmd = "LANG=C %s fetch -f --progress %s refs/*:refs/*" % (ud.basecmd, shlex.quote(repourl))
+ else:
+ fetch_cmd = "LANG=C %s fetch -f --progress %s refs/heads/*:refs/heads/* refs/tags/*:refs/tags/*" % (ud.basecmd, shlex.quote(repourl))
if ud.proto.lower() != 'file':
bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
progresshandler = GitProgressHandler(d)
@@ -388,7 +404,7 @@ class Git(FetchMethod):
tmpdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR'))
try:
# Do the checkout. This implicitly involves a Git LFS fetch.
- self.unpack(ud, tmpdir, d)
+ Git.unpack(self, ud, tmpdir, d)
# Scoop up a copy of any stuff that Git LFS downloaded. Merge them into
# the bare clonedir.
@@ -408,6 +424,20 @@ class Git(FetchMethod):
bb.utils.remove(tmpdir, recurse=True)
def build_mirror_data(self, ud, d):
+
+ # Create as a temp file and move atomically into position to avoid races
+ @contextmanager
+ def create_atomic(filename):
+ fd, tfile = tempfile.mkstemp(dir=os.path.dirname(filename))
+ try:
+ yield tfile
+ umask = os.umask(0o666)
+ os.umask(umask)
+ os.chmod(tfile, (0o666 & ~umask))
+ os.rename(tfile, filename)
+ finally:
+ os.close(fd)
+
if ud.shallow and ud.write_shallow_tarballs:
if not os.path.exists(ud.fullshallow):
if os.path.islink(ud.fullshallow):
@@ -418,7 +448,8 @@ class Git(FetchMethod):
self.clone_shallow_local(ud, shallowclone, d)
logger.info("Creating tarball of git repository")
- runfetchcmd("tar -czf %s ." % ud.fullshallow, d, workdir=shallowclone)
+ with create_atomic(ud.fullshallow) as tfile:
+ runfetchcmd("tar -czf %s ." % tfile, d, workdir=shallowclone)
runfetchcmd("touch %s.done" % ud.fullshallow, d)
finally:
bb.utils.remove(tempdir, recurse=True)
@@ -427,7 +458,8 @@ class Git(FetchMethod):
os.unlink(ud.fullmirror)
logger.info("Creating tarball of git repository")
- runfetchcmd("tar -czf %s ." % ud.fullmirror, d, workdir=ud.clonedir)
+ with create_atomic(ud.fullmirror) as tfile:
+ runfetchcmd("tar -czf %s ." % tfile, d, workdir=ud.clonedir)
runfetchcmd("touch %s.done" % ud.fullmirror, d)
def clone_shallow_local(self, ud, dest, d):
@@ -533,7 +565,7 @@ class Git(FetchMethod):
raise bb.fetch2.UnpackError("No up to date source found: " + "; ".join(source_error), ud.url)
repourl = self._get_repo_url(ud)
- runfetchcmd("%s remote set-url origin \"%s\"" % (ud.basecmd, repourl), d, workdir=destdir)
+ runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, shlex.quote(repourl)), d, workdir=destdir)
if self._contains_lfs(ud, d, destdir):
if need_lfs and not self._find_git_lfs(d):
@@ -661,8 +693,8 @@ class Git(FetchMethod):
d.setVar('_BB_GIT_IN_LSREMOTE', '1')
try:
repourl = self._get_repo_url(ud)
- cmd = "%s ls-remote \"%s\" %s" % \
- (ud.basecmd, repourl, search)
+ cmd = "%s ls-remote %s %s" % \
+ (ud.basecmd, shlex.quote(repourl), search)
if ud.proto.lower() != 'file':
bb.fetch2.check_network_access(d, cmd, repourl)
output = runfetchcmd(cmd, d, True)
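
The shlex.quote() conversions matter because repourl is interpolated into a shell command line; wrapping it in double quotes still leaves $(...) and backtick expansion live, whereas shlex.quote() single-quotes the value into exactly one inert argument. A self-contained illustration with a hostile, hypothetical URL:

    import shlex

    repourl = "https://example.com/$(touch /tmp/pwned)"     # hypothetical malicious input
    unsafe = 'git clone --bare "%s"' % repourl              # $() would still expand
    safe = "git clone --bare %s" % shlex.quote(repourl)     # single-quoted, inert
    print(unsafe)
    print(safe)
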
diff --git a/lib/bb/fetch2/wget.py b/lib/bb/fetch2/wget.py
index f7d1de26b..368c64433 100644
--- a/lib/bb/fetch2/wget.py
+++ b/lib/bb/fetch2/wget.py
@@ -52,6 +52,12 @@ class WgetProgressHandler(bb.progress.LineFilterProgressHandler):
class Wget(FetchMethod):
+
+ # CDNs like CloudFlare may do a 'browser integrity test' which can fail
+ # with the standard wget/urllib User-Agent, so pretend to be a modern
+ # browser.
+ user_agent = "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:84.0) Gecko/20100101 Firefox/84.0"
+
"""Class to fetch urls via 'wget'"""
def supports(self, ud, d):
"""
@@ -91,10 +97,9 @@ class Wget(FetchMethod):
fetchcmd = self.basecmd
- if 'downloadfilename' in ud.parm:
- localpath = os.path.join(d.getVar("DL_DIR"), ud.localfile)
- bb.utils.mkdirhier(os.path.dirname(localpath))
- fetchcmd += " -O %s" % shlex.quote(localpath)
+ localpath = os.path.join(d.getVar("DL_DIR"), ud.localfile) + ".tmp"
+ bb.utils.mkdirhier(os.path.dirname(localpath))
+ fetchcmd += " -O %s" % shlex.quote(localpath)
if ud.user and ud.pswd:
fetchcmd += " --user=%s --password=%s --auth-no-challenge" % (ud.user, ud.pswd)
@@ -108,6 +113,10 @@ class Wget(FetchMethod):
self._runwget(ud, d, fetchcmd, False)
+ # Remove the ".tmp" and move the file into position atomically
+ # Our lock prevents multiple writers but mirroring code may grab incomplete files
+ os.rename(localpath, localpath[:-4])
+
# Sanity check since wget can pretend it succeed when it didn't
# Also, this used to happen if sourceforge sent us to the mirror page
if not os.path.exists(ud.localpath):
@@ -300,7 +309,7 @@ class Wget(FetchMethod):
# Some servers (FusionForge, as used on Alioth) require that the
# optional Accept header is set.
r.add_header("Accept", "*/*")
- r.add_header("User-Agent", "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12")
+ r.add_header("User-Agent", self.user_agent)
def add_basic_auth(login_str, request):
'''Adds Basic auth to http request, pass in login:password as string'''
import base64
@@ -319,7 +328,7 @@ class Wget(FetchMethod):
except (TypeError, ImportError, IOError, netrc.NetrcParseError):
pass
- with opener.open(r) as response:
+ with opener.open(r, timeout=30) as response:
pass
except urllib.error.URLError as e:
if try_again:
@@ -404,9 +413,8 @@ class Wget(FetchMethod):
"""
f = tempfile.NamedTemporaryFile()
with tempfile.TemporaryDirectory(prefix="wget-index-") as workdir, tempfile.NamedTemporaryFile(dir=workdir, prefix="wget-listing-") as f:
- agent = "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12"
fetchcmd = self.basecmd
- fetchcmd += " -O " + f.name + " --user-agent='" + agent + "' '" + uri + "'"
+ fetchcmd += " -O " + f.name + " --user-agent='" + self.user_agent + "' '" + uri + "'"
try:
self._runwget(ud, d, fetchcmd, True, workdir=workdir)
fetchresult = f.read()
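
Both the wget download and the git tarballs above now follow the same pattern: write to a temporary name, then rename into place, because rename() is atomic within a filesystem and concurrent readers of DL_DIR therefore never see a half-written file. The pattern in miniature (fetch is a placeholder callable):

    import os

    def fetch_atomically(url, localpath, fetch):
        tmppath = localpath + ".tmp"
        os.makedirs(os.path.dirname(localpath), exist_ok=True)
        fetch(url, tmppath)            # write everything under the temporary name
        os.rename(tmppath, localpath)  # atomic: readers see nothing, or the whole file
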
diff --git a/lib/bb/monitordisk.py b/lib/bb/monitordisk.py
index e7c07264a..4d243af30 100644
--- a/lib/bb/monitordisk.py
+++ b/lib/bb/monitordisk.py
@@ -229,9 +229,10 @@ class diskMonitor:
freeInode = st.f_favail
if minInode and freeInode < minInode:
- # Some filesystems use dynamic inodes so can't run out
- # (e.g. btrfs). This is reported by the inode count being 0.
- if st.f_files == 0:
+ # Some filesystems use dynamic inodes so can't run out.
+ # This is reported by the inode count being 0 (btrfs) or the free
+ # inode count being -1 (cephfs).
+ if st.f_files == 0 or st.f_favail == -1:
self.devDict[k][2] = None
continue
# Always show warning, the self.checked would always be False if the action is WARN
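
The same statvfs fields the monitor consults can be probed directly; on filesystems that allocate inodes dynamically the kernel reports f_files == 0 (btrfs) or f_favail == -1 (cephfs), so a low free-inode reading there carries no information. A quick probe (the path is arbitrary):

    import os

    st = os.statvfs("/")
    if st.f_files == 0 or st.f_favail == -1:
        print("dynamic inodes; inode monitoring is meaningless here")
    else:
        print("free inodes:", st.f_favail)
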
diff --git a/lib/bb/msg.py b/lib/bb/msg.py
index 2d88c4e72..1b1a23bb5 100644
--- a/lib/bb/msg.py
+++ b/lib/bb/msg.py
@@ -146,18 +146,12 @@ class LogFilterLTLevel(logging.Filter):
#
loggerDefaultLogLevel = BBLogFormatter.NOTE
-loggerDefaultVerbose = False
-loggerVerboseLogs = False
loggerDefaultDomains = {}
def init_msgconfig(verbose, debug, debug_domains=None):
"""
Set default verbosity and debug levels config the logger
"""
- bb.msg.loggerDefaultVerbose = verbose
- if verbose:
- bb.msg.loggerVerboseLogs = True
-
if debug:
bb.msg.loggerDefaultLogLevel = BBLogFormatter.DEBUG - debug + 1
elif verbose:
diff --git a/lib/bb/parse/ast.py b/lib/bb/parse/ast.py
index eb8cfa21b..9f46f3f35 100644
--- a/lib/bb/parse/ast.py
+++ b/lib/bb/parse/ast.py
@@ -97,6 +97,7 @@ class DataNode(AstNode):
def eval(self, data):
groupd = self.groupd
key = groupd["var"]
+ key = key.replace(":", "_")
loginfo = {
'variable': key,
'file': self.filename,
@@ -207,6 +208,7 @@ class ExportFuncsNode(AstNode):
def eval(self, data):
for func in self.n:
+ func = func.replace(":", "_")
calledfunc = self.classname + "_" + func
if data.getVar(func, False) and not data.getVarFlag(func, 'export_func', False):
diff --git a/lib/bb/parse/parse_py/BBHandler.py b/lib/bb/parse/parse_py/BBHandler.py
index 6e216effb..8781129fc 100644
--- a/lib/bb/parse/parse_py/BBHandler.py
+++ b/lib/bb/parse/parse_py/BBHandler.py
@@ -22,7 +22,7 @@ from .ConfHandler import include, init
# For compatibility
bb.deprecate_import(__name__, "bb.parse", ["vars_from_file"])
-__func_start_regexp__ = re.compile(r"(((?P<py>python)|(?P<fr>fakeroot))\s*)*(?P<func>[\w\.\-\+\{\}\$]+)?\s*\(\s*\)\s*{$" )
+__func_start_regexp__ = re.compile(r"(((?P<py>python(?=(\s|\()))|(?P<fr>fakeroot(?=\s)))\s*)*(?P<func>[\w\.\-\+\{\}\$:]+)?\s*\(\s*\)\s*{$" )
__inherit_regexp__ = re.compile(r"inherit\s+(.+)" )
__export_func_regexp__ = re.compile(r"EXPORT_FUNCTIONS\s+(.+)" )
__addtask_regexp__ = re.compile(r"addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*")
diff --git a/lib/bb/parse/parse_py/ConfHandler.py b/lib/bb/parse/parse_py/ConfHandler.py
index af64d3446..a7e81bd6a 100644
--- a/lib/bb/parse/parse_py/ConfHandler.py
+++ b/lib/bb/parse/parse_py/ConfHandler.py
@@ -20,7 +20,7 @@ from bb.parse import ParseError, resolve_file, ast, logger, handle
__config_regexp__ = re.compile( r"""
^
(?P<exp>export\s+)?
- (?P<var>[a-zA-Z0-9\-_+.${}/~]+?)
+ (?P<var>[a-zA-Z0-9\-_+.${}/~:]+?)
(\[(?P<flag>[a-zA-Z0-9\-_+.]+)\])?
\s* (
diff --git a/lib/bb/persist_data.py b/lib/bb/persist_data.py
index 7357ab2d4..56c983f81 100644
--- a/lib/bb/persist_data.py
+++ b/lib/bb/persist_data.py
@@ -12,14 +12,15 @@ currently, providing a key/value store accessed by 'domain'.
#
import collections
+import collections.abc
+import contextlib
+import functools
import logging
import os.path
+import sqlite3
import sys
import warnings
-from bb.compat import total_ordering
-from collections import Mapping
-import sqlite3
-import contextlib
+from collections.abc import Mapping
sqlversion = sqlite3.sqlite_version_info
if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
@@ -28,8 +29,8 @@ if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
logger = logging.getLogger("BitBake.PersistData")
-@total_ordering
-class SQLTable(collections.MutableMapping):
+@functools.total_ordering
+class SQLTable(collections.abc.MutableMapping):
class _Decorators(object):
@staticmethod
def retry(*, reconnect=True):
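
This is the Python 3.10 compatibility shape that replaces the deleted bb.compat module: the ABC aliases were dropped from the top-level collections module, and total_ordering has its canonical home in functools. For reference:

    # Removed from top-level 'collections' in Python 3.10:
    from collections.abc import Mapping, MutableMapping
    # Always available here; bb.compat merely re-exported it:
    from functools import total_ordering
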
diff --git a/lib/bb/process.py b/lib/bb/process.py
index 2dc472a86..24c588e53 100644
--- a/lib/bb/process.py
+++ b/lib/bb/process.py
@@ -179,5 +179,8 @@ def run(cmd, input=None, log=None, extrafiles=None, **options):
stderr = stderr.decode("utf-8")
if pipe.returncode != 0:
+ if log:
+ # Don't duplicate the output in the exception if logging it
+ raise ExecutionError(cmd, pipe.returncode, None, None)
raise ExecutionError(cmd, pipe.returncode, stdout, stderr)
return stdout, stderr
diff --git a/lib/bb/providers.py b/lib/bb/providers.py
index 81459c36d..484e1ea4f 100644
--- a/lib/bb/providers.py
+++ b/lib/bb/providers.py
@@ -151,7 +151,7 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
if item:
itemstr = " (for item %s)" % item
if preferred_file is None:
- logger.info("preferred version %s of %s not available%s", pv_str, pn, itemstr)
+ logger.warning("preferred version %s of %s not available%s", pv_str, pn, itemstr)
available_vers = []
for file_set in pkg_pn:
for f in file_set:
@@ -163,7 +163,7 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
available_vers.append(ver_str)
if available_vers:
available_vers.sort()
- logger.info("versions of %s available: %s", pn, ' '.join(available_vers))
+ logger.warning("versions of %s available: %s", pn, ' '.join(available_vers))
else:
logger.debug(1, "selecting %s as PREFERRED_VERSION %s of package %s%s", preferred_file, pv_str, pn, itemstr)
diff --git a/lib/bb/runqueue.py b/lib/bb/runqueue.py
index 2bb97b6eb..886eef1f2 100644
--- a/lib/bb/runqueue.py
+++ b/lib/bb/runqueue.py
@@ -24,6 +24,7 @@ import pickle
from multiprocessing import Process
import shlex
import pprint
+import time
bblogger = logging.getLogger("BitBake")
logger = logging.getLogger("BitBake.RunQueue")
@@ -142,6 +143,55 @@ class RunQueueScheduler(object):
self.buildable.append(tid)
self.rev_prio_map = None
+ self.is_pressure_usable()
+
+ def is_pressure_usable(self):
+ """
+ If monitoring pressure, return True if pressure files can be open and read. For example
+ openSUSE /proc/pressure/* files have readable file permissions but when read the error EOPNOTSUPP (Operation not supported)
+ is returned.
+ """
+ if self.rq.max_cpu_pressure or self.rq.max_io_pressure or self.rq.max_memory_pressure:
+ try:
+ with open("/proc/pressure/cpu") as cpu_pressure_fds, \
+ open("/proc/pressure/io") as io_pressure_fds, \
+ open("/proc/pressure/memory") as memory_pressure_fds:
+
+ self.prev_cpu_pressure = cpu_pressure_fds.readline().split()[4].split("=")[1]
+ self.prev_io_pressure = io_pressure_fds.readline().split()[4].split("=")[1]
+ self.prev_memory_pressure = memory_pressure_fds.readline().split()[4].split("=")[1]
+ self.prev_pressure_time = time.time()
+ self.check_pressure = True
+ except:
+ bb.note("The /proc/pressure files can't be read. Continuing build without monitoring pressure")
+ self.check_pressure = False
+ else:
+ self.check_pressure = False
+
+ def exceeds_max_pressure(self):
+ """
+ Monitor the difference in total pressure at least once per second, if
+ BB_PRESSURE_MAX_{CPU|IO|MEMORY} are set, return True if above threshold.
+ """
+ if self.check_pressure:
+ with open("/proc/pressure/cpu") as cpu_pressure_fds, \
+ open("/proc/pressure/io") as io_pressure_fds, \
+ open("/proc/pressure/memory") as memory_pressure_fds:
+ # extract "total" from /proc/pressure/{cpu|io}
+ curr_cpu_pressure = cpu_pressure_fds.readline().split()[4].split("=")[1]
+ curr_io_pressure = io_pressure_fds.readline().split()[4].split("=")[1]
+ curr_memory_pressure = memory_pressure_fds.readline().split()[4].split("=")[1]
+ exceeds_cpu_pressure = self.rq.max_cpu_pressure and (float(curr_cpu_pressure) - float(self.prev_cpu_pressure)) > self.rq.max_cpu_pressure
+ exceeds_io_pressure = self.rq.max_io_pressure and (float(curr_io_pressure) - float(self.prev_io_pressure)) > self.rq.max_io_pressure
+ exceeds_memory_pressure = self.rq.max_memory_pressure and (float(curr_memory_pressure) - float(self.prev_memory_pressure)) > self.rq.max_memory_pressure
+ now = time.time()
+ if now - self.prev_pressure_time > 1.0:
+ self.prev_cpu_pressure = curr_cpu_pressure
+ self.prev_io_pressure = curr_io_pressure
+ self.prev_memory_pressure = curr_memory_pressure
+ self.prev_pressure_time = now
+ return (exceeds_cpu_pressure or exceeds_io_pressure or exceeds_memory_pressure)
+ return False
def next_buildable_task(self):
"""
@@ -155,6 +205,12 @@ class RunQueueScheduler(object):
if not buildable:
return None
+ # Bitbake requires that at least one task be active. Only check for pressure if
+ # this is the case, otherwise the pressure limitation could result in no tasks
+ # being active and no new tasks started thereby, at times, breaking the scheduler.
+ if self.rq.stats.active and self.exceeds_max_pressure():
+ return None
+
# Filter out tasks that have a max number of threads that have been exceeded
skip_buildable = {}
for running in self.rq.runq_running.difference(self.rq.runq_complete):
@@ -1256,8 +1312,8 @@ class RunQueue:
"fakerootnoenv" : self.rqdata.dataCaches[mc].fakerootnoenv,
"sigdata" : bb.parse.siggen.get_taskdata(),
"logdefaultlevel" : bb.msg.loggerDefaultLogLevel,
- "logdefaultverbose" : bb.msg.loggerDefaultVerbose,
- "logdefaultverboselogs" : bb.msg.loggerVerboseLogs,
+ "build_verbose_shell" : self.cooker.configuration.build_verbose_shell,
+ "build_verbose_stdout" : self.cooker.configuration.build_verbose_stdout,
"logdefaultdomain" : bb.msg.loggerDefaultDomains,
"prhost" : self.cooker.prhost,
"buildname" : self.cfgData.getVar("BUILDNAME"),
@@ -1700,6 +1756,9 @@ class RunQueueExecute:
self.number_tasks = int(self.cfgData.getVar("BB_NUMBER_THREADS") or 1)
self.scheduler = self.cfgData.getVar("BB_SCHEDULER") or "speed"
+ self.max_cpu_pressure = self.cfgData.getVar("BB_PRESSURE_MAX_CPU")
+ self.max_io_pressure = self.cfgData.getVar("BB_PRESSURE_MAX_IO")
+ self.max_memory_pressure = self.cfgData.getVar("BB_PRESSURE_MAX_MEMORY")
self.sq_buildable = set()
self.sq_running = set()
@@ -1735,6 +1794,29 @@ class RunQueueExecute:
if self.number_tasks <= 0:
bb.fatal("Invalid BB_NUMBER_THREADS %s" % self.number_tasks)
+ lower_limit = 1.0
+ upper_limit = 1000000.0
+ if self.max_cpu_pressure:
+ self.max_cpu_pressure = float(self.max_cpu_pressure)
+ if self.max_cpu_pressure < lower_limit:
+ bb.fatal("Invalid BB_PRESSURE_MAX_CPU %s, minimum value is %s." % (self.max_cpu_pressure, lower_limit))
+ if self.max_cpu_pressure > upper_limit:
+ bb.warn("Your build will be largely unregulated since BB_PRESSURE_MAX_CPU is set to %s. It is very unlikely that such high pressure will be experienced." % (self.max_cpu_pressure))
+
+ if self.max_io_pressure:
+ self.max_io_pressure = float(self.max_io_pressure)
+ if self.max_io_pressure < lower_limit:
+ bb.fatal("Invalid BB_PRESSURE_MAX_IO %s, minimum value is %s." % (self.max_io_pressure, lower_limit))
+ if self.max_io_pressure > upper_limit:
+ bb.warn("Your build will be largely unregulated since BB_PRESSURE_MAX_IO is set to %s. It is very unlikely that such high pressure will be experienced." % (self.max_io_pressure))
+
+ if self.max_memory_pressure:
+ self.max_memory_pressure = float(self.max_memory_pressure)
+ if self.max_memory_pressure < lower_limit:
+ bb.fatal("Invalid BB_PRESSURE_MAX_MEMORY %s, minimum value is %s." % (self.max_memory_pressure, lower_limit))
+ if self.max_memory_pressure > upper_limit:
+ bb.warn("Your build will be largely unregulated since BB_PRESSURE_MAX_MEMORY is set to %s. It is very unlikely that such high pressure will be experienced." % (self.max_io_pressure))
+
# List of setscene tasks which we've covered
self.scenequeue_covered = set()
# List of tasks which are covered (including setscene ones)
@@ -1893,6 +1975,20 @@ class RunQueueExecute:
self.setbuildable(revdep)
logger.debug(1, "Marking task %s as buildable", revdep)
+ found = None
+ for t in sorted(self.sq_deferred.copy()):
+ if self.sq_deferred[t] == task:
+ # Allow the next deferred task to run. Any other deferred tasks should be deferred after that task.
+ # We shouldn't allow all to run at once as it is prone to races.
+ if not found:
+ bb.note("Deferred task %s now buildable" % t)
+ del self.sq_deferred[t]
+ update_scenequeue_data([t], self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self, summary=False)
+ found = t
+ else:
+ bb.note("Deferring %s after %s" % (t, found))
+ self.sq_deferred[t] = found
+
def task_complete(self, task):
self.stats.taskCompleted()
bb.event.fire(runQueueTaskCompleted(task, self.stats, self.rq), self.cfgData)
@@ -2002,8 +2098,6 @@ class RunQueueExecute:
logger.debug(1, "%s didn't become valid, skipping setscene" % nexttask)
self.sq_task_failoutright(nexttask)
return True
- else:
- self.sqdata.outrightfail.remove(nexttask)
if nexttask in self.sqdata.outrightfail:
logger.debug(2, 'No package found, so skipping setscene task %s', nexttask)
self.sq_task_failoutright(nexttask)
@@ -2154,7 +2248,8 @@ class RunQueueExecute:
if self.sq_deferred:
tid = self.sq_deferred.pop(list(self.sq_deferred.keys())[0])
logger.warning("Runqeueue deadlocked on deferred tasks, forcing task %s" % tid)
- self.sq_task_failoutright(tid)
+ if tid not in self.runq_complete:
+ self.sq_task_failoutright(tid)
return True
if len(self.failed_tids) != 0:
@@ -2268,10 +2363,16 @@ class RunQueueExecute:
self.updated_taskhash_queue.remove((tid, unihash))
if unihash != self.rqdata.runtaskentries[tid].unihash:
- hashequiv_logger.verbose("Task %s unihash changed to %s" % (tid, unihash))
- self.rqdata.runtaskentries[tid].unihash = unihash
- bb.parse.siggen.set_unihash(tid, unihash)
- toprocess.add(tid)
+ # Make sure we rehash any other tasks with the same task hash that we're deferred against.
+ torehash = [tid]
+ for deftid in self.sq_deferred:
+ if self.sq_deferred[deftid] == tid:
+ torehash.append(deftid)
+ for hashtid in torehash:
+ hashequiv_logger.verbose("Task %s unihash changed to %s" % (hashtid, unihash))
+ self.rqdata.runtaskentries[hashtid].unihash = unihash
+ bb.parse.siggen.set_unihash(hashtid, unihash)
+ toprocess.add(hashtid)
# Work out all tasks which depend upon these
total = set()
@@ -2410,6 +2511,14 @@ class RunQueueExecute:
if update_tasks:
self.sqdone = False
+ for mc in sorted(self.sqdata.multiconfigs):
+ for tid in sorted([t[0] for t in update_tasks]):
+ if mc_from_tid(tid) != mc:
+ continue
+ h = pending_hash_index(tid, self.rqdata)
+ if h in self.sqdata.hashes and tid != self.sqdata.hashes[h]:
+ self.sq_deferred[tid] = self.sqdata.hashes[h]
+ bb.note("Deferring %s after %s" % (tid, self.sqdata.hashes[h]))
update_scenequeue_data([t[0] for t in update_tasks], self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self, summary=False)
for (tid, harddepfail, origvalid) in update_tasks:
@@ -2750,6 +2859,19 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
sqdata.stamppresent = set()
sqdata.valid = set()
+ sqdata.hashes = {}
+ sqrq.sq_deferred = {}
+ for mc in sorted(sqdata.multiconfigs):
+ for tid in sorted(sqdata.sq_revdeps):
+ if mc_from_tid(tid) != mc:
+ continue
+ h = pending_hash_index(tid, rqdata)
+ if h not in sqdata.hashes:
+ sqdata.hashes[h] = tid
+ else:
+ sqrq.sq_deferred[tid] = sqdata.hashes[h]
+ bb.note("Deferring %s after %s" % (tid, sqdata.hashes[h]))
+
update_scenequeue_data(sqdata.sq_revdeps, sqdata, rqdata, rq, cooker, stampcache, sqrq, summary=True)
def update_scenequeue_data(tids, sqdata, rqdata, rq, cooker, stampcache, sqrq, summary=True):
@@ -2761,6 +2883,8 @@ def update_scenequeue_data(tids, sqdata, rqdata, rq, cooker, stampcache, sqrq, s
sqdata.stamppresent.remove(tid)
if tid in sqdata.valid:
sqdata.valid.remove(tid)
+ if tid in sqdata.outrightfail:
+ sqdata.outrightfail.remove(tid)
(mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
@@ -2788,28 +2912,20 @@ def update_scenequeue_data(tids, sqdata, rqdata, rq, cooker, stampcache, sqrq, s
sqdata.valid |= rq.validate_hashes(tocheck, cooker.data, len(sqdata.stamppresent), False, summary=summary)
- sqdata.hashes = {}
- for mc in sorted(sqdata.multiconfigs):
- for tid in sorted(sqdata.sq_revdeps):
- if mc_from_tid(tid) != mc:
- continue
- if tid in sqdata.stamppresent:
- continue
- if tid in sqdata.valid:
- continue
- if tid in sqdata.noexec:
- continue
- if tid in sqrq.scenequeue_notcovered:
- continue
- sqdata.outrightfail.add(tid)
-
- h = pending_hash_index(tid, rqdata)
- if h not in sqdata.hashes:
- sqdata.hashes[h] = tid
- else:
- sqrq.sq_deferred[tid] = sqdata.hashes[h]
- bb.note("Deferring %s after %s" % (tid, sqdata.hashes[h]))
-
+ for tid in tids:
+ if tid in sqdata.stamppresent:
+ continue
+ if tid in sqdata.valid:
+ continue
+ if tid in sqdata.noexec:
+ continue
+ if tid in sqrq.scenequeue_covered:
+ continue
+ if tid in sqrq.scenequeue_notcovered:
+ continue
+ if tid in sqrq.sq_deferred:
+ continue
+ sqdata.outrightfail.add(tid)
class TaskFailure(Exception):
"""
diff --git a/lib/bb/server/process.py b/lib/bb/server/process.py
index b66fbe0ac..4bdb84ae3 100644
--- a/lib/bb/server/process.py
+++ b/lib/bb/server/process.py
@@ -25,6 +25,7 @@ import subprocess
import errno
import re
import datetime
+import gc
import bb.server.xmlrpcserver
from bb import daemonize
from multiprocessing import queues
@@ -152,7 +153,8 @@ class ProcessServer(multiprocessing.Process):
conn = newconnections.pop(-1)
fds.append(conn)
self.controllersock = conn
- elif self.timeout is None and not ready:
+
+ elif not self.timeout and not ready:
print("No timeout, exiting.")
self.quit = True
@@ -220,6 +222,7 @@ class ProcessServer(multiprocessing.Process):
try:
print("Running command %s" % command)
self.command_channel_reply.send(self.cooker.command.runCommand(command))
+ print("Command Completed")
except Exception as e:
logger.exception('Exception in server main event loop running command %s (%s)' % (command, str(e)))
@@ -347,7 +350,12 @@ class ServerCommunicator():
logger.info("No reply from server in 30s")
if not self.recv.poll(30):
raise ProcessTimeout("Timeout while waiting for a reply from the bitbake server (60s)")
- return self.recv.get()
+ ret, exc = self.recv.get()
+ # Should probably turn all exceptions in exc back into exceptions?
+ # For now, at least handle BBHandledException
+ if exc and "BBHandledException" in exc:
+ raise bb.BBHandledException()
+ return ret, exc
def updateFeatureSet(self, featureset):
_, error = self.runCommand(["setFeatures", featureset])
@@ -586,7 +594,7 @@ class BBUIEventQueue:
self.reader = ConnectionReader(readfd)
self.t = threading.Thread()
- self.t.setDaemon(True)
+ self.t.daemon = True
self.t.run = self.startCallbackHandler
self.t.start()
@@ -664,8 +672,10 @@ class ConnectionWriter(object):
def send(self, obj):
obj = multiprocessing.reduction.ForkingPickler.dumps(obj)
+ gc.disable()
with self.wlock:
self.writer.send_bytes(obj)
+ gc.enable()
def fileno(self):
return self.writer.fileno()
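
Disabling the collector around send_bytes() keeps a garbage-collection pass from interleaving with the pipe write. Condensed, with a try/finally added here as a safety assumption (the patch itself re-enables unconditionally after the send):

    import gc
    from multiprocessing.reduction import ForkingPickler

    def send(writer, wlock, obj):
        payload = ForkingPickler.dumps(obj)   # pickle outside the lock
        gc.disable()                          # no GC pause mid-write
        try:
            with wlock:
                writer.send_bytes(payload)
        finally:
            gc.enable()
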
diff --git a/lib/bb/siggen.py b/lib/bb/siggen.py
index 26fa7f05c..9d4f67aa9 100644
--- a/lib/bb/siggen.py
+++ b/lib/bb/siggen.py
@@ -318,7 +318,8 @@ class SignatureGeneratorBasic(SignatureGenerator):
else:
sigfile = stampbase + "." + task + ".sigbasedata" + "." + self.basehash[tid]
- bb.utils.mkdirhier(os.path.dirname(sigfile))
+ with bb.utils.umask(0o002):
+ bb.utils.mkdirhier(os.path.dirname(sigfile))
data = {}
data['task'] = task
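
The bb.utils.umask() used here is a context manager that temporarily relaxes the process umask so the sigdata directories come out group-writable. It is roughly equivalent to:

    import os
    from contextlib import contextmanager

    @contextmanager
    def umask(new_mask):
        current = os.umask(new_mask)   # install the new mask, remember the old
        try:
            yield
        finally:
            os.umask(current)          # always restore the previous mask
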
diff --git a/lib/bb/tests/codeparser.py b/lib/bb/tests/codeparser.py
index 826a2d2f6..f1c4f618d 100644
--- a/lib/bb/tests/codeparser.py
+++ b/lib/bb/tests/codeparser.py
@@ -111,9 +111,9 @@ ${D}${libdir}/pkgconfig/*.pc
self.assertExecs(set(["sed"]))
def test_parameter_expansion_modifiers(self):
- # - and + are also valid modifiers for parameter expansion, but are
+ # -,+ and : are also valid modifiers for parameter expansion, but are
# valid characters in bitbake variable names, so are not included here
- for i in ('=', ':-', ':=', '?', ':?', ':+', '#', '%', '##', '%%'):
+ for i in ('=', '?', '#', '%', '##', '%%'):
name = "foo%sbar" % i
self.parseExpression("${%s}" % name)
self.assertNotIn(name, self.references)
@@ -412,6 +412,32 @@ esac
# Check final value
self.assertEqual(self.d.getVar('ANOTHERVAR').split(), ['anothervalue', 'yetanothervalue', 'lastone'])
+ def test_contains_vardeps_override_operators(self):
+ # Check override operators handle dependencies correctly with the contains functionality
+ expr_plain = 'testval'
+ expr_prepend = '${@bb.utils.filter("TESTVAR1", "testval1", d)} '
+ expr_append = ' ${@bb.utils.filter("TESTVAR2", "testval2", d)}'
+ expr_remove = '${@bb.utils.contains("TESTVAR3", "no-testval", "testval", "", d)}'
+ # Check dependencies
+ self.d.setVar('ANOTHERVAR', expr_plain)
+ self.d.prependVar('ANOTHERVAR', expr_prepend)
+ self.d.appendVar('ANOTHERVAR', expr_append)
+ self.d.setVar('ANOTHERVAR:remove', expr_remove)
+ self.d.setVar('TESTVAR1', 'blah')
+ self.d.setVar('TESTVAR2', 'testval2')
+ self.d.setVar('TESTVAR3', 'no-testval')
+ deps, values = bb.data.build_dependencies("ANOTHERVAR", set(self.d.keys()), set(), set(), self.d)
+ self.assertEqual(sorted(values.splitlines()),
+ sorted([
+ expr_prepend + expr_plain + expr_append,
+ '_remove of ' + expr_remove,
+ 'TESTVAR1{testval1} = Unset',
+ 'TESTVAR2{testval2} = Set',
+ 'TESTVAR3{no-testval} = Set',
+ ]))
+ # Check final value
+ self.assertEqual(self.d.getVar('ANOTHERVAR').split(), ['testval2'])
+
#Currently no wildcard support
#def test_vardeps_wildcards(self):
# self.d.setVar("oe_libinstall", "echo test")
diff --git a/lib/bb/tests/event.py b/lib/bb/tests/event.py
index 9229b63d4..9ca7e9bc8 100644
--- a/lib/bb/tests/event.py
+++ b/lib/bb/tests/event.py
@@ -6,17 +6,18 @@
# SPDX-License-Identifier: GPL-2.0-only
#
-import unittest
-import bb
-import logging
-import bb.compat
-import bb.event
+import collections
import importlib
+import logging
+import pickle
import threading
import time
-import pickle
+import unittest
from unittest.mock import Mock
from unittest.mock import call
+
+import bb
+import bb.event
from bb.msg import BBLogFormatter
@@ -75,7 +76,7 @@ class EventHandlingTest(unittest.TestCase):
def _create_test_handlers(self):
""" Method used to create a test handler ordered dictionary """
- test_handlers = bb.compat.OrderedDict()
+ test_handlers = collections.OrderedDict()
test_handlers["handler1"] = self._test_process.handler1
test_handlers["handler2"] = self._test_process.handler2
return test_handlers
@@ -96,7 +97,7 @@ class EventHandlingTest(unittest.TestCase):
def test_clean_class_handlers(self):
""" Test clean_class_handlers method """
- cleanDict = bb.compat.OrderedDict()
+ cleanDict = collections.OrderedDict()
self.assertEqual(cleanDict,
bb.event.clean_class_handlers())
diff --git a/lib/bb/tests/fetch.py b/lib/bb/tests/fetch.py
index 9453c90d2..61dd5ccca 100644
--- a/lib/bb/tests/fetch.py
+++ b/lib/bb/tests/fetch.py
@@ -371,6 +371,7 @@ class FetcherTest(unittest.TestCase):
if os.environ.get("BB_TMPDIR_NOCLEAN") == "yes":
print("Not cleaning up %s. Please remove manually." % self.tempdir)
else:
+ bb.process.run('chmod u+rw -R %s' % self.tempdir)
bb.utils.prunedir(self.tempdir)
class MirrorUriTest(FetcherTest):
@@ -471,7 +472,7 @@ class GitDownloadDirectoryNamingTest(FetcherTest):
super(GitDownloadDirectoryNamingTest, self).setUp()
self.recipe_url = "git://git.openembedded.org/bitbake"
self.recipe_dir = "git.openembedded.org.bitbake"
- self.mirror_url = "git://github.com/openembedded/bitbake.git"
+ self.mirror_url = "git://github.com/openembedded/bitbake.git;protocol=https"
self.mirror_dir = "github.com.openembedded.bitbake.git"
self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40')
@@ -519,7 +520,7 @@ class TarballNamingTest(FetcherTest):
super(TarballNamingTest, self).setUp()
self.recipe_url = "git://git.openembedded.org/bitbake"
self.recipe_tarball = "git2_git.openembedded.org.bitbake.tar.gz"
- self.mirror_url = "git://github.com/openembedded/bitbake.git"
+ self.mirror_url = "git://github.com/openembedded/bitbake.git;protocol=https"
self.mirror_tarball = "git2_github.com.openembedded.bitbake.git.tar.gz"
self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '1')
@@ -553,7 +554,7 @@ class GitShallowTarballNamingTest(FetcherTest):
super(GitShallowTarballNamingTest, self).setUp()
self.recipe_url = "git://git.openembedded.org/bitbake"
self.recipe_tarball = "gitshallow_git.openembedded.org.bitbake_82ea737-1_master.tar.gz"
- self.mirror_url = "git://github.com/openembedded/bitbake.git"
+ self.mirror_url = "git://github.com/openembedded/bitbake.git;protocol=https"
self.mirror_tarball = "gitshallow_github.com.openembedded.bitbake.git_82ea737-1_master.tar.gz"
self.d.setVar('BB_GIT_SHALLOW', '1')
@@ -649,6 +650,58 @@ class FetcherLocalTest(FetcherTest):
with self.assertRaises(bb.fetch2.UnpackError):
self.fetchUnpack(['file://a;subdir=/bin/sh'])
+ def test_local_gitfetch_usehead(self):
+ # Create dummy local Git repo
+ src_dir = tempfile.mkdtemp(dir=self.tempdir,
+ prefix='gitfetch_localusehead_')
+ src_dir = os.path.abspath(src_dir)
+ bb.process.run("git init", cwd=src_dir)
+ bb.process.run("git commit --allow-empty -m'Dummy commit'",
+ cwd=src_dir)
+        # Use a branch other than master
+ bb.process.run("git checkout -b my-devel", cwd=src_dir)
+ bb.process.run("git commit --allow-empty -m'Dummy commit 2'",
+ cwd=src_dir)
+ stdout = bb.process.run("git rev-parse HEAD", cwd=src_dir)
+ orig_rev = stdout[0].strip()
+
+ # Fetch and check revision
+ self.d.setVar("SRCREV", "AUTOINC")
+ url = "git://" + src_dir + ";protocol=file;usehead=1"
+ fetcher = bb.fetch.Fetch([url], self.d)
+ fetcher.download()
+ fetcher.unpack(self.unpackdir)
+ stdout = bb.process.run("git rev-parse HEAD",
+ cwd=os.path.join(self.unpackdir, 'git'))
+ unpack_rev = stdout[0].strip()
+ self.assertEqual(orig_rev, unpack_rev)
+
+ def test_local_gitfetch_usehead_withname(self):
+ # Create dummy local Git repo
+ src_dir = tempfile.mkdtemp(dir=self.tempdir,
+ prefix='gitfetch_localusehead_')
+ src_dir = os.path.abspath(src_dir)
+ bb.process.run("git init", cwd=src_dir)
+ bb.process.run("git commit --allow-empty -m'Dummy commit'",
+ cwd=src_dir)
+        # Use a branch other than master
+ bb.process.run("git checkout -b my-devel", cwd=src_dir)
+ bb.process.run("git commit --allow-empty -m'Dummy commit 2'",
+ cwd=src_dir)
+ stdout = bb.process.run("git rev-parse HEAD", cwd=src_dir)
+ orig_rev = stdout[0].strip()
+
+ # Fetch and check revision
+ self.d.setVar("SRCREV", "AUTOINC")
+ url = "git://" + src_dir + ";protocol=file;usehead=1;name=newName"
+ fetcher = bb.fetch.Fetch([url], self.d)
+ fetcher.download()
+ fetcher.unpack(self.unpackdir)
+ stdout = bb.process.run("git rev-parse HEAD",
+ cwd=os.path.join(self.unpackdir, 'git'))
+ unpack_rev = stdout[0].strip()
+ self.assertEqual(orig_rev, unpack_rev)
+
class FetcherNoNetworkTest(FetcherTest):
def setUp(self):
super().setUp()
@@ -845,6 +898,8 @@ class FetcherNetworkTest(FetcherTest):
prefix='gitfetch_localusehead_')
src_dir = os.path.abspath(src_dir)
bb.process.run("git init", cwd=src_dir)
+ bb.process.run("git config user.email 'you@example.com'", cwd=src_dir)
+ bb.process.run("git config user.name 'Your Name'", cwd=src_dir)
bb.process.run("git commit --allow-empty -m'Dummy commit'",
cwd=src_dir)
        # Use a branch other than master
@@ -918,7 +973,7 @@ class FetcherNetworkTest(FetcherTest):
def test_git_submodule_dbus_broker(self):
        # The following external repositories have shown failures in fetch and unpack operations
# We want to avoid regressions!
- url = "gitsm://github.com/bus1/dbus-broker;protocol=git;rev=fc874afa0992d0c75ec25acb43d344679f0ee7d2;branch=main"
+ url = "gitsm://github.com/bus1/dbus-broker;protocol=https;rev=fc874afa0992d0c75ec25acb43d344679f0ee7d2;branch=main"
fetcher = bb.fetch.Fetch([url], self.d)
fetcher.download()
# Previous cwd has been deleted
@@ -934,7 +989,7 @@ class FetcherNetworkTest(FetcherTest):
@skipIfNoNetwork()
def test_git_submodule_CLI11(self):
- url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=bd4dc911847d0cde7a6b41dfa626a85aab213baf"
+ url = "gitsm://github.com/CLIUtils/CLI11;protocol=https;rev=bd4dc911847d0cde7a6b41dfa626a85aab213baf;branch=main"
fetcher = bb.fetch.Fetch([url], self.d)
fetcher.download()
# Previous cwd has been deleted
@@ -949,12 +1004,12 @@ class FetcherNetworkTest(FetcherTest):
@skipIfNoNetwork()
def test_git_submodule_update_CLI11(self):
""" Prevent regression on update detection not finding missing submodule, or modules without needed commits """
- url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=cf6a99fa69aaefe477cc52e3ef4a7d2d7fa40714"
+ url = "gitsm://github.com/CLIUtils/CLI11;protocol=https;rev=cf6a99fa69aaefe477cc52e3ef4a7d2d7fa40714;branch=main"
fetcher = bb.fetch.Fetch([url], self.d)
fetcher.download()
# CLI11 that pulls in a newer nlohmann-json
- url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=49ac989a9527ee9bb496de9ded7b4872c2e0e5ca"
+ url = "gitsm://github.com/CLIUtils/CLI11;protocol=https;rev=49ac989a9527ee9bb496de9ded7b4872c2e0e5ca;branch=main"
fetcher = bb.fetch.Fetch([url], self.d)
fetcher.download()
# Previous cwd has been deleted
@@ -968,7 +1023,7 @@ class FetcherNetworkTest(FetcherTest):
@skipIfNoNetwork()
def test_git_submodule_aktualizr(self):
- url = "gitsm://github.com/advancedtelematic/aktualizr;branch=master;protocol=git;rev=d00d1a04cc2366d1a5f143b84b9f507f8bd32c44"
+ url = "gitsm://github.com/advancedtelematic/aktualizr;branch=master;protocol=https;rev=d00d1a04cc2366d1a5f143b84b9f507f8bd32c44"
fetcher = bb.fetch.Fetch([url], self.d)
fetcher.download()
# Previous cwd has been deleted
@@ -988,7 +1043,7 @@ class FetcherNetworkTest(FetcherTest):
""" Prevent regression on deeply nested submodules not being checked out properly, even though they were fetched. """
# This repository also has submodules where the module (name), path and url do not align
- url = "gitsm://github.com/azure/iotedge.git;protocol=git;rev=d76e0316c6f324345d77c48a83ce836d09392699"
+ url = "gitsm://github.com/azure/iotedge.git;protocol=https;rev=d76e0316c6f324345d77c48a83ce836d09392699;branch=main"
fetcher = bb.fetch.Fetch([url], self.d)
fetcher.download()
# Previous cwd has been deleted
@@ -1046,7 +1101,7 @@ class SVNTest(FetcherTest):
bb.process.run("svn co %s svnfetch_co" % self.repo_url, cwd=self.tempdir)
        # GitHub will emulate SVN. Use this to check if we're downloading...
- bb.process.run("svn propset svn:externals 'bitbake svn://vcs.pcre.org/pcre2/code' .",
+ bb.process.run("svn propset svn:externals 'bitbake https://github.com/PhilipHazel/pcre2.git' .",
cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk'))
bb.process.run("svn commit --non-interactive -m 'Add external'",
cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk'))
@@ -1164,7 +1219,7 @@ class FetchLatestVersionTest(FetcherTest):
test_git_uris = {
# version pattern "X.Y.Z"
- ("mx-1.0", "git://github.com/clutter-project/mx.git;branch=mx-1.4", "9b1db6b8060bd00b121a692f942404a24ae2960f", "")
+ ("mx-1.0", "git://github.com/clutter-project/mx.git;branch=mx-1.4;protocol=https", "9b1db6b8060bd00b121a692f942404a24ae2960f", "")
: "1.99.4",
# version pattern "vX.Y"
# mirror of git.infradead.org since network issues interfered with testing
@@ -1175,7 +1230,7 @@ class FetchLatestVersionTest(FetcherTest):
("presentproto", "git://git.yoctoproject.org/bbfetchtests-presentproto", "24f3a56e541b0a9e6c6ee76081f441221a120ef9", "")
: "1.0",
# version pattern "pkg_name-vX.Y.Z"
- ("dtc", "git://git.qemu.org/dtc.git", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "")
+ ("dtc", "git://git.yoctoproject.org/bbfetchtests-dtc.git", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "")
: "1.4.0",
# combination version pattern
("sysprof", "git://gitlab.gnome.org/GNOME/sysprof.git;protocol=https", "cd44ee6644c3641507fb53b8a2a69137f2971219", "")
@@ -1187,13 +1242,13 @@ class FetchLatestVersionTest(FetcherTest):
: "20120614",
# packages with a valid UPSTREAM_CHECK_GITTAGREGEX
# mirror of git://anongit.freedesktop.org/xorg/driver/xf86-video-omap since network issues interfered with testing
- ("xf86-video-omap", "git://git.yoctoproject.org/bbfetchtests-xf86-video-omap", "ae0394e687f1a77e966cf72f895da91840dffb8f", "(?P<pver>(\d+\.(\d\.?)*))")
+ ("xf86-video-omap", "git://git.yoctoproject.org/bbfetchtests-xf86-video-omap", "ae0394e687f1a77e966cf72f895da91840dffb8f", r"(?P<pver>(\d+\.(\d\.?)*))")
: "0.4.3",
- ("build-appliance-image", "git://git.yoctoproject.org/poky", "b37dd451a52622d5b570183a81583cc34c2ff555", "(?P<pver>(([0-9][\.|_]?)+[0-9]))")
+ ("build-appliance-image", "git://git.yoctoproject.org/poky", "b37dd451a52622d5b570183a81583cc34c2ff555", r"(?P<pver>(([0-9][\.|_]?)+[0-9]))")
: "11.0.0",
- ("chkconfig-alternatives-native", "git://github.com/kergoth/chkconfig;branch=sysroot", "cd437ecbd8986c894442f8fce1e0061e20f04dee", "chkconfig\-(?P<pver>((\d+[\.\-_]*)+))")
+ ("chkconfig-alternatives-native", "git://github.com/kergoth/chkconfig;branch=sysroot;protocol=https", "cd437ecbd8986c894442f8fce1e0061e20f04dee", r"chkconfig\-(?P<pver>((\d+[\.\-_]*)+))")
: "1.3.59",
- ("remake", "git://github.com/rocky/remake.git", "f05508e521987c8494c92d9c2871aec46307d51d", "(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\d+(\.\d+)*)*))")
+ ("remake", "git://github.com/rocky/remake.git;protocol=https", "f05508e521987c8494c92d9c2871aec46307d51d", r"(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\d+(\.\d+)*)*))")
: "3.82+dbg0.9",
}
@@ -1233,11 +1288,11 @@ class FetchLatestVersionTest(FetcherTest):
#
# http://www.cups.org/software/1.7.2/cups-1.7.2-source.tar.bz2
# https://github.com/apple/cups/releases
- ("cups", "/software/1.7.2/cups-1.7.2-source.tar.bz2", "/apple/cups/releases", "(?P<name>cups\-)(?P<pver>((\d+[\.\-_]*)+))\-source\.tar\.gz")
+ ("cups", "/software/1.7.2/cups-1.7.2-source.tar.bz2", "/apple/cups/releases", r"(?P<name>cups\-)(?P<pver>((\d+[\.\-_]*)+))\-source\.tar\.gz")
: "2.0.0",
# http://download.oracle.com/berkeley-db/db-5.3.21.tar.gz
# http://ftp.debian.org/debian/pool/main/d/db5.3/
- ("db", "/berkeley-db/db-5.3.21.tar.gz", "/debian/pool/main/d/db5.3/", "(?P<name>db5\.3_)(?P<pver>\d+(\.\d+)+).+\.orig\.tar\.xz")
+ ("db", "/berkeley-db/db-5.3.21.tar.gz", "/debian/pool/main/d/db5.3/", r"(?P<name>db5\.3_)(?P<pver>\d+(\.\d+)+).+\.orig\.tar\.xz")
: "5.3.10",
}
@@ -1283,13 +1338,10 @@ class FetchCheckStatusTest(FetcherTest):
"http://downloads.yoctoproject.org/releases/sato/sato-engine-0.2.tar.gz",
"http://downloads.yoctoproject.org/releases/sato/sato-engine-0.3.tar.gz",
"https://yoctoproject.org/",
- "https://yoctoproject.org/documentation",
+ "https://docs.yoctoproject.org/",
"http://downloads.yoctoproject.org/releases/opkg/opkg-0.1.7.tar.gz",
"http://downloads.yoctoproject.org/releases/opkg/opkg-0.3.0.tar.gz",
"ftp://sourceware.org/pub/libffi/libffi-1.20.tar.gz",
- "http://ftp.gnu.org/gnu/autoconf/autoconf-2.60.tar.gz",
- "https://ftp.gnu.org/gnu/chess/gnuchess-5.08.tar.gz",
- "https://ftp.gnu.org/gnu/gmp/gmp-4.0.tar.gz",
# GitHub releases are hosted on Amazon S3, which doesn't support HEAD
"https://github.com/kergoth/tslib/releases/download/1.1/tslib-1.1.tar.xz"
]
@@ -1328,6 +1380,8 @@ class GitMakeShallowTest(FetcherTest):
self.gitdir = os.path.join(self.tempdir, 'gitshallow')
bb.utils.mkdirhier(self.gitdir)
bb.process.run('git init', cwd=self.gitdir)
+ bb.process.run('git config user.email "you@example.com"', cwd=self.gitdir)
+ bb.process.run('git config user.name "Your Name"', cwd=self.gitdir)
def assertRefs(self, expected_refs):
actual_refs = self.git(['for-each-ref', '--format=%(refname)']).splitlines()
@@ -1451,6 +1505,8 @@ class GitShallowTest(FetcherTest):
bb.utils.mkdirhier(self.srcdir)
self.git('init', cwd=self.srcdir)
+ self.git('config user.email "you@example.com"', cwd=self.srcdir)
+ self.git('config user.name "Your Name"', cwd=self.srcdir)
self.d.setVar('WORKDIR', self.tempdir)
self.d.setVar('S', self.gitdir)
self.d.delVar('PREMIRRORS')
@@ -1532,6 +1588,7 @@ class GitShallowTest(FetcherTest):
# fetch and unpack, from the shallow tarball
bb.utils.remove(self.gitdir, recurse=True)
+ bb.process.run('chmod u+w -R "%s"' % ud.clonedir)
bb.utils.remove(ud.clonedir, recurse=True)
bb.utils.remove(ud.clonedir.replace('gitsource', 'gitsubmodule'), recurse=True)
@@ -1684,6 +1741,8 @@ class GitShallowTest(FetcherTest):
smdir = os.path.join(self.tempdir, 'gitsubmodule')
bb.utils.mkdirhier(smdir)
self.git('init', cwd=smdir)
+ self.git('config user.email "you@example.com"', cwd=smdir)
+ self.git('config user.name "Your Name"', cwd=smdir)
# Make this look like it was cloned from a remote...
self.git('config --add remote.origin.url "%s"' % smdir, cwd=smdir)
self.git('config --add remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*"', cwd=smdir)
@@ -1691,7 +1750,7 @@ class GitShallowTest(FetcherTest):
self.add_empty_file('bsub', cwd=smdir)
self.git('submodule init', cwd=self.srcdir)
- self.git('submodule add file://%s' % smdir, cwd=self.srcdir)
+ self.git('-c protocol.file.allow=always submodule add file://%s' % smdir, cwd=self.srcdir)
self.git('submodule update', cwd=self.srcdir)
self.git('commit -m submodule -a', cwd=self.srcdir)
@@ -1714,6 +1773,8 @@ class GitShallowTest(FetcherTest):
smdir = os.path.join(self.tempdir, 'gitsubmodule')
bb.utils.mkdirhier(smdir)
self.git('init', cwd=smdir)
+ self.git('config user.email "you@example.com"', cwd=smdir)
+ self.git('config user.name "Your Name"', cwd=smdir)
# Make this look like it was cloned from a remote...
self.git('config --add remote.origin.url "%s"' % smdir, cwd=smdir)
self.git('config --add remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*"', cwd=smdir)
@@ -1721,7 +1782,7 @@ class GitShallowTest(FetcherTest):
self.add_empty_file('bsub', cwd=smdir)
self.git('submodule init', cwd=self.srcdir)
- self.git('submodule add file://%s' % smdir, cwd=self.srcdir)
+ self.git('-c protocol.file.allow=always submodule add file://%s' % smdir, cwd=self.srcdir)
self.git('submodule update', cwd=self.srcdir)
self.git('commit -m submodule -a', cwd=self.srcdir)
@@ -1756,8 +1817,8 @@ class GitShallowTest(FetcherTest):
self.git('annex init', cwd=self.srcdir)
open(os.path.join(self.srcdir, 'c'), 'w').close()
self.git('annex add c', cwd=self.srcdir)
- self.git('commit -m annex-c -a', cwd=self.srcdir)
- bb.process.run('chmod u+w -R %s' % os.path.join(self.srcdir, '.git', 'annex'))
+ self.git('commit --author "Foo Bar <foo@bar>" -m annex-c -a', cwd=self.srcdir)
+ bb.process.run('chmod u+w -R %s' % self.srcdir)
uri = 'gitannex://%s;protocol=file;subdir=${S}' % self.srcdir
fetcher, ud = self.fetch_shallow(uri)
@@ -1971,7 +2032,7 @@ class GitShallowTest(FetcherTest):
@skipIfNoNetwork()
def test_bitbake(self):
- self.git('remote add --mirror=fetch origin git://github.com/openembedded/bitbake', cwd=self.srcdir)
+ self.git('remote add --mirror=fetch origin https://github.com/openembedded/bitbake', cwd=self.srcdir)
self.git('config core.bare true', cwd=self.srcdir)
self.git('fetch', cwd=self.srcdir)
@@ -2032,6 +2093,8 @@ class GitLfsTest(FetcherTest):
bb.utils.mkdirhier(self.srcdir)
self.git('init', cwd=self.srcdir)
+ self.git('config user.email "you@example.com"', cwd=self.srcdir)
+ self.git('config user.name "Your Name"', cwd=self.srcdir)
with open(os.path.join(self.srcdir, '.gitattributes'), 'wt') as attrs:
attrs.write('*.mp3 filter=lfs -text')
self.git(['add', '.gitattributes'], cwd=self.srcdir)
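
Two fixups recur throughout the test setup changes above: hosts without a global Git identity make 'git commit' abort, and Git 2.38.1 and its security backports (the CVE-2022-39253 hardening) refuse file:// submodule URLs unless explicitly allowed. A sketch of the pattern, with placeholder paths:

    import bb.process

    repo = '/tmp/example-repo'  # placeholder path
    bb.process.run('git init', cwd=repo)
    # Without an identity, 'git commit' fails on pristine CI containers
    bb.process.run('git config user.email "you@example.com"', cwd=repo)
    bb.process.run('git config user.name "Your Name"', cwd=repo)
    # Git >= 2.38.1 defaults protocol.file.allow to "user"; override per command
    bb.process.run('git -c protocol.file.allow=always submodule add file:///tmp/example-sub', cwd=repo)
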
diff --git a/lib/bb/tinfoil.py b/lib/bb/tinfoil.py
index 8c9b6b8ca..8bec8cbaf 100644
--- a/lib/bb/tinfoil.py
+++ b/lib/bb/tinfoil.py
@@ -53,6 +53,10 @@ class TinfoilDataStoreConnectorVarHistory:
def remoteCommand(self, cmd, *args, **kwargs):
return self.tinfoil.run_command('dataStoreConnectorVarHistCmd', self.dsindex, cmd, args, kwargs)
+ def emit(self, var, oval, val, o, d):
+ ret = self.tinfoil.run_command('dataStoreConnectorVarHistCmdEmit', self.dsindex, var, oval, val, d.dsindex)
+ o.write(ret)
+
def __getattr__(self, name):
if not hasattr(bb.data_smart.VariableHistory, name):
raise AttributeError("VariableHistory has no such method %s" % name)
@@ -448,7 +452,7 @@ class Tinfoil:
self.run_actions(config_params)
self.recipes_parsed = True
- def run_command(self, command, *params):
+ def run_command(self, command, *params, handle_events=True):
"""
Run a command on the server (as implemented in bb.command).
Note that there are two types of command - synchronous and
@@ -465,7 +469,16 @@ class Tinfoil:
commandline = [command]
if params:
commandline.extend(params)
- result = self.server_connection.connection.runCommand(commandline)
+ try:
+ result = self.server_connection.connection.runCommand(commandline)
+ finally:
+ while handle_events:
+ event = self.wait_event()
+ if not event:
+ break
+ if isinstance(event, logging.LogRecord):
+ if event.taskpid == 0 or event.levelno > logging.INFO:
+ self.logger.handle(event)
if result[1]:
raise TinfoilCommandFailed(result[1])
return result[0]
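
With this change, run_command() drains pending events even when the command raises, so server-side log records are not silently dropped. A minimal usage sketch, assuming an initialised build directory:

    import bb.tinfoil

    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=True)
        # handle_events=True (the default) forwards queued LogRecords from
        # the server to tinfoil's logger while the command completes
        print(tinfoil.run_command('getVariable', 'BB_VERSION'))
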
diff --git a/lib/bb/ui/knotty.py b/lib/bb/ui/knotty.py
index 87e873d64..d1f74389d 100644
--- a/lib/bb/ui/knotty.py
+++ b/lib/bb/ui/knotty.py
@@ -227,7 +227,9 @@ class TerminalFilter(object):
def keepAlive(self, t):
if not self.cuu:
- print("Bitbake still alive (%ds)" % t)
+ print("Bitbake still alive (no events for %ds). Active tasks:" % t)
+ for t in self.helper.running_tasks:
+ print(t)
sys.stdout.flush()
def updateFooter(self):
@@ -380,14 +382,27 @@ _evt_list = [ "bb.runqueue.runQueueExitWait", "bb.event.LogExecTTY", "logging.Lo
"bb.event.BuildBase", "bb.build.TaskStarted", "bb.build.TaskSucceeded", "bb.build.TaskFailedSilent",
"bb.build.TaskProgress", "bb.event.ProcessStarted", "bb.event.ProcessProgress", "bb.event.ProcessFinished"]
+def drain_events_errorhandling(eventHandler):
+    # We don't have logging set up, but we still need to show any events we see before exiting
+ event = True
+ logger = bb.msg.logger_create('bitbake', sys.stdout)
+ while event:
+ event = eventHandler.waitEvent(0)
+ if isinstance(event, logging.LogRecord):
+ logger.handle(event)
+
def main(server, eventHandler, params, tf = TerminalFilter):
- if not params.observe_only:
- params.updateToServer(server, os.environ.copy())
+ try:
+ if not params.observe_only:
+ params.updateToServer(server, os.environ.copy())
- includelogs, loglines, consolelogfile, logconfigfile = _log_settings_from_server(server, params.observe_only)
+ includelogs, loglines, consolelogfile, logconfigfile = _log_settings_from_server(server, params.observe_only)
- loglevel, _ = bb.msg.constructLogOptions()
+ loglevel, _ = bb.msg.constructLogOptions()
+ except bb.BBHandledException:
+ drain_events_errorhandling(eventHandler)
+ return 1
if params.options.quiet == 0:
console_loglevel = loglevel
@@ -584,7 +599,8 @@ def main(server, eventHandler, params, tf = TerminalFilter):
warnings = 0
taskfailures = []
- printinterval = 5000
+ printintervaldelta = 10 * 60 # 10 minutes
+ printinterval = printintervaldelta
lastprint = time.time()
termfilter = tf(main, helper, console_handlers, params.options.quiet)
@@ -594,7 +610,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
try:
if (lastprint + printinterval) <= time.time():
termfilter.keepAlive(printinterval)
- printinterval += 5000
+ printinterval += printintervaldelta
event = eventHandler.waitEvent(0)
if event is None:
if main.shutdown > 1:
@@ -625,7 +641,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
if isinstance(event, logging.LogRecord):
lastprint = time.time()
- printinterval = 5000
+ printinterval = printintervaldelta
if event.levelno >= bb.msg.BBLogFormatter.ERROR:
errors = errors + 1
return_value = 1
diff --git a/lib/bb/ui/taskexp.py b/lib/bb/ui/taskexp.py
index 2b246710c..c00eaf663 100644
--- a/lib/bb/ui/taskexp.py
+++ b/lib/bb/ui/taskexp.py
@@ -8,6 +8,7 @@
#
import sys
+import traceback
try:
import gi
@@ -196,6 +197,7 @@ def main(server, eventHandler, params):
gtkgui.start()
try:
+ params.updateToServer(server, os.environ.copy())
params.updateFromServer(server)
cmdline = params.parseActions()
if not cmdline:
@@ -218,6 +220,9 @@ def main(server, eventHandler, params):
except client.Fault as x:
print("XMLRPC Fault getting commandline:\n %s" % x)
return
+ except Exception as e:
+ print("Exception in startup:\n %s" % traceback.format_exc())
+ return
if gtkthread.quit.isSet():
return
diff --git a/lib/bb/utils.py b/lib/bb/utils.py
index 5f5767c1d..34fa0b7a6 100644
--- a/lib/bb/utils.py
+++ b/lib/bb/utils.py
@@ -16,7 +16,8 @@ import bb.msg
import multiprocessing
import fcntl
import importlib
-from importlib import machinery
+import importlib.machinery
+import importlib.util
import itertools
import subprocess
import glob
@@ -420,12 +421,14 @@ def better_eval(source, locals, extraglobals = None):
return eval(source, ctx, locals)
@contextmanager
-def fileslocked(files):
+def fileslocked(files, *args, **kwargs):
"""Context manager for locking and unlocking file locks."""
locks = []
if files:
for lockfile in files:
- locks.append(bb.utils.lockfile(lockfile))
+ l = bb.utils.lockfile(lockfile, *args, **kwargs)
+ if l is not None:
+ locks.append(l)
try:
yield
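
fileslocked() now forwards extra arguments straight to lockfile() and only records locks that were actually acquired. A usage sketch with a placeholder lock path:

    import bb.utils

    # shared=True is passed through to bb.utils.lockfile()
    with bb.utils.fileslocked(['/tmp/example.lock'], shared=True):
        pass  # critical section; every acquired lock is released on exit
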
@@ -458,9 +461,16 @@ def lockfile(name, shared=False, retry=True, block=False):
consider the possibility of sending a signal to the process to break
out - at which point you want block=True rather than retry=True.
"""
+ basename = os.path.basename(name)
+ if len(basename) > 255:
+ root, ext = os.path.splitext(basename)
+ basename = root[:255 - len(ext)] + ext
+
dirname = os.path.dirname(name)
mkdirhier(dirname)
+ name = os.path.join(dirname, basename)
+
if not os.access(dirname, os.W_OK):
logger.error("Unable to acquire lock '%s', directory is not writable",
name)
@@ -494,7 +504,7 @@ def lockfile(name, shared=False, retry=True, block=False):
return lf
lf.close()
except OSError as e:
- if e.errno == errno.EACCES:
+ if e.errno == errno.EACCES or e.errno == errno.ENAMETOOLONG:
logger.error("Unable to acquire lock '%s', %s",
e.strerror, name)
sys.exit(1)
@@ -959,6 +969,17 @@ def which(path, item, direction = 0, history = False, executable=False):
return "", hist
return ""
+@contextmanager
+def umask(new_mask):
+ """
+ Context manager to set the umask to a specific mask, and restore it afterwards.
+ """
+ current_mask = os.umask(new_mask)
+ try:
+ yield
+ finally:
+ os.umask(current_mask)
+
def to_boolean(string, default=None):
if not string:
return default
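
A usage sketch for the new bb.utils.umask() context manager, with a placeholder path:

    import os
    import bb.utils

    with bb.utils.umask(0o022):
        # anything created here is masked to at most rwxr-xr-x
        os.mkdir('/tmp/example-dir')
    # the previous umask is restored even if the body raised
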
@@ -1560,21 +1581,22 @@ def set_process_name(name):
# export common proxy variables from the datastore to the environment
def export_proxies(d):
- import os
+ """ export common proxies variables from datastore to environment """
variables = ['http_proxy', 'HTTP_PROXY', 'https_proxy', 'HTTPS_PROXY',
'ftp_proxy', 'FTP_PROXY', 'no_proxy', 'NO_PROXY',
- 'GIT_PROXY_COMMAND']
+ 'GIT_PROXY_COMMAND', 'SSL_CERT_FILE', 'SSL_CERT_DIR']
exported = False
- for v in variables:
- if v in os.environ.keys():
+ origenv = d.getVar("BB_ORIGENV")
+
+ for name in variables:
+ value = d.getVar(name)
+ if not value and origenv:
+ value = origenv.getVar(name)
+ if value:
+ os.environ[name] = value
exported = True
- else:
- v_proxy = d.getVar(v)
- if v_proxy is not None:
- os.environ[v] = v_proxy
- exported = True
return exported
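
The rewritten export_proxies() looks each variable up in the datastore first, then falls back to the original environment snapshot in BB_ORIGENV; SSL_CERT_FILE and SSL_CERT_DIR join the list so fetchers can pick up custom CA settings. A sketch of a caller, where d is any datastore:

    import bb.utils

    def prepare_fetch_environment(d):
        # hypothetical helper: True if at least one variable reached os.environ
        if not bb.utils.export_proxies(d):
            print('no proxy configuration found in datastore or BB_ORIGENV')
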
@@ -1584,7 +1606,9 @@ def load_plugins(logger, plugins, pluginpath):
logger.debug(1, 'Loading plugin %s' % name)
spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] )
if spec:
- return spec.loader.load_module()
+ mod = importlib.util.module_from_spec(spec)
+ spec.loader.exec_module(mod)
+ return mod
logger.debug(1, 'Loading plugins from %s...' % pluginpath)
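
spec.loader.load_module() has been deprecated since Python 3.4; module_from_spec() plus exec_module() is the supported replacement used above. A standalone sketch with placeholder names:

    import importlib.machinery
    import importlib.util

    def load_plugin(name, pluginpath):
        spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath])
        if spec:
            mod = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(mod)  # executes the module body once
            return mod

    plugin = load_plugin('myplugin', '/path/to/plugins')  # placeholders
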
diff --git a/lib/bblayers/action.py b/lib/bblayers/action.py
index d6459d661..d2f9c1bbd 100644
--- a/lib/bblayers/action.py
+++ b/lib/bblayers/action.py
@@ -50,10 +50,10 @@ class ActionPlugin(LayerPlugin):
if not (args.force or notadded):
try:
self.tinfoil.run_command('parseConfiguration')
- except bb.tinfoil.TinfoilUIException:
+ except (bb.tinfoil.TinfoilUIException, bb.BBHandledException):
# Restore the back up copy of bblayers.conf
shutil.copy2(backup, bblayers_conf)
- bb.fatal("Parse failure with the specified layer added")
+ bb.fatal("Parse failure with the specified layer added, aborting.")
else:
for item in notadded:
sys.stderr.write("Specified layer %s is already in BBLAYERS\n" % item)
diff --git a/lib/bblayers/layerindex.py b/lib/bblayers/layerindex.py
index 95b67a662..f64d18e81 100644
--- a/lib/bblayers/layerindex.py
+++ b/lib/bblayers/layerindex.py
@@ -206,6 +206,7 @@ class LayerIndexPlugin(ActionPlugin):
"""
args.show_only = True
args.ignore = []
+ args.shallow = True
self.do_layerindex_fetch(args)
def register_commands(self, sp):
diff --git a/lib/bblayers/query.py b/lib/bblayers/query.py
index e2cc31053..652a3acce 100644
--- a/lib/bblayers/query.py
+++ b/lib/bblayers/query.py
@@ -150,7 +150,7 @@ skipped recipes will also be listed, with a " (skipped)" suffix.
def print_item(f, pn, ver, layer, ispref):
if not selected_layer or layer == selected_layer:
if not bare and f in skiplist:
- skipped = ' (skipped)'
+ skipped = ' (skipped: %s)' % self.tinfoil.cooker.skiplist[f].skipreason
else:
skipped = ''
if show_filenames:
@@ -433,10 +433,10 @@ NOTE: .bbappend files can impact the dependencies.
line = fnfile.readline()
# The "require/include xxx" in conf/machine/*.conf, .inc and .bbclass
- conf_re = re.compile(".*/conf/machine/[^\/]*\.conf$")
- inc_re = re.compile(".*\.inc$")
+ conf_re = re.compile(r".*/conf/machine/[^\/]*\.conf$")
+ inc_re = re.compile(r".*\.inc$")
# The "inherit xxx" in .bbclass
- bbclass_re = re.compile(".*\.bbclass$")
+ bbclass_re = re.compile(r".*\.bbclass$")
for layerdir in self.bblayers:
layername = self.get_layer_name(layerdir)
for dirpath, dirnames, filenames in os.walk(layerdir):
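
The r prefixes added above keep Python from treating sequences like \. and \d as (invalid) string escapes; without them, recent interpreters warn about invalid escape sequences at compile time. For example:

    import re

    # raw string: the backslash reaches the regex engine untouched
    inc_re = re.compile(r".*\.inc$")
    assert inc_re.match("recipes/foo.inc")
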
diff --git a/lib/hashserv/server.py b/lib/hashserv/server.py
index 81050715e..f38a22ad9 100644
--- a/lib/hashserv/server.py
+++ b/lib/hashserv/server.py
@@ -12,6 +12,7 @@ import math
import os
import signal
import socket
+import sys
import time
from . import chunkify, DEFAULT_MAX_CHUNK
@@ -419,9 +420,14 @@ class Server(object):
self._cleanup_socket = None
def start_tcp_server(self, host, port):
- self.server = self.loop.run_until_complete(
- asyncio.start_server(self.handle_client, host, port, loop=self.loop)
- )
+ if sys.version_info[0] == 3 and sys.version_info[1] < 6:
+ self.server = self.loop.run_until_complete(
+ asyncio.start_server(self.handle_client, host, port, loop=self.loop)
+ )
+ else:
+ self.server = self.loop.run_until_complete(
+ asyncio.start_server(self.handle_client, host, port)
+ )
for s in self.server.sockets:
logger.info('Listening on %r' % (s.getsockname(),))
@@ -444,9 +450,14 @@ class Server(object):
try:
# Work around path length limits in AF_UNIX
os.chdir(os.path.dirname(path))
- self.server = self.loop.run_until_complete(
- asyncio.start_unix_server(self.handle_client, os.path.basename(path), loop=self.loop)
- )
+ if sys.version_info[0] == 3 and sys.version_info[1] < 6:
+ self.server = self.loop.run_until_complete(
+ asyncio.start_unix_server(self.handle_client, os.path.basename(path), loop=self.loop)
+ )
+ else:
+ self.server = self.loop.run_until_complete(
+ asyncio.start_unix_server(self.handle_client, os.path.basename(path))
+ )
finally:
os.chdir(cwd)
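
The version gate above reflects asyncio history: before Python 3.6, asyncio.get_event_loop() did not reliably return the running loop from inside a coroutine, so loop= had to be passed explicitly; the parameter was later deprecated in 3.8 and removed in 3.10. On modern interpreters the call is simply:

    import asyncio

    async def handle(reader, writer):
        writer.close()

    async def main():
        # no loop= argument: the running event loop is picked up automatically
        server = await asyncio.start_server(handle, '127.0.0.1', 0)
        print('Listening on %r' % (server.sockets[0].getsockname(),))
        server.close()
        await server.wait_closed()

    asyncio.run(main())
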
diff --git a/lib/layerindexlib/__init__.py b/lib/layerindexlib/__init__.py
index 77196b408..f30ee9e25 100644
--- a/lib/layerindexlib/__init__.py
+++ b/lib/layerindexlib/__init__.py
@@ -6,7 +6,6 @@
import datetime
import logging
-import imp
from collections import OrderedDict
from layerindexlib.plugin import LayerIndexPluginUrlError
diff --git a/lib/toaster/toastergui/api.py b/lib/toaster/toastergui/api.py
index b4cdc335e..e367bd910 100644
--- a/lib/toaster/toastergui/api.py
+++ b/lib/toaster/toastergui/api.py
@@ -11,7 +11,7 @@ import os
import re
import logging
import json
-import subprocess
+import glob
from collections import Counter
from orm.models import Project, ProjectTarget, Build, Layer_Version
@@ -227,20 +227,18 @@ class XhrSetDefaultImageUrl(View):
# same logical name
# * Each project that uses a layer will have its own
# LayerVersion and Project Layer for it
-# * During the Paroject delete process, when the last
+# * During the Project delete process, when the last
# LayerVersion for a 'local_source_dir' layer is deleted
# then the Layer record is deleted to remove orphans
#
def scan_layer_content(layer,layer_version):
# if this is a local layer directory, we can immediately scan its content
- if layer.local_source_dir:
+ if os.path.isdir(layer.local_source_dir):
try:
# recipes-*/*/*.bb
- cmd = '%s %s' % ('ls', os.path.join(layer.local_source_dir,'recipes-*/*/*.bb'))
- recipes_list = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,stderr=subprocess.STDOUT).stdout.read()
- recipes_list = recipes_list.decode("utf-8").strip()
- if recipes_list and 'No such' not in recipes_list:
+ recipes_list = glob.glob(os.path.join(layer.local_source_dir, 'recipes-*/*/*.bb'))
+ for recipe in recipes_list:
-                for recipe in recipes_list.split('\n'):
-                    recipe_path = recipe[recipe.rfind('recipes-'):]
-                    recipe_name = recipe[recipe.rfind('/')+1:].replace('.bb','')
+                recipe_path = recipe[recipe.rfind('recipes-'):]
+                recipe_name = recipe[recipe.rfind('/')+1:].replace('.bb','')
@@ -260,6 +258,9 @@ def scan_layer_content(layer,layer_version):
except Exception as e:
logger.warning("ERROR:scan_layer_content: %s" % e)
+ else:
+ logger.warning("ERROR: wrong path given")
+ raise KeyError("local_source_dir")
class XhrLayer(View):
""" Delete, Get, Add and Update Layer information
@@ -456,15 +457,18 @@ class XhrLayer(View):
'layerdetailurl':
layer_dep.get_detailspage_url(project.pk)})
- # Scan the layer's content and update components
- scan_layer_content(layer,layer_version)
+ # Only scan_layer_content if layer is local
+ if layer_data.get('local_source_dir', None):
+ # Scan the layer's content and update components
+ scan_layer_content(layer,layer_version)
except Layer_Version.DoesNotExist:
return error_response("layer-dep-not-found")
except Project.DoesNotExist:
return error_response("project-not-found")
- except KeyError:
- return error_response("incorrect-parameters")
+ except KeyError as e:
+ _log("KeyError: %s" % e)
+            return error_response("incorrect-parameters")
return JsonResponse({'error': "ok",
'imported_layer': {