-rw-r--r--  SECURITY.md                                               |  24
-rwxr-xr-x  bin/bitbake-getvar                                        |  48
-rw-r--r--  doc/bitbake-user-manual/bitbake-user-manual-fetching.rst  |   4
-rw-r--r--  lib/bb/__init__.py                                        |   7
-rw-r--r--  lib/bb/command.py                                         |  28
-rw-r--r--  lib/bb/cooker.py                                          |  12
-rw-r--r--  lib/bb/data.py                                            |   1
-rw-r--r--  lib/bb/fetch2/__init__.py                                 |   5
-rw-r--r--  lib/bb/fetch2/git.py                                      |  39
-rw-r--r--  lib/bb/fetch2/wget.py                                     |  24
-rw-r--r--  lib/bb/monitordisk.py                                     |   7
-rw-r--r--  lib/bb/runqueue.py                                        | 178
-rw-r--r--  lib/bb/server/process.py                                  |  11
-rw-r--r--  lib/bb/siggen.py                                          |   3
-rw-r--r--  lib/bb/tests/codeparser.py                                |  26
-rw-r--r--  lib/bb/tests/fetch.py                                     |  97
-rw-r--r--  lib/bb/tinfoil.py                                         |  17
-rw-r--r--  lib/bb/ui/knotty.py                                       |  32
-rw-r--r--  lib/bb/utils.py                                           |  52
-rw-r--r--  lib/bblayers/action.py                                    |   4
-rw-r--r--  lib/bblayers/layerindex.py                                |   1
-rw-r--r--  lib/hashserv/server.py                                    |  23
-rw-r--r--  lib/toaster/toastergui/api.py                             |  26
23 files changed, 537 insertions(+), 132 deletions(-)
diff --git a/SECURITY.md b/SECURITY.md
new file mode 100644
index 000000000..7d2ce1f63
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1,24 @@
+How to Report a Potential Vulnerability?
+========================================
+
+If you would like to report a public issue (for example, one with a released
+CVE number), please report it using the
+[Security Bugzilla](https://bugzilla.yoctoproject.org/enter_bug.cgi?product=Security).
+If you have a patch ready, submit it following the same procedure as any other
+patch as described in README.md.
+
+If you are dealing with a not-yet released or urgent issue, please send a
+message to security AT yoctoproject DOT org, including as many details as
+possible: the layer or software module affected, the recipe and its version,
+and any example code, if available.
+
+Branches maintained with security fixes
+---------------------------------------
+
+See [Stable release and LTS](https://wiki.yoctoproject.org/wiki/Stable_Release_and_LTS)
+for detailed info regarding the policies and maintenance of stable branches.
+
+The [Release page](https://wiki.yoctoproject.org/wiki/Releases) contains a list of all
+releases of the Yocto Project. Versions in grey are no longer actively maintained with
+security patches, but well-tested patches may still be accepted for them for
+significant issues.
diff --git a/bin/bitbake-getvar b/bin/bitbake-getvar
new file mode 100755
index 000000000..942321925
--- /dev/null
+++ b/bin/bitbake-getvar
@@ -0,0 +1,48 @@
+#! /usr/bin/env python3
+#
+# Copyright (C) 2021 Richard Purdie
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import argparse
+import io
+import os
+import sys
+
+bindir = os.path.dirname(__file__)
+topdir = os.path.dirname(bindir)
+sys.path[0:0] = [os.path.join(topdir, 'lib')]
+
+import bb.tinfoil
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(description="Bitbake Query Variable")
+ parser.add_argument("variable", help="variable name to query")
+ parser.add_argument("-r", "--recipe", help="Recipe name to query", default=None, required=False)
+ parser.add_argument('-u', '--unexpand', help='Do not expand the value (with --value)', action="store_true")
+ parser.add_argument('-f', '--flag', help='Specify a variable flag to query (with --value)', default=None)
+ parser.add_argument('--value', help='Only report the value, no history and no variable name', action="store_true")
+ args = parser.parse_args()
+
+ if args.unexpand and not args.value:
+ print("--unexpand only makes sense with --value")
+ sys.exit(1)
+
+ if args.flag and not args.value:
+ print("--flag only makes sense with --value")
+ sys.exit(1)
+
+ with bb.tinfoil.Tinfoil(tracking=True) as tinfoil:
+ if args.recipe:
+ tinfoil.prepare(quiet=2)
+ d = tinfoil.parse_recipe(args.recipe)
+ else:
+ tinfoil.prepare(quiet=2, config_only=True)
+ d = tinfoil.config_data
+ if args.flag:
+ print(str(d.getVarFlag(args.variable, args.flag, expand=(not args.unexpand))))
+ elif args.value:
+ print(str(d.getVar(args.variable, expand=(not args.unexpand))))
+ else:
+ bb.data.emit_var(args.variable, d=d, all=True)
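
The script behaves like the other bin/bitbake-* utilities and must be run from an
initialized build environment. Some illustrative invocations (the variable and
recipe names here are examples, not part of the patch):

    $ bitbake-getvar MACHINE                    # full variable history
    $ bitbake-getvar --value MACHINE            # just the expanded value
    $ bitbake-getvar --value --unexpand DL_DIR  # value without expansion
    $ bitbake-getvar -r busybox --value PV      # value in the context of a recipe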
diff --git a/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst b/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst
index 93ac18b78..75e8dd69d 100644
--- a/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst
+++ b/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst
@@ -405,8 +405,8 @@ This fetcher supports the following parameters:
- *"nobranch":* Tells the fetcher to not check the SHA validation for
the branch when set to "1". The default is "0". Set this option for
- the recipe that refers to the commit that is valid for a tag instead
- of the branch.
+ the recipe that refers to the commit that is valid for any namespace
+ (branch, tag, ...) instead of the branch.
- *"bareclone":* Tells the fetcher to clone a bare clone into the
destination directory without checking out a working tree. Only the
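
For illustration, a hypothetical recipe fragment using the nobranch option (the
URL and revision below are made up):

    SRC_URI = "git://example.com/project.git;protocol=https;nobranch=1"
    SRCREV = "0123456789abcdef0123456789abcdef01234567"

Here the SRCREV commit is only reachable from a tag, so the fetcher's usual
branch check would fail without nobranch=1.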
diff --git a/lib/bb/__init__.py b/lib/bb/__init__.py
index c98e23ce3..ba8039497 100644
--- a/lib/bb/__init__.py
+++ b/lib/bb/__init__.py
@@ -15,6 +15,13 @@ import sys
if sys.version_info < (3, 5, 0):
raise RuntimeError("Sorry, python 3.5.0 or later is required for this version of bitbake")
+if sys.version_info < (3, 10, 0):
+ # With python 3.8 and 3.9, we see errors of "libgcc_s.so.1 must be installed for pthread_cancel to work"
+ # https://stackoverflow.com/questions/64797838/libgcc-s-so-1-must-be-installed-for-pthread-cancel-to-work
+ # https://bugs.ams1.psf.io/issue42888
+ # so ensure libgcc_s is loaded early on
+ import ctypes
+ libgcc_s = ctypes.CDLL('libgcc_s.so.1')
class BBHandledException(Exception):
"""
diff --git a/lib/bb/command.py b/lib/bb/command.py
index 6abf38668..b8429b277 100644
--- a/lib/bb/command.py
+++ b/lib/bb/command.py
@@ -20,6 +20,7 @@ Commands are queued in a CommandQueue
from collections import OrderedDict, defaultdict
+import io
import bb.event
import bb.cooker
import bb.remotedata
@@ -74,8 +75,12 @@ class Command:
result = command_method(self, commandline)
except CommandError as exc:
return None, exc.args[0]
- except (Exception, SystemExit):
+ except (Exception, SystemExit) as exc:
import traceback
+ if isinstance(exc, bb.BBHandledException):
+ # We need to start returning real exceptions here. Until we do, we can't
+ # tell if an exception is an instance of bb.BBHandledException
+ return None, "bb.BBHandledException()\n" + traceback.format_exc()
return None, traceback.format_exc()
else:
return result, None
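
Because command results cross a process boundary, exceptions are flattened into
traceback strings here, and the client side (see lib/bb/server/process.py below)
recognises BBHandledException by substring match. A self-contained toy model of
that scheme, with HandledError standing in for bb.BBHandledException:

    import traceback

    class HandledError(Exception):
        """Stand-in for bb.BBHandledException in this sketch."""

    def run_command(func):
        # Server side: flatten any exception into a string for the pipe,
        # prefixing a marker when it was the "handled" kind.
        try:
            return func(), None
        except Exception as exc:
            prefix = "HandledError()\n" if isinstance(exc, HandledError) else ""
            return None, prefix + traceback.format_exc()

    def failing():
        raise HandledError("parse failed")

    result, exc = run_command(failing)
    # Client side: recognise the marker substring and re-raise.
    try:
        if exc and "HandledError" in exc:
            raise HandledError()
    except HandledError:
        print("server-side failure was recognised and re-raised")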
@@ -474,6 +479,17 @@ class CommandsSync:
d = command.remotedatastores[dsindex].varhistory
return getattr(d, method)(*args, **kwargs)
+ def dataStoreConnectorVarHistCmdEmit(self, command, params):
+ dsindex = params[0]
+ var = params[1]
+ oval = params[2]
+ val = params[3]
+ d = command.remotedatastores[params[4]]
+
+ o = io.StringIO()
+ command.remotedatastores[dsindex].varhistory.emit(var, oval, val, o, d)
+ return o.getvalue()
+
def dataStoreConnectorIncHistCmd(self, command, params):
dsindex = params[0]
method = params[1]
@@ -620,6 +636,16 @@ class CommandsAsync:
command.finishAsyncCommand()
findFilesMatchingInDir.needcache = False
+ def testCookerCommandEvent(self, command, params):
+ """
+ Dummy command used by OEQA selftest to test tinfoil without IO
+ """
+ pattern = params[0]
+
+ command.cooker.testCookerCommandEvent(pattern)
+ command.finishAsyncCommand()
+ testCookerCommandEvent.needcache = False
+
def findConfigFilePath(self, command, params):
"""
Find the path of the requested configuration file
diff --git a/lib/bb/cooker.py b/lib/bb/cooker.py
index 000faf82f..6743bce58 100644
--- a/lib/bb/cooker.py
+++ b/lib/bb/cooker.py
@@ -13,7 +13,6 @@ import sys, os, glob, os.path, re, time
import itertools
import logging
import multiprocessing
-import sre_constants
import threading
from io import StringIO, UnsupportedOperation
from contextlib import closing
@@ -411,6 +410,8 @@ class BBCooker:
self.data.disableTracking()
def parseConfiguration(self):
+ self.updateCacheSync()
+
# Change nice level if we're asked to
nice = self.data.getVar("BB_NICE_LEVEL")
if nice:
@@ -1017,6 +1018,11 @@ class BBCooker:
if matches:
bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
+ def testCookerCommandEvent(self, filepattern):
+ # Dummy command used by OEQA selftest to test tinfoil without IO
+ matches = ["A", "B"]
+ bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
+
def findProviders(self, mc=''):
return bb.providers.findProviders(self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
@@ -1788,7 +1794,7 @@ class CookerCollectFiles(object):
try:
re.compile(mask)
bbmasks.append(mask)
- except sre_constants.error:
+ except re.error:
collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)
# Then validate the combined regular expressions. This should never
@@ -1796,7 +1802,7 @@ class CookerCollectFiles(object):
bbmask = "|".join(bbmasks)
try:
bbmask_compiled = re.compile(bbmask)
- except sre_constants.error:
+ except re.error:
collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
bbmask = None
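
re.error is the documented public name for the exception that sre_constants.error
aliased, and the private sre_* modules are deprecated in newer Python releases. A
standalone sketch of the same validation pattern used above for BBMASK:

    import re

    def compile_masks(masks):
        # Compile each mask, skipping any invalid regular expression (sketch).
        valid = []
        for mask in masks:
            try:
                valid.append(re.compile(mask))
            except re.error as exc:
                print("ignoring invalid mask %r: %s" % (mask, exc))
        return valid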
diff --git a/lib/bb/data.py b/lib/bb/data.py
index b0683c518..1d21e00a1 100644
--- a/lib/bb/data.py
+++ b/lib/bb/data.py
@@ -301,6 +301,7 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
value += "\n_remove of %s" % r
deps |= r2.references
deps = deps | (keys & r2.execs)
+ value = handle_contains(value, r2.contains, d)
return value
if "vardepvalue" in varflags:
diff --git a/lib/bb/fetch2/__init__.py b/lib/bb/fetch2/__init__.py
index dc99914cd..3e6555bd6 100644
--- a/lib/bb/fetch2/__init__.py
+++ b/lib/bb/fetch2/__init__.py
@@ -562,6 +562,9 @@ def verify_checksum(ud, d, precomputed={}):
checksum_expected = getattr(ud, "%s_expected" % checksum_id)
+ if checksum_expected == '':
+ checksum_expected = None
+
return {
"id": checksum_id,
"name": checksum_name,
@@ -612,7 +615,7 @@ def verify_checksum(ud, d, precomputed={}):
for ci in checksum_infos:
if ci["expected"] and ci["expected"] != ci["data"]:
- messages.append("File: '%s' has %s checksum %s when %s was " \
+ messages.append("File: '%s' has %s checksum '%s' when '%s' was " \
"expected" % (ud.localpath, ci["id"], ci["data"], ci["expected"]))
bad_checksum = ci["data"]
diff --git a/lib/bb/fetch2/git.py b/lib/bb/fetch2/git.py
index 81335c117..cad1ae820 100644
--- a/lib/bb/fetch2/git.py
+++ b/lib/bb/fetch2/git.py
@@ -44,7 +44,8 @@ Supported SRC_URI options are:
- nobranch
Don't check the SHA validation for branch. set this option for the recipe
- referring to commit which is valid in tag instead of branch.
+ referring to commit which is valid in any namespace (branch, tag, ...)
+ instead of branch.
The default is "0", set nobranch=1 if needed.
- usehead
@@ -63,6 +64,7 @@ import errno
import fnmatch
import os
import re
+import shlex
import subprocess
import tempfile
import bb
@@ -141,6 +143,10 @@ class Git(FetchMethod):
ud.proto = 'file'
else:
ud.proto = "git"
+ if ud.host == "github.com" and ud.proto == "git":
+ # github stopped supporting git protocol
+ # https://github.blog/2021-09-01-improving-git-protocol-security-github/#no-more-unauthenticated-git
+ ud.proto = "https"
if not ud.proto in ('git', 'file', 'ssh', 'http', 'https', 'rsync'):
raise bb.fetch2.ParameterError("Invalid protocol type", ud.url)
@@ -220,7 +226,12 @@ class Git(FetchMethod):
ud.shallow = False
if ud.usehead:
- ud.unresolvedrev['default'] = 'HEAD'
+ # When usehead is set let's associate 'HEAD' with the unresolved
+ # rev of this repository. This will get resolved into a revision
+ # later. If an actual revision happens to have also been provided
+ # then this setting will be overridden.
+ for name in ud.names:
+ ud.unresolvedrev[name] = 'HEAD'
ud.basecmd = d.getVar("FETCHCMD_git") or "git -c core.fsyncobjectfiles=0"
@@ -343,7 +354,7 @@ class Git(FetchMethod):
# We do this since git will use a "-l" option automatically for local urls where possible
if repourl.startswith("file://"):
repourl = repourl[7:]
- clone_cmd = "LANG=C %s clone --bare --mirror \"%s\" %s --progress" % (ud.basecmd, repourl, ud.clonedir)
+ clone_cmd = "LANG=C %s clone --bare --mirror %s %s --progress" % (ud.basecmd, shlex.quote(repourl), ud.clonedir)
if ud.proto.lower() != 'file':
bb.fetch2.check_network_access(d, clone_cmd, ud.url)
progresshandler = GitProgressHandler(d)
@@ -355,8 +366,12 @@ class Git(FetchMethod):
if "origin" in output:
runfetchcmd("%s remote rm origin" % ud.basecmd, d, workdir=ud.clonedir)
- runfetchcmd("%s remote add --mirror=fetch origin \"%s\"" % (ud.basecmd, repourl), d, workdir=ud.clonedir)
- fetch_cmd = "LANG=C %s fetch -f --progress \"%s\" refs/*:refs/*" % (ud.basecmd, repourl)
+ runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, shlex.quote(repourl)), d, workdir=ud.clonedir)
+
+ if ud.nobranch:
+ fetch_cmd = "LANG=C %s fetch -f --progress %s refs/*:refs/*" % (ud.basecmd, shlex.quote(repourl))
+ else:
+ fetch_cmd = "LANG=C %s fetch -f --progress %s refs/heads/*:refs/heads/* refs/tags/*:refs/tags/*" % (ud.basecmd, shlex.quote(repourl))
if ud.proto.lower() != 'file':
bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
progresshandler = GitProgressHandler(d)
@@ -412,14 +427,14 @@ class Git(FetchMethod):
# Create as a temp file and move atomically into position to avoid races
@contextmanager
- def create_atomic(filename, d):
+ def create_atomic(filename):
fd, tfile = tempfile.mkstemp(dir=os.path.dirname(filename))
try:
yield tfile
umask = os.umask(0o666)
os.umask(umask)
os.chmod(tfile, (0o666 & ~umask))
- runfetchcmd("mv %s %s" % (tfile, filename), d)
+ os.rename(tfile, filename)
finally:
os.close(fd)
@@ -433,7 +448,7 @@ class Git(FetchMethod):
self.clone_shallow_local(ud, shallowclone, d)
logger.info("Creating tarball of git repository")
- with create_atomic(ud.fullshallow, d) as tfile:
+ with create_atomic(ud.fullshallow) as tfile:
runfetchcmd("tar -czf %s ." % tfile, d, workdir=shallowclone)
runfetchcmd("touch %s.done" % ud.fullshallow, d)
finally:
@@ -443,7 +458,7 @@ class Git(FetchMethod):
os.unlink(ud.fullmirror)
logger.info("Creating tarball of git repository")
- with create_atomic(ud.fullmirror, d) as tfile:
+ with create_atomic(ud.fullmirror) as tfile:
runfetchcmd("tar -czf %s ." % tfile, d, workdir=ud.clonedir)
runfetchcmd("touch %s.done" % ud.fullmirror, d)
@@ -550,7 +565,7 @@ class Git(FetchMethod):
raise bb.fetch2.UnpackError("No up to date source found: " + "; ".join(source_error), ud.url)
repourl = self._get_repo_url(ud)
- runfetchcmd("%s remote set-url origin \"%s\"" % (ud.basecmd, repourl), d, workdir=destdir)
+ runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, shlex.quote(repourl)), d, workdir=destdir)
if self._contains_lfs(ud, d, destdir):
if need_lfs and not self._find_git_lfs(d):
@@ -678,8 +693,8 @@ class Git(FetchMethod):
d.setVar('_BB_GIT_IN_LSREMOTE', '1')
try:
repourl = self._get_repo_url(ud)
- cmd = "%s ls-remote \"%s\" %s" % \
- (ud.basecmd, repourl, search)
+ cmd = "%s ls-remote %s %s" % \
+ (ud.basecmd, shlex.quote(repourl), search)
if ud.proto.lower() != 'file':
bb.fetch2.check_network_access(d, cmd, repourl)
output = runfetchcmd(cmd, d, True)
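
The shlex.quote() conversions above matter because the repository URL is
interpolated into a shell command line; plain double quotes do not neutralize
embedded quotes or $-expansion. A minimal illustration with a contrived hostile
URL:

    import shlex

    repourl = 'https://example.com/x.git"; rm -rf "$HOME'   # contrived
    unsafe = 'git clone --bare --mirror "%s" /tmp/clone' % repourl
    safe = "git clone --bare --mirror %s /tmp/clone" % shlex.quote(repourl)

    print(unsafe)  # the embedded quote breaks out of the double-quoted string
    print(safe)    # single-quoted by shlex.quote, passed to git as one argument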
diff --git a/lib/bb/fetch2/wget.py b/lib/bb/fetch2/wget.py
index f7d1de26b..368c64433 100644
--- a/lib/bb/fetch2/wget.py
+++ b/lib/bb/fetch2/wget.py
@@ -52,6 +52,12 @@ class WgetProgressHandler(bb.progress.LineFilterProgressHandler):
 class Wget(FetchMethod):
     """Class to fetch urls via 'wget'"""
+
+    # CDNs like CloudFlare may do a 'browser integrity test' which can fail
+    # with the standard wget/urllib User-Agent, so pretend to be a modern
+    # browser.
+    user_agent = "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:84.0) Gecko/20100101 Firefox/84.0"
+
def supports(self, ud, d):
"""
@@ -91,10 +97,9 @@ class Wget(FetchMethod):
fetchcmd = self.basecmd
- if 'downloadfilename' in ud.parm:
- localpath = os.path.join(d.getVar("DL_DIR"), ud.localfile)
- bb.utils.mkdirhier(os.path.dirname(localpath))
- fetchcmd += " -O %s" % shlex.quote(localpath)
+ localpath = os.path.join(d.getVar("DL_DIR"), ud.localfile) + ".tmp"
+ bb.utils.mkdirhier(os.path.dirname(localpath))
+ fetchcmd += " -O %s" % shlex.quote(localpath)
if ud.user and ud.pswd:
fetchcmd += " --user=%s --password=%s --auth-no-challenge" % (ud.user, ud.pswd)
@@ -108,6 +113,10 @@ class Wget(FetchMethod):
self._runwget(ud, d, fetchcmd, False)
+ # Remove the ".tmp" and move the file into position atomically
+ # Our lock prevents multiple writers but mirroring code may grab incomplete files
+ os.rename(localpath, localpath[:-4])
+
# Sanity check since wget can pretend it succeeded when it didn't
# Also, this used to happen if sourceforge sent us to the mirror page
if not os.path.exists(ud.localpath):
@@ -300,7 +309,7 @@ class Wget(FetchMethod):
# Some servers (FusionForge, as used on Alioth) require that the
# optional Accept header is set.
r.add_header("Accept", "*/*")
- r.add_header("User-Agent", "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12")
+ r.add_header("User-Agent", self.user_agent)
def add_basic_auth(login_str, request):
'''Adds Basic auth to http request, pass in login:password as string'''
import base64
@@ -319,7 +328,7 @@ class Wget(FetchMethod):
except (TypeError, ImportError, IOError, netrc.NetrcParseError):
pass
- with opener.open(r) as response:
+ with opener.open(r, timeout=30) as response:
pass
except urllib.error.URLError as e:
if try_again:
@@ -404,9 +413,8 @@ class Wget(FetchMethod):
"""
f = tempfile.NamedTemporaryFile()
with tempfile.TemporaryDirectory(prefix="wget-index-") as workdir, tempfile.NamedTemporaryFile(dir=workdir, prefix="wget-listing-") as f:
- agent = "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12"
fetchcmd = self.basecmd
- fetchcmd += " -O " + f.name + " --user-agent='" + agent + "' '" + uri + "'"
+ fetchcmd += " -O " + f.name + " --user-agent='" + self.user_agent + "' '" + uri + "'"
try:
self._runwget(ud, d, fetchcmd, True, workdir=workdir)
fetchresult = f.read()
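
Downloading to a ".tmp" name and renaming into place is the usual
atomic-publish pattern: readers (here, the mirroring code) either see no file or
a complete file, never a truncated one. A generic sketch of the idea:

    import os

    def atomic_write(path, data):
        # Write to a sibling temp file, then atomically move it into place.
        tmp = path + ".tmp"
        with open(tmp, "wb") as f:
            f.write(data)
            f.flush()
            os.fsync(f.fileno())
        os.rename(tmp, path)  # atomic within one filesystem on POSIX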
diff --git a/lib/bb/monitordisk.py b/lib/bb/monitordisk.py
index e7c07264a..4d243af30 100644
--- a/lib/bb/monitordisk.py
+++ b/lib/bb/monitordisk.py
@@ -229,9 +229,10 @@ class diskMonitor:
freeInode = st.f_favail
if minInode and freeInode < minInode:
- # Some filesystems use dynamic inodes so can't run out
- # (e.g. btrfs). This is reported by the inode count being 0.
- if st.f_files == 0:
+ # Some filesystems use dynamic inodes so can't run out.
+ # This is reported by the inode count being 0 (btrfs) or the free
+ # inode count being -1 (cephfs).
+ if st.f_files == 0 or st.f_favail == -1:
self.devDict[k][2] = None
continue
# Always show warning, the self.checked would always be False if the action is WARN
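
To see the statvfs fields involved (output depends on the filesystem; btrfs,
for instance, reports f_files as 0 because inodes are allocated dynamically):

    import os

    st = os.statvfs("/")
    print("total inodes:", st.f_files)
    print("free inodes (non-root):", st.f_favail)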
diff --git a/lib/bb/runqueue.py b/lib/bb/runqueue.py
index a513b0983..886eef1f2 100644
--- a/lib/bb/runqueue.py
+++ b/lib/bb/runqueue.py
@@ -24,6 +24,7 @@ import pickle
from multiprocessing import Process
import shlex
import pprint
+import time
bblogger = logging.getLogger("BitBake")
logger = logging.getLogger("BitBake.RunQueue")
@@ -142,6 +143,55 @@ class RunQueueScheduler(object):
self.buildable.append(tid)
self.rev_prio_map = None
+ self.is_pressure_usable()
+
+ def is_pressure_usable(self):
+ """
+        If pressure monitoring is requested, check that the pressure files can be
+        opened and read, and record the result in self.check_pressure. For example,
+        openSUSE's /proc/pressure/* files have readable permissions, but reading them
+        fails with EOPNOTSUPP (Operation not supported).
+ """
+ if self.rq.max_cpu_pressure or self.rq.max_io_pressure or self.rq.max_memory_pressure:
+ try:
+ with open("/proc/pressure/cpu") as cpu_pressure_fds, \
+ open("/proc/pressure/io") as io_pressure_fds, \
+ open("/proc/pressure/memory") as memory_pressure_fds:
+
+ self.prev_cpu_pressure = cpu_pressure_fds.readline().split()[4].split("=")[1]
+ self.prev_io_pressure = io_pressure_fds.readline().split()[4].split("=")[1]
+ self.prev_memory_pressure = memory_pressure_fds.readline().split()[4].split("=")[1]
+ self.prev_pressure_time = time.time()
+ self.check_pressure = True
+            except Exception:
+ bb.note("The /proc/pressure files can't be read. Continuing build without monitoring pressure")
+ self.check_pressure = False
+ else:
+ self.check_pressure = False
+
+ def exceeds_max_pressure(self):
+ """
+        Monitor the change in total pressure, re-sampling at most once per second.
+        If any of BB_PRESSURE_MAX_{CPU|IO|MEMORY} is set, return True when the
+        corresponding threshold is exceeded.
+ """
+ if self.check_pressure:
+ with open("/proc/pressure/cpu") as cpu_pressure_fds, \
+ open("/proc/pressure/io") as io_pressure_fds, \
+ open("/proc/pressure/memory") as memory_pressure_fds:
+                # extract "total" from /proc/pressure/{cpu|io|memory}
+ curr_cpu_pressure = cpu_pressure_fds.readline().split()[4].split("=")[1]
+ curr_io_pressure = io_pressure_fds.readline().split()[4].split("=")[1]
+ curr_memory_pressure = memory_pressure_fds.readline().split()[4].split("=")[1]
+ exceeds_cpu_pressure = self.rq.max_cpu_pressure and (float(curr_cpu_pressure) - float(self.prev_cpu_pressure)) > self.rq.max_cpu_pressure
+ exceeds_io_pressure = self.rq.max_io_pressure and (float(curr_io_pressure) - float(self.prev_io_pressure)) > self.rq.max_io_pressure
+ exceeds_memory_pressure = self.rq.max_memory_pressure and (float(curr_memory_pressure) - float(self.prev_memory_pressure)) > self.rq.max_memory_pressure
+ now = time.time()
+ if now - self.prev_pressure_time > 1.0:
+ self.prev_cpu_pressure = curr_cpu_pressure
+ self.prev_io_pressure = curr_io_pressure
+ self.prev_memory_pressure = curr_memory_pressure
+ self.prev_pressure_time = now
+ return (exceeds_cpu_pressure or exceeds_io_pressure or exceeds_memory_pressure)
+ return False
def next_buildable_task(self):
"""
@@ -155,6 +205,12 @@ class RunQueueScheduler(object):
if not buildable:
return None
+        # Bitbake requires that at least one task be active. Only check for pressure if
+        # this is the case, otherwise the pressure limitation could result in no tasks
+        # being active and no new tasks started, which would at times break the scheduler.
+ if self.rq.stats.active and self.exceeds_max_pressure():
+ return None
+
# Filter out tasks that have a max number of threads that have been exceeded
skip_buildable = {}
for running in self.rq.runq_running.difference(self.rq.runq_complete):
@@ -1700,6 +1756,9 @@ class RunQueueExecute:
self.number_tasks = int(self.cfgData.getVar("BB_NUMBER_THREADS") or 1)
self.scheduler = self.cfgData.getVar("BB_SCHEDULER") or "speed"
+ self.max_cpu_pressure = self.cfgData.getVar("BB_PRESSURE_MAX_CPU")
+ self.max_io_pressure = self.cfgData.getVar("BB_PRESSURE_MAX_IO")
+ self.max_memory_pressure = self.cfgData.getVar("BB_PRESSURE_MAX_MEMORY")
self.sq_buildable = set()
self.sq_running = set()
@@ -1735,6 +1794,29 @@ class RunQueueExecute:
if self.number_tasks <= 0:
bb.fatal("Invalid BB_NUMBER_THREADS %s" % self.number_tasks)
+ lower_limit = 1.0
+ upper_limit = 1000000.0
+ if self.max_cpu_pressure:
+ self.max_cpu_pressure = float(self.max_cpu_pressure)
+ if self.max_cpu_pressure < lower_limit:
+ bb.fatal("Invalid BB_PRESSURE_MAX_CPU %s, minimum value is %s." % (self.max_cpu_pressure, lower_limit))
+ if self.max_cpu_pressure > upper_limit:
+ bb.warn("Your build will be largely unregulated since BB_PRESSURE_MAX_CPU is set to %s. It is very unlikely that such high pressure will be experienced." % (self.max_cpu_pressure))
+
+ if self.max_io_pressure:
+ self.max_io_pressure = float(self.max_io_pressure)
+ if self.max_io_pressure < lower_limit:
+ bb.fatal("Invalid BB_PRESSURE_MAX_IO %s, minimum value is %s." % (self.max_io_pressure, lower_limit))
+ if self.max_io_pressure > upper_limit:
+ bb.warn("Your build will be largely unregulated since BB_PRESSURE_MAX_IO is set to %s. It is very unlikely that such high pressure will be experienced." % (self.max_io_pressure))
+
+ if self.max_memory_pressure:
+ self.max_memory_pressure = float(self.max_memory_pressure)
+ if self.max_memory_pressure < lower_limit:
+ bb.fatal("Invalid BB_PRESSURE_MAX_MEMORY %s, minimum value is %s." % (self.max_memory_pressure, lower_limit))
+ if self.max_memory_pressure > upper_limit:
+ bb.warn("Your build will be largely unregulated since BB_PRESSURE_MAX_MEMORY is set to %s. It is very unlikely that such high pressure will be experienced." % (self.max_io_pressure))
+
# List of setscene tasks which we've covered
self.scenequeue_covered = set()
# List of tasks which are covered (including setscene ones)
@@ -1893,6 +1975,20 @@ class RunQueueExecute:
self.setbuildable(revdep)
logger.debug(1, "Marking task %s as buildable", revdep)
+ found = None
+ for t in sorted(self.sq_deferred.copy()):
+ if self.sq_deferred[t] == task:
+ # Allow the next deferred task to run. Any other deferred tasks should be deferred after that task.
+ # We shouldn't allow all to run at once as it is prone to races.
+ if not found:
+ bb.note("Deferred task %s now buildable" % t)
+ del self.sq_deferred[t]
+ update_scenequeue_data([t], self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self, summary=False)
+ found = t
+ else:
+ bb.note("Deferring %s after %s" % (t, found))
+ self.sq_deferred[t] = found
+
def task_complete(self, task):
self.stats.taskCompleted()
bb.event.fire(runQueueTaskCompleted(task, self.stats, self.rq), self.cfgData)
@@ -2002,8 +2098,6 @@ class RunQueueExecute:
logger.debug(1, "%s didn't become valid, skipping setscene" % nexttask)
self.sq_task_failoutright(nexttask)
return True
- else:
- self.sqdata.outrightfail.remove(nexttask)
if nexttask in self.sqdata.outrightfail:
logger.debug(2, 'No package found, so skipping setscene task %s', nexttask)
self.sq_task_failoutright(nexttask)
@@ -2154,7 +2248,8 @@ class RunQueueExecute:
if self.sq_deferred:
tid = self.sq_deferred.pop(list(self.sq_deferred.keys())[0])
logger.warning("Runqeueue deadlocked on deferred tasks, forcing task %s" % tid)
- self.sq_task_failoutright(tid)
+ if tid not in self.runq_complete:
+ self.sq_task_failoutright(tid)
return True
if len(self.failed_tids) != 0:
@@ -2268,10 +2363,16 @@ class RunQueueExecute:
self.updated_taskhash_queue.remove((tid, unihash))
if unihash != self.rqdata.runtaskentries[tid].unihash:
- hashequiv_logger.verbose("Task %s unihash changed to %s" % (tid, unihash))
- self.rqdata.runtaskentries[tid].unihash = unihash
- bb.parse.siggen.set_unihash(tid, unihash)
- toprocess.add(tid)
+ # Make sure we rehash any other tasks with the same task hash that we're deferred against.
+ torehash = [tid]
+ for deftid in self.sq_deferred:
+ if self.sq_deferred[deftid] == tid:
+ torehash.append(deftid)
+ for hashtid in torehash:
+ hashequiv_logger.verbose("Task %s unihash changed to %s" % (hashtid, unihash))
+ self.rqdata.runtaskentries[hashtid].unihash = unihash
+ bb.parse.siggen.set_unihash(hashtid, unihash)
+ toprocess.add(hashtid)
# Work out all tasks which depend upon these
total = set()
@@ -2410,6 +2511,14 @@ class RunQueueExecute:
if update_tasks:
self.sqdone = False
+ for mc in sorted(self.sqdata.multiconfigs):
+ for tid in sorted([t[0] for t in update_tasks]):
+ if mc_from_tid(tid) != mc:
+ continue
+ h = pending_hash_index(tid, self.rqdata)
+ if h in self.sqdata.hashes and tid != self.sqdata.hashes[h]:
+ self.sq_deferred[tid] = self.sqdata.hashes[h]
+ bb.note("Deferring %s after %s" % (tid, self.sqdata.hashes[h]))
update_scenequeue_data([t[0] for t in update_tasks], self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self, summary=False)
for (tid, harddepfail, origvalid) in update_tasks:
@@ -2750,6 +2859,19 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
sqdata.stamppresent = set()
sqdata.valid = set()
+ sqdata.hashes = {}
+ sqrq.sq_deferred = {}
+ for mc in sorted(sqdata.multiconfigs):
+ for tid in sorted(sqdata.sq_revdeps):
+ if mc_from_tid(tid) != mc:
+ continue
+ h = pending_hash_index(tid, rqdata)
+ if h not in sqdata.hashes:
+ sqdata.hashes[h] = tid
+ else:
+ sqrq.sq_deferred[tid] = sqdata.hashes[h]
+ bb.note("Deferring %s after %s" % (tid, sqdata.hashes[h]))
+
update_scenequeue_data(sqdata.sq_revdeps, sqdata, rqdata, rq, cooker, stampcache, sqrq, summary=True)
def update_scenequeue_data(tids, sqdata, rqdata, rq, cooker, stampcache, sqrq, summary=True):
@@ -2761,6 +2883,8 @@ def update_scenequeue_data(tids, sqdata, rqdata, rq, cooker, stampcache, sqrq, s
sqdata.stamppresent.remove(tid)
if tid in sqdata.valid:
sqdata.valid.remove(tid)
+ if tid in sqdata.outrightfail:
+ sqdata.outrightfail.remove(tid)
(mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
@@ -2788,32 +2912,20 @@ def update_scenequeue_data(tids, sqdata, rqdata, rq, cooker, stampcache, sqrq, s
sqdata.valid |= rq.validate_hashes(tocheck, cooker.data, len(sqdata.stamppresent), False, summary=summary)
- sqdata.hashes = {}
- sqrq.sq_deferred = {}
- for mc in sorted(sqdata.multiconfigs):
- for tid in sorted(sqdata.sq_revdeps):
- if mc_from_tid(tid) != mc:
- continue
- if tid in sqdata.stamppresent:
- continue
- if tid in sqdata.valid:
- continue
- if tid in sqdata.noexec:
- continue
- if tid in sqrq.scenequeue_notcovered:
- continue
- if tid in sqrq.scenequeue_covered:
- continue
-
- sqdata.outrightfail.add(tid)
-
- h = pending_hash_index(tid, rqdata)
- if h not in sqdata.hashes:
- sqdata.hashes[h] = tid
- else:
- sqrq.sq_deferred[tid] = sqdata.hashes[h]
- bb.note("Deferring %s after %s" % (tid, sqdata.hashes[h]))
-
+ for tid in tids:
+ if tid in sqdata.stamppresent:
+ continue
+ if tid in sqdata.valid:
+ continue
+ if tid in sqdata.noexec:
+ continue
+ if tid in sqrq.scenequeue_covered:
+ continue
+ if tid in sqrq.scenequeue_notcovered:
+ continue
+ if tid in sqrq.sq_deferred:
+ continue
+ sqdata.outrightfail.add(tid)
class TaskFailure(Exception):
"""
diff --git a/lib/bb/server/process.py b/lib/bb/server/process.py
index 43061eb3c..4bdb84ae3 100644
--- a/lib/bb/server/process.py
+++ b/lib/bb/server/process.py
@@ -25,6 +25,7 @@ import subprocess
import errno
import re
import datetime
+import gc
import bb.server.xmlrpcserver
from bb import daemonize
from multiprocessing import queues
@@ -221,6 +222,7 @@ class ProcessServer(multiprocessing.Process):
try:
print("Running command %s" % command)
self.command_channel_reply.send(self.cooker.command.runCommand(command))
+ print("Command Completed")
except Exception as e:
logger.exception('Exception in server main event loop running command %s (%s)' % (command, str(e)))
@@ -348,7 +350,12 @@ class ServerCommunicator():
logger.info("No reply from server in 30s")
if not self.recv.poll(30):
raise ProcessTimeout("Timeout while waiting for a reply from the bitbake server (60s)")
- return self.recv.get()
+ ret, exc = self.recv.get()
+        # exc is a traceback string from the server; ideally it would be turned
+        # back into a real exception object. For now, at least handle BBHandledException
+ if exc and "BBHandledException" in exc:
+ raise bb.BBHandledException()
+ return ret, exc
def updateFeatureSet(self, featureset):
_, error = self.runCommand(["setFeatures", featureset])
@@ -665,8 +672,10 @@ class ConnectionWriter(object):
def send(self, obj):
obj = multiprocessing.reduction.ForkingPickler.dumps(obj)
+ gc.disable()
with self.wlock:
self.writer.send_bytes(obj)
+ gc.enable()
def fileno(self):
return self.writer.fileno()
diff --git a/lib/bb/siggen.py b/lib/bb/siggen.py
index 26fa7f05c..9d4f67aa9 100644
--- a/lib/bb/siggen.py
+++ b/lib/bb/siggen.py
@@ -318,7 +318,8 @@ class SignatureGeneratorBasic(SignatureGenerator):
else:
sigfile = stampbase + "." + task + ".sigbasedata" + "." + self.basehash[tid]
- bb.utils.mkdirhier(os.path.dirname(sigfile))
+ with bb.utils.umask(0o002):
+ bb.utils.mkdirhier(os.path.dirname(sigfile))
data = {}
data['task'] = task
diff --git a/lib/bb/tests/codeparser.py b/lib/bb/tests/codeparser.py
index f48520479..f1c4f618d 100644
--- a/lib/bb/tests/codeparser.py
+++ b/lib/bb/tests/codeparser.py
@@ -412,6 +412,32 @@ esac
# Check final value
self.assertEqual(self.d.getVar('ANOTHERVAR').split(), ['anothervalue', 'yetanothervalue', 'lastone'])
+ def test_contains_vardeps_override_operators(self):
+ # Check override operators handle dependencies correctly with the contains functionality
+ expr_plain = 'testval'
+ expr_prepend = '${@bb.utils.filter("TESTVAR1", "testval1", d)} '
+ expr_append = ' ${@bb.utils.filter("TESTVAR2", "testval2", d)}'
+ expr_remove = '${@bb.utils.contains("TESTVAR3", "no-testval", "testval", "", d)}'
+ # Check dependencies
+ self.d.setVar('ANOTHERVAR', expr_plain)
+ self.d.prependVar('ANOTHERVAR', expr_prepend)
+ self.d.appendVar('ANOTHERVAR', expr_append)
+ self.d.setVar('ANOTHERVAR:remove', expr_remove)
+ self.d.setVar('TESTVAR1', 'blah')
+ self.d.setVar('TESTVAR2', 'testval2')
+ self.d.setVar('TESTVAR3', 'no-testval')
+ deps, values = bb.data.build_dependencies("ANOTHERVAR", set(self.d.keys()), set(), set(), self.d)
+ self.assertEqual(sorted(values.splitlines()),
+ sorted([
+ expr_prepend + expr_plain + expr_append,
+ '_remove of ' + expr_remove,
+ 'TESTVAR1{testval1} = Unset',
+ 'TESTVAR2{testval2} = Set',
+ 'TESTVAR3{no-testval} = Set',
+ ]))
+ # Check final value
+ self.assertEqual(self.d.getVar('ANOTHERVAR').split(), ['testval2'])
+
#Currently no wildcard support
#def test_vardeps_wildcards(self):
# self.d.setVar("oe_libinstall", "echo test")
diff --git a/lib/bb/tests/fetch.py b/lib/bb/tests/fetch.py
index 8ad107081..61dd5ccca 100644
--- a/lib/bb/tests/fetch.py
+++ b/lib/bb/tests/fetch.py
@@ -472,7 +472,7 @@ class GitDownloadDirectoryNamingTest(FetcherTest):
super(GitDownloadDirectoryNamingTest, self).setUp()
self.recipe_url = "git://git.openembedded.org/bitbake"
self.recipe_dir = "git.openembedded.org.bitbake"
- self.mirror_url = "git://github.com/openembedded/bitbake.git"
+ self.mirror_url = "git://github.com/openembedded/bitbake.git;protocol=https"
self.mirror_dir = "github.com.openembedded.bitbake.git"
self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40')
@@ -520,7 +520,7 @@ class TarballNamingTest(FetcherTest):
super(TarballNamingTest, self).setUp()
self.recipe_url = "git://git.openembedded.org/bitbake"
self.recipe_tarball = "git2_git.openembedded.org.bitbake.tar.gz"
- self.mirror_url = "git://github.com/openembedded/bitbake.git"
+ self.mirror_url = "git://github.com/openembedded/bitbake.git;protocol=https"
self.mirror_tarball = "git2_github.com.openembedded.bitbake.git.tar.gz"
self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '1')
@@ -554,7 +554,7 @@ class GitShallowTarballNamingTest(FetcherTest):
super(GitShallowTarballNamingTest, self).setUp()
self.recipe_url = "git://git.openembedded.org/bitbake"
self.recipe_tarball = "gitshallow_git.openembedded.org.bitbake_82ea737-1_master.tar.gz"
- self.mirror_url = "git://github.com/openembedded/bitbake.git"
+ self.mirror_url = "git://github.com/openembedded/bitbake.git;protocol=https"
self.mirror_tarball = "gitshallow_github.com.openembedded.bitbake.git_82ea737-1_master.tar.gz"
self.d.setVar('BB_GIT_SHALLOW', '1')
@@ -650,6 +650,58 @@ class FetcherLocalTest(FetcherTest):
with self.assertRaises(bb.fetch2.UnpackError):
self.fetchUnpack(['file://a;subdir=/bin/sh'])
+ def test_local_gitfetch_usehead(self):
+ # Create dummy local Git repo
+ src_dir = tempfile.mkdtemp(dir=self.tempdir,
+ prefix='gitfetch_localusehead_')
+ src_dir = os.path.abspath(src_dir)
+ bb.process.run("git init", cwd=src_dir)
+ bb.process.run("git commit --allow-empty -m'Dummy commit'",
+ cwd=src_dir)
+ # Use other branch than master
+ bb.process.run("git checkout -b my-devel", cwd=src_dir)
+ bb.process.run("git commit --allow-empty -m'Dummy commit 2'",
+ cwd=src_dir)
+ stdout = bb.process.run("git rev-parse HEAD", cwd=src_dir)
+ orig_rev = stdout[0].strip()
+
+ # Fetch and check revision
+ self.d.setVar("SRCREV", "AUTOINC")
+ url = "git://" + src_dir + ";protocol=file;usehead=1"
+ fetcher = bb.fetch.Fetch([url], self.d)
+ fetcher.download()
+ fetcher.unpack(self.unpackdir)
+ stdout = bb.process.run("git rev-parse HEAD",
+ cwd=os.path.join(self.unpackdir, 'git'))
+ unpack_rev = stdout[0].strip()
+ self.assertEqual(orig_rev, unpack_rev)
+
+ def test_local_gitfetch_usehead_withname(self):
+ # Create dummy local Git repo
+ src_dir = tempfile.mkdtemp(dir=self.tempdir,
+ prefix='gitfetch_localusehead_')
+ src_dir = os.path.abspath(src_dir)
+ bb.process.run("git init", cwd=src_dir)
+ bb.process.run("git commit --allow-empty -m'Dummy commit'",
+ cwd=src_dir)
+ # Use other branch than master
+ bb.process.run("git checkout -b my-devel", cwd=src_dir)
+ bb.process.run("git commit --allow-empty -m'Dummy commit 2'",
+ cwd=src_dir)
+ stdout = bb.process.run("git rev-parse HEAD", cwd=src_dir)
+ orig_rev = stdout[0].strip()
+
+ # Fetch and check revision
+ self.d.setVar("SRCREV", "AUTOINC")
+ url = "git://" + src_dir + ";protocol=file;usehead=1;name=newName"
+ fetcher = bb.fetch.Fetch([url], self.d)
+ fetcher.download()
+ fetcher.unpack(self.unpackdir)
+ stdout = bb.process.run("git rev-parse HEAD",
+ cwd=os.path.join(self.unpackdir, 'git'))
+ unpack_rev = stdout[0].strip()
+ self.assertEqual(orig_rev, unpack_rev)
+
class FetcherNoNetworkTest(FetcherTest):
def setUp(self):
super().setUp()
@@ -921,7 +973,7 @@ class FetcherNetworkTest(FetcherTest):
def test_git_submodule_dbus_broker(self):
# The following external repositories have show failures in fetch and unpack operations
# We want to avoid regressions!
- url = "gitsm://github.com/bus1/dbus-broker;protocol=git;rev=fc874afa0992d0c75ec25acb43d344679f0ee7d2;branch=main"
+ url = "gitsm://github.com/bus1/dbus-broker;protocol=https;rev=fc874afa0992d0c75ec25acb43d344679f0ee7d2;branch=main"
fetcher = bb.fetch.Fetch([url], self.d)
fetcher.download()
# Previous cwd has been deleted
@@ -937,7 +989,7 @@ class FetcherNetworkTest(FetcherTest):
@skipIfNoNetwork()
def test_git_submodule_CLI11(self):
- url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=bd4dc911847d0cde7a6b41dfa626a85aab213baf;branch=main"
+ url = "gitsm://github.com/CLIUtils/CLI11;protocol=https;rev=bd4dc911847d0cde7a6b41dfa626a85aab213baf;branch=main"
fetcher = bb.fetch.Fetch([url], self.d)
fetcher.download()
# Previous cwd has been deleted
@@ -952,12 +1004,12 @@ class FetcherNetworkTest(FetcherTest):
@skipIfNoNetwork()
def test_git_submodule_update_CLI11(self):
""" Prevent regression on update detection not finding missing submodule, or modules without needed commits """
- url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=cf6a99fa69aaefe477cc52e3ef4a7d2d7fa40714;branch=main"
+ url = "gitsm://github.com/CLIUtils/CLI11;protocol=https;rev=cf6a99fa69aaefe477cc52e3ef4a7d2d7fa40714;branch=main"
fetcher = bb.fetch.Fetch([url], self.d)
fetcher.download()
# CLI11 that pulls in a newer nlohmann-json
- url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=49ac989a9527ee9bb496de9ded7b4872c2e0e5ca;branch=main"
+ url = "gitsm://github.com/CLIUtils/CLI11;protocol=https;rev=49ac989a9527ee9bb496de9ded7b4872c2e0e5ca;branch=main"
fetcher = bb.fetch.Fetch([url], self.d)
fetcher.download()
# Previous cwd has been deleted
@@ -971,7 +1023,7 @@ class FetcherNetworkTest(FetcherTest):
@skipIfNoNetwork()
def test_git_submodule_aktualizr(self):
- url = "gitsm://github.com/advancedtelematic/aktualizr;branch=master;protocol=git;rev=d00d1a04cc2366d1a5f143b84b9f507f8bd32c44"
+ url = "gitsm://github.com/advancedtelematic/aktualizr;branch=master;protocol=https;rev=d00d1a04cc2366d1a5f143b84b9f507f8bd32c44"
fetcher = bb.fetch.Fetch([url], self.d)
fetcher.download()
# Previous cwd has been deleted
@@ -991,7 +1043,7 @@ class FetcherNetworkTest(FetcherTest):
""" Prevent regression on deeply nested submodules not being checked out properly, even though they were fetched. """
# This repository also has submodules where the module (name), path and url do not align
- url = "gitsm://github.com/azure/iotedge.git;protocol=git;rev=d76e0316c6f324345d77c48a83ce836d09392699"
+ url = "gitsm://github.com/azure/iotedge.git;protocol=https;rev=d76e0316c6f324345d77c48a83ce836d09392699;branch=main"
fetcher = bb.fetch.Fetch([url], self.d)
fetcher.download()
# Previous cwd has been deleted
@@ -1049,7 +1101,7 @@ class SVNTest(FetcherTest):
bb.process.run("svn co %s svnfetch_co" % self.repo_url, cwd=self.tempdir)
# Github will emulate SVN. Use this to check if we're downloading...
- bb.process.run("svn propset svn:externals 'bitbake svn://vcs.pcre.org/pcre2/code' .",
+ bb.process.run("svn propset svn:externals 'bitbake https://github.com/PhilipHazel/pcre2.git' .",
cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk'))
bb.process.run("svn commit --non-interactive -m 'Add external'",
cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk'))
@@ -1167,7 +1219,7 @@ class FetchLatestVersionTest(FetcherTest):
test_git_uris = {
# version pattern "X.Y.Z"
- ("mx-1.0", "git://github.com/clutter-project/mx.git;branch=mx-1.4", "9b1db6b8060bd00b121a692f942404a24ae2960f", "")
+ ("mx-1.0", "git://github.com/clutter-project/mx.git;branch=mx-1.4;protocol=https", "9b1db6b8060bd00b121a692f942404a24ae2960f", "")
: "1.99.4",
# version pattern "vX.Y"
# mirror of git.infradead.org since network issues interfered with testing
@@ -1190,13 +1242,13 @@ class FetchLatestVersionTest(FetcherTest):
: "20120614",
# packages with a valid UPSTREAM_CHECK_GITTAGREGEX
# mirror of git://anongit.freedesktop.org/xorg/driver/xf86-video-omap since network issues interfered with testing
- ("xf86-video-omap", "git://git.yoctoproject.org/bbfetchtests-xf86-video-omap", "ae0394e687f1a77e966cf72f895da91840dffb8f", "(?P<pver>(\d+\.(\d\.?)*))")
+ ("xf86-video-omap", "git://git.yoctoproject.org/bbfetchtests-xf86-video-omap", "ae0394e687f1a77e966cf72f895da91840dffb8f", r"(?P<pver>(\d+\.(\d\.?)*))")
: "0.4.3",
- ("build-appliance-image", "git://git.yoctoproject.org/poky", "b37dd451a52622d5b570183a81583cc34c2ff555", "(?P<pver>(([0-9][\.|_]?)+[0-9]))")
+ ("build-appliance-image", "git://git.yoctoproject.org/poky", "b37dd451a52622d5b570183a81583cc34c2ff555", r"(?P<pver>(([0-9][\.|_]?)+[0-9]))")
: "11.0.0",
- ("chkconfig-alternatives-native", "git://github.com/kergoth/chkconfig;branch=sysroot", "cd437ecbd8986c894442f8fce1e0061e20f04dee", "chkconfig\-(?P<pver>((\d+[\.\-_]*)+))")
+ ("chkconfig-alternatives-native", "git://github.com/kergoth/chkconfig;branch=sysroot;protocol=https", "cd437ecbd8986c894442f8fce1e0061e20f04dee", r"chkconfig\-(?P<pver>((\d+[\.\-_]*)+))")
: "1.3.59",
- ("remake", "git://github.com/rocky/remake.git", "f05508e521987c8494c92d9c2871aec46307d51d", "(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\d+(\.\d+)*)*))")
+ ("remake", "git://github.com/rocky/remake.git;protocol=https", "f05508e521987c8494c92d9c2871aec46307d51d", r"(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\d+(\.\d+)*)*))")
: "3.82+dbg0.9",
}
@@ -1236,11 +1288,11 @@ class FetchLatestVersionTest(FetcherTest):
#
# http://www.cups.org/software/1.7.2/cups-1.7.2-source.tar.bz2
# https://github.com/apple/cups/releases
- ("cups", "/software/1.7.2/cups-1.7.2-source.tar.bz2", "/apple/cups/releases", "(?P<name>cups\-)(?P<pver>((\d+[\.\-_]*)+))\-source\.tar\.gz")
+ ("cups", "/software/1.7.2/cups-1.7.2-source.tar.bz2", "/apple/cups/releases", r"(?P<name>cups\-)(?P<pver>((\d+[\.\-_]*)+))\-source\.tar\.gz")
: "2.0.0",
# http://download.oracle.com/berkeley-db/db-5.3.21.tar.gz
# http://ftp.debian.org/debian/pool/main/d/db5.3/
- ("db", "/berkeley-db/db-5.3.21.tar.gz", "/debian/pool/main/d/db5.3/", "(?P<name>db5\.3_)(?P<pver>\d+(\.\d+)+).+\.orig\.tar\.xz")
+ ("db", "/berkeley-db/db-5.3.21.tar.gz", "/debian/pool/main/d/db5.3/", r"(?P<name>db5\.3_)(?P<pver>\d+(\.\d+)+).+\.orig\.tar\.xz")
: "5.3.10",
}
@@ -1286,13 +1338,10 @@ class FetchCheckStatusTest(FetcherTest):
"http://downloads.yoctoproject.org/releases/sato/sato-engine-0.2.tar.gz",
"http://downloads.yoctoproject.org/releases/sato/sato-engine-0.3.tar.gz",
"https://yoctoproject.org/",
- "https://yoctoproject.org/documentation",
+ "https://docs.yoctoproject.org/",
"http://downloads.yoctoproject.org/releases/opkg/opkg-0.1.7.tar.gz",
"http://downloads.yoctoproject.org/releases/opkg/opkg-0.3.0.tar.gz",
"ftp://sourceware.org/pub/libffi/libffi-1.20.tar.gz",
- "http://ftp.gnu.org/gnu/autoconf/autoconf-2.60.tar.gz",
- "https://ftp.gnu.org/gnu/chess/gnuchess-5.08.tar.gz",
- "https://ftp.gnu.org/gnu/gmp/gmp-4.0.tar.gz",
# GitHub releases are hosted on Amazon S3, which doesn't support HEAD
"https://github.com/kergoth/tslib/releases/download/1.1/tslib-1.1.tar.xz"
]
@@ -1701,7 +1750,7 @@ class GitShallowTest(FetcherTest):
self.add_empty_file('bsub', cwd=smdir)
self.git('submodule init', cwd=self.srcdir)
- self.git('submodule add file://%s' % smdir, cwd=self.srcdir)
+ self.git('-c protocol.file.allow=always submodule add file://%s' % smdir, cwd=self.srcdir)
self.git('submodule update', cwd=self.srcdir)
self.git('commit -m submodule -a', cwd=self.srcdir)
@@ -1733,7 +1782,7 @@ class GitShallowTest(FetcherTest):
self.add_empty_file('bsub', cwd=smdir)
self.git('submodule init', cwd=self.srcdir)
- self.git('submodule add file://%s' % smdir, cwd=self.srcdir)
+ self.git('-c protocol.file.allow=always submodule add file://%s' % smdir, cwd=self.srcdir)
self.git('submodule update', cwd=self.srcdir)
self.git('commit -m submodule -a', cwd=self.srcdir)
@@ -1983,7 +2032,7 @@ class GitShallowTest(FetcherTest):
@skipIfNoNetwork()
def test_bitbake(self):
- self.git('remote add --mirror=fetch origin git://github.com/openembedded/bitbake', cwd=self.srcdir)
+ self.git('remote add --mirror=fetch origin https://github.com/openembedded/bitbake', cwd=self.srcdir)
self.git('config core.bare true', cwd=self.srcdir)
self.git('fetch', cwd=self.srcdir)
diff --git a/lib/bb/tinfoil.py b/lib/bb/tinfoil.py
index 8c9b6b8ca..8bec8cbaf 100644
--- a/lib/bb/tinfoil.py
+++ b/lib/bb/tinfoil.py
@@ -53,6 +53,10 @@ class TinfoilDataStoreConnectorVarHistory:
def remoteCommand(self, cmd, *args, **kwargs):
return self.tinfoil.run_command('dataStoreConnectorVarHistCmd', self.dsindex, cmd, args, kwargs)
+ def emit(self, var, oval, val, o, d):
+ ret = self.tinfoil.run_command('dataStoreConnectorVarHistCmdEmit', self.dsindex, var, oval, val, d.dsindex)
+ o.write(ret)
+
def __getattr__(self, name):
if not hasattr(bb.data_smart.VariableHistory, name):
raise AttributeError("VariableHistory has no such method %s" % name)
@@ -448,7 +452,7 @@ class Tinfoil:
self.run_actions(config_params)
self.recipes_parsed = True
- def run_command(self, command, *params):
+ def run_command(self, command, *params, handle_events=True):
"""
Run a command on the server (as implemented in bb.command).
Note that there are two types of command - synchronous and
@@ -465,7 +469,16 @@ class Tinfoil:
commandline = [command]
if params:
commandline.extend(params)
- result = self.server_connection.connection.runCommand(commandline)
+ try:
+ result = self.server_connection.connection.runCommand(commandline)
+ finally:
+ while handle_events:
+ event = self.wait_event()
+ if not event:
+ break
+ if isinstance(event, logging.LogRecord):
+ if event.taskpid == 0 or event.levelno > logging.INFO:
+ self.logger.handle(event)
if result[1]:
raise TinfoilCommandFailed(result[1])
return result[0]
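
A minimal sketch of the client side of the new varhistory emit proxy, assuming
an initialized build directory (the variable name is just an example). This is
the same path the new bin/bitbake-getvar script exercises:

    import sys
    import bb.tinfoil
    import bb.data

    with bb.tinfoil.Tinfoil(tracking=True) as tinfoil:
        tinfoil.prepare(config_only=True)
        d = tinfoil.config_data
        # emit_var() calls d.varhistory.emit(), which now round-trips
        # through the dataStoreConnectorVarHistCmdEmit command on the server
        bb.data.emit_var("BBPATH", o=sys.stdout, d=d, all=True)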
diff --git a/lib/bb/ui/knotty.py b/lib/bb/ui/knotty.py
index 87e873d64..d1f74389d 100644
--- a/lib/bb/ui/knotty.py
+++ b/lib/bb/ui/knotty.py
@@ -227,7 +227,9 @@ class TerminalFilter(object):
def keepAlive(self, t):
if not self.cuu:
- print("Bitbake still alive (%ds)" % t)
+ print("Bitbake still alive (no events for %ds). Active tasks:" % t)
+ for t in self.helper.running_tasks:
+ print(t)
sys.stdout.flush()
def updateFooter(self):
@@ -380,14 +382,27 @@ _evt_list = [ "bb.runqueue.runQueueExitWait", "bb.event.LogExecTTY", "logging.Lo
"bb.event.BuildBase", "bb.build.TaskStarted", "bb.build.TaskSucceeded", "bb.build.TaskFailedSilent",
"bb.build.TaskProgress", "bb.event.ProcessStarted", "bb.event.ProcessProgress", "bb.event.ProcessFinished"]
+def drain_events_errorhandling(eventHandler):
+    # We don't have logging set up, but we do need to show any events we see before exiting
+ event = True
+ logger = bb.msg.logger_create('bitbake', sys.stdout)
+ while event:
+ event = eventHandler.waitEvent(0)
+ if isinstance(event, logging.LogRecord):
+ logger.handle(event)
+
def main(server, eventHandler, params, tf = TerminalFilter):
- if not params.observe_only:
- params.updateToServer(server, os.environ.copy())
+ try:
+ if not params.observe_only:
+ params.updateToServer(server, os.environ.copy())
- includelogs, loglines, consolelogfile, logconfigfile = _log_settings_from_server(server, params.observe_only)
+ includelogs, loglines, consolelogfile, logconfigfile = _log_settings_from_server(server, params.observe_only)
- loglevel, _ = bb.msg.constructLogOptions()
+ loglevel, _ = bb.msg.constructLogOptions()
+ except bb.BBHandledException:
+ drain_events_errorhandling(eventHandler)
+ return 1
if params.options.quiet == 0:
console_loglevel = loglevel
@@ -584,7 +599,8 @@ def main(server, eventHandler, params, tf = TerminalFilter):
warnings = 0
taskfailures = []
- printinterval = 5000
+ printintervaldelta = 10 * 60 # 10 minutes
+ printinterval = printintervaldelta
lastprint = time.time()
termfilter = tf(main, helper, console_handlers, params.options.quiet)
@@ -594,7 +610,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
try:
if (lastprint + printinterval) <= time.time():
termfilter.keepAlive(printinterval)
- printinterval += 5000
+ printinterval += printintervaldelta
event = eventHandler.waitEvent(0)
if event is None:
if main.shutdown > 1:
@@ -625,7 +641,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
if isinstance(event, logging.LogRecord):
lastprint = time.time()
- printinterval = 5000
+ printinterval = printintervaldelta
if event.levelno >= bb.msg.BBLogFormatter.ERROR:
errors = errors + 1
return_value = 1
diff --git a/lib/bb/utils.py b/lib/bb/utils.py
index 5f5767c1d..34fa0b7a6 100644
--- a/lib/bb/utils.py
+++ b/lib/bb/utils.py
@@ -16,7 +16,8 @@ import bb.msg
import multiprocessing
import fcntl
import importlib
-from importlib import machinery
+import importlib.machinery
+import importlib.util
import itertools
import subprocess
import glob
@@ -420,12 +421,14 @@ def better_eval(source, locals, extraglobals = None):
return eval(source, ctx, locals)
@contextmanager
-def fileslocked(files):
+def fileslocked(files, *args, **kwargs):
"""Context manager for locking and unlocking file locks."""
locks = []
if files:
for lockfile in files:
- locks.append(bb.utils.lockfile(lockfile))
+ l = bb.utils.lockfile(lockfile, *args, **kwargs)
+ if l is not None:
+ locks.append(l)
try:
yield
@@ -458,9 +461,16 @@ def lockfile(name, shared=False, retry=True, block=False):
consider the possibility of sending a signal to the process to break
out - at which point you want block=True rather than retry=True.
"""
+ basename = os.path.basename(name)
+ if len(basename) > 255:
+ root, ext = os.path.splitext(basename)
+ basename = root[:255 - len(ext)] + ext
+
dirname = os.path.dirname(name)
mkdirhier(dirname)
+ name = os.path.join(dirname, basename)
+
if not os.access(dirname, os.W_OK):
logger.error("Unable to acquire lock '%s', directory is not writable",
name)
@@ -494,7 +504,7 @@ def lockfile(name, shared=False, retry=True, block=False):
return lf
lf.close()
except OSError as e:
- if e.errno == errno.EACCES:
+ if e.errno == errno.EACCES or e.errno == errno.ENAMETOOLONG:
logger.error("Unable to acquire lock '%s', %s",
e.strerror, name)
sys.exit(1)
@@ -959,6 +969,17 @@ def which(path, item, direction = 0, history = False, executable=False):
return "", hist
return ""
+@contextmanager
+def umask(new_mask):
+ """
+ Context manager to set the umask to a specific mask, and restore it afterwards.
+ """
+ current_mask = os.umask(new_mask)
+ try:
+ yield
+ finally:
+ os.umask(current_mask)
+
def to_boolean(string, default=None):
if not string:
return default
@@ -1560,21 +1581,22 @@ def set_process_name(name):
# export common proxies variables from datastore to environment
def export_proxies(d):
- import os
+ """ export common proxies variables from datastore to environment """
variables = ['http_proxy', 'HTTP_PROXY', 'https_proxy', 'HTTPS_PROXY',
'ftp_proxy', 'FTP_PROXY', 'no_proxy', 'NO_PROXY',
- 'GIT_PROXY_COMMAND']
+ 'GIT_PROXY_COMMAND', 'SSL_CERT_FILE', 'SSL_CERT_DIR']
exported = False
- for v in variables:
- if v in os.environ.keys():
+ origenv = d.getVar("BB_ORIGENV")
+
+ for name in variables:
+ value = d.getVar(name)
+ if not value and origenv:
+ value = origenv.getVar(name)
+ if value:
+ os.environ[name] = value
exported = True
- else:
- v_proxy = d.getVar(v)
- if v_proxy is not None:
- os.environ[v] = v_proxy
- exported = True
return exported
@@ -1584,7 +1606,9 @@ def load_plugins(logger, plugins, pluginpath):
logger.debug(1, 'Loading plugin %s' % name)
spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] )
if spec:
- return spec.loader.load_module()
+ mod = importlib.util.module_from_spec(spec)
+ spec.loader.exec_module(mod)
+ return mod
logger.debug(1, 'Loading plugins from %s...' % pluginpath)
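
Short usage sketches for the two utils changes above (paths and mode are
illustrative):

    import bb.utils

    # Temporarily relax the umask; the old mask is restored even on error.
    with bb.utils.umask(0o002):
        bb.utils.mkdirhier("/tmp/shared/dir")

    # fileslocked() now forwards extra arguments to lockfile(), e.g. shared locks:
    with bb.utils.fileslocked(["/tmp/shared/dir.lock"], shared=True):
        pass  # critical section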
diff --git a/lib/bblayers/action.py b/lib/bblayers/action.py
index d6459d661..d2f9c1bbd 100644
--- a/lib/bblayers/action.py
+++ b/lib/bblayers/action.py
@@ -50,10 +50,10 @@ class ActionPlugin(LayerPlugin):
if not (args.force or notadded):
try:
self.tinfoil.run_command('parseConfiguration')
- except bb.tinfoil.TinfoilUIException:
+ except (bb.tinfoil.TinfoilUIException, bb.BBHandledException):
# Restore the back up copy of bblayers.conf
shutil.copy2(backup, bblayers_conf)
- bb.fatal("Parse failure with the specified layer added")
+ bb.fatal("Parse failure with the specified layer added, aborting.")
else:
for item in notadded:
sys.stderr.write("Specified layer %s is already in BBLAYERS\n" % item)
diff --git a/lib/bblayers/layerindex.py b/lib/bblayers/layerindex.py
index 95b67a662..f64d18e81 100644
--- a/lib/bblayers/layerindex.py
+++ b/lib/bblayers/layerindex.py
@@ -206,6 +206,7 @@ class LayerIndexPlugin(ActionPlugin):
"""
args.show_only = True
args.ignore = []
+ args.shallow = True
self.do_layerindex_fetch(args)
def register_commands(self, sp):
diff --git a/lib/hashserv/server.py b/lib/hashserv/server.py
index 56f354bd0..f38a22ad9 100644
--- a/lib/hashserv/server.py
+++ b/lib/hashserv/server.py
@@ -12,6 +12,7 @@ import math
import os
import signal
import socket
+import sys
import time
from . import chunkify, DEFAULT_MAX_CHUNK
@@ -419,9 +420,14 @@ class Server(object):
self._cleanup_socket = None
def start_tcp_server(self, host, port):
- self.server = self.loop.run_until_complete(
- asyncio.start_server(self.handle_client, host, port)
- )
+ if sys.version_info[0] == 3 and sys.version_info[1] < 6:
+ self.server = self.loop.run_until_complete(
+ asyncio.start_server(self.handle_client, host, port, loop=self.loop)
+ )
+ else:
+ self.server = self.loop.run_until_complete(
+ asyncio.start_server(self.handle_client, host, port)
+ )
for s in self.server.sockets:
logger.info('Listening on %r' % (s.getsockname(),))
@@ -444,9 +450,14 @@ class Server(object):
try:
# Work around path length limits in AF_UNIX
os.chdir(os.path.dirname(path))
- self.server = self.loop.run_until_complete(
- asyncio.start_unix_server(self.handle_client, os.path.basename(path))
- )
+ if sys.version_info[0] == 3 and sys.version_info[1] < 6:
+ self.server = self.loop.run_until_complete(
+ asyncio.start_unix_server(self.handle_client, os.path.basename(path), loop=self.loop)
+ )
+ else:
+ self.server = self.loop.run_until_complete(
+ asyncio.start_unix_server(self.handle_client, os.path.basename(path))
+ )
finally:
os.chdir(cwd)
diff --git a/lib/toaster/toastergui/api.py b/lib/toaster/toastergui/api.py
index b4cdc335e..e367bd910 100644
--- a/lib/toaster/toastergui/api.py
+++ b/lib/toaster/toastergui/api.py
@@ -11,7 +11,7 @@ import os
import re
import logging
import json
-import subprocess
+import glob
from collections import Counter
from orm.models import Project, ProjectTarget, Build, Layer_Version
@@ -227,20 +227,18 @@ class XhrSetDefaultImageUrl(View):
# same logical name
# * Each project that uses a layer will have its own
# LayerVersion and Project Layer for it
-# * During the Paroject delete process, when the last
+# * During the Project delete process, when the last
# LayerVersion for a 'local_source_dir' layer is deleted
# then the Layer record is deleted to remove orphans
#
def scan_layer_content(layer,layer_version):
# if this is a local layer directory, we can immediately scan its content
- if layer.local_source_dir:
+ if os.path.isdir(layer.local_source_dir):
try:
# recipes-*/*/*.bb
- cmd = '%s %s' % ('ls', os.path.join(layer.local_source_dir,'recipes-*/*/*.bb'))
- recipes_list = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,stderr=subprocess.STDOUT).stdout.read()
- recipes_list = recipes_list.decode("utf-8").strip()
- if recipes_list and 'No such' not in recipes_list:
-                for recipe in recipes_list.split('\n'):
+            recipes_list = glob.glob(os.path.join(layer.local_source_dir, 'recipes-*/*/*.bb'))
+            for recipe in recipes_list:
recipe_path = recipe[recipe.rfind('recipes-'):]
recipe_name = recipe[recipe.rfind('/')+1:].replace('.bb','')
@@ -260,6 +258,9 @@ def scan_layer_content(layer,layer_version):
except Exception as e:
logger.warning("ERROR:scan_layer_content: %s" % e)
+ else:
+ logger.warning("ERROR: wrong path given")
+ raise KeyError("local_source_dir")
class XhrLayer(View):
""" Delete, Get, Add and Update Layer information
@@ -456,15 +457,18 @@ class XhrLayer(View):
'layerdetailurl':
layer_dep.get_detailspage_url(project.pk)})
- # Scan the layer's content and update components
- scan_layer_content(layer,layer_version)
+ # Only scan_layer_content if layer is local
+ if layer_data.get('local_source_dir', None):
+ # Scan the layer's content and update components
+ scan_layer_content(layer,layer_version)
except Layer_Version.DoesNotExist:
return error_response("layer-dep-not-found")
except Project.DoesNotExist:
return error_response("project-not-found")
- except KeyError:
- return error_response("incorrect-parameters")
+ except KeyError as e:
+ _log("KeyError: %s" % e)
+ return error_response(f"incorrect-parameters")
return JsonResponse({'error': "ok",
'imported_layer': {