Diffstat (limited to 'bin')
-rwxr-xr-x  bin/bitbake              |   7
-rwxr-xr-x  bin/bitbake-diffsigs     |  68
-rwxr-xr-x  bin/bitbake-getvar       |  60
-rwxr-xr-x  bin/bitbake-hashclient   | 323
-rwxr-xr-x  bin/bitbake-hashserv     | 153
-rwxr-xr-x  bin/bitbake-layers       |  20
-rwxr-xr-x  bin/bitbake-prserv       | 114
-rwxr-xr-x  bin/bitbake-selftest     |   8
-rwxr-xr-x  bin/bitbake-server       |  55
-rwxr-xr-x  bin/bitbake-worker       | 165
-rwxr-xr-x  bin/git-make-shallow     |  38
-rwxr-xr-x  bin/toaster              |  18
-rwxr-xr-x  bin/toaster-eventreplay  |  82
13 files changed, 854 insertions, 257 deletions
diff --git a/bin/bitbake b/bin/bitbake
index 61db6b70f..8622a7bf9 100755
--- a/bin/bitbake
+++ b/bin/bitbake
@@ -12,6 +12,8 @@
import os
import sys
+import warnings
+warnings.simplefilter("default")
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)),
'lib'))
@@ -23,10 +25,9 @@ except RuntimeError as exc:
from bb import cookerdata
from bb.main import bitbake_main, BitBakeConfigParameters, BBMainException
-if sys.getfilesystemencoding() != "utf-8":
- sys.exit("Please use a locale setting which supports UTF-8 (such as LANG=en_US.UTF-8).\nPython can't change the filesystem locale after loading so we need a UTF-8 when Python starts or things won't work.")
+bb.utils.check_system_locale()
-__version__ = "1.47.0"
+__version__ = "2.9.1"
if __name__ == "__main__":
if __version__ != bb.__version__:
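Two changes here recur throughout this series: warnings.simplefilter("default") makes Python print DeprecationWarning and related messages that are otherwise filtered when raised outside __main__, and the inline UTF-8 check moves into bb.utils.check_system_locale(). A minimal sketch of the warnings effect, independent of BitBake:

    import warnings

    warnings.simplefilter("default")  # print each distinct warning once per source location

    def old_api():
        # stand-in for a deprecated library entry point
        warnings.warn("old_api() is deprecated", DeprecationWarning, stacklevel=2)
        return 42

    old_api()  # now emits a visible DeprecationWarning instead of being filtered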
diff --git a/bin/bitbake-diffsigs b/bin/bitbake-diffsigs
index 19420a2df..9d6cb8c94 100755
--- a/bin/bitbake-diffsigs
+++ b/bin/bitbake-diffsigs
@@ -11,6 +11,8 @@
import os
import sys
import warnings
+
+warnings.simplefilter("default")
import argparse
import logging
import pickle
@@ -26,6 +28,7 @@ logger = bb.msg.logger_create(myname)
is_dump = myname == 'bitbake-dumpsig'
+
def find_siginfo(tinfoil, pn, taskname, sigs=None):
result = None
tinfoil.set_event_mask(['bb.event.FindSigInfoResult',
@@ -51,6 +54,7 @@ def find_siginfo(tinfoil, pn, taskname, sigs=None):
sys.exit(2)
return result
+
def find_siginfo_task(bbhandler, pn, taskname, sig1=None, sig2=None):
""" Find the most recent signature files for the specified PN/task """
@@ -59,22 +63,26 @@ def find_siginfo_task(bbhandler, pn, taskname, sig1=None, sig2=None):
if sig1 and sig2:
sigfiles = find_siginfo(bbhandler, pn, taskname, [sig1, sig2])
- if len(sigfiles) == 0:
+ if not sigfiles:
logger.error('No sigdata files found matching %s %s matching either %s or %s' % (pn, taskname, sig1, sig2))
sys.exit(1)
- elif not sig1 in sigfiles:
+ elif sig1 not in sigfiles:
logger.error('No sigdata files found matching %s %s with signature %s' % (pn, taskname, sig1))
sys.exit(1)
- elif not sig2 in sigfiles:
+ elif sig2 not in sigfiles:
logger.error('No sigdata files found matching %s %s with signature %s' % (pn, taskname, sig2))
sys.exit(1)
- latestfiles = [sigfiles[sig1], sigfiles[sig2]]
+
+ latestfiles = [sigfiles[sig1]['path'], sigfiles[sig2]['path']]
else:
- filedates = find_siginfo(bbhandler, pn, taskname)
- latestfiles = sorted(filedates.keys(), key=lambda f: filedates[f])[-2:]
- if not latestfiles:
+ sigfiles = find_siginfo(bbhandler, pn, taskname)
+ latestsigs = sorted(sigfiles.keys(), key=lambda h: sigfiles[h]['time'])[-2:]
+ if not latestsigs:
logger.error('No sigdata files found matching %s %s' % (pn, taskname))
sys.exit(1)
+ latestfiles = [sigfiles[latestsigs[0]]['path']]
+ if len(latestsigs) > 1:
+ latestfiles.append(sigfiles[latestsigs[1]]['path'])
return latestfiles
@@ -85,14 +93,14 @@ def recursecb(key, hash1, hash2):
hashfiles = find_siginfo(tinfoil, key, None, hashes)
recout = []
- if len(hashfiles) == 0:
+ if not hashfiles:
recout.append("Unable to find matching sigdata for %s with hashes %s or %s" % (key, hash1, hash2))
- elif not hash1 in hashfiles:
+ elif hash1 not in hashfiles:
recout.append("Unable to find matching sigdata for %s with hash %s" % (key, hash1))
- elif not hash2 in hashfiles:
+ elif hash2 not in hashfiles:
recout.append("Unable to find matching sigdata for %s with hash %s" % (key, hash2))
else:
- out2 = bb.siggen.compare_sigfiles(hashfiles[hash1], hashfiles[hash2], recursecb, color=color)
+ out2 = bb.siggen.compare_sigfiles(hashfiles[hash1]['path'], hashfiles[hash2]['path'], recursecb, color=color)
for change in out2:
for line in change.splitlines():
recout.append(' ' + line)
@@ -109,36 +117,36 @@ parser.add_argument('-D', '--debug',
if is_dump:
parser.add_argument("-t", "--task",
- help="find the signature data file for the last run of the specified task",
- action="store", dest="taskargs", nargs=2, metavar=('recipename', 'taskname'))
+ help="find the signature data file for the last run of the specified task",
+ action="store", dest="taskargs", nargs=2, metavar=('recipename', 'taskname'))
parser.add_argument("sigdatafile1",
- help="Signature file to dump. Not used when using -t/--task.",
- action="store", nargs='?', metavar="sigdatafile")
+ help="Signature file to dump. Not used when using -t/--task.",
+ action="store", nargs='?', metavar="sigdatafile")
else:
parser.add_argument('-c', '--color',
- help='Colorize the output (where %(metavar)s is %(choices)s)',
- choices=['auto', 'always', 'never'], default='auto', metavar='color')
+ help='Colorize the output (where %(metavar)s is %(choices)s)',
+ choices=['auto', 'always', 'never'], default='auto', metavar='color')
parser.add_argument('-d', '--dump',
- help='Dump the last signature data instead of comparing (equivalent to using bitbake-dumpsig)',
- action='store_true')
+ help='Dump the last signature data instead of comparing (equivalent to using bitbake-dumpsig)',
+ action='store_true')
parser.add_argument("-t", "--task",
- help="find the signature data files for the last two runs of the specified task and compare them",
- action="store", dest="taskargs", nargs=2, metavar=('recipename', 'taskname'))
+ help="find the signature data files for the last two runs of the specified task and compare them",
+ action="store", dest="taskargs", nargs=2, metavar=('recipename', 'taskname'))
parser.add_argument("-s", "--signature",
- help="With -t/--task, specify the signatures to look for instead of taking the last two",
- action="store", dest="sigargs", nargs=2, metavar=('fromsig', 'tosig'))
+ help="With -t/--task, specify the signatures to look for instead of taking the last two",
+ action="store", dest="sigargs", nargs=2, metavar=('fromsig', 'tosig'))
parser.add_argument("sigdatafile1",
- help="First signature file to compare (or signature file to dump, if second not specified). Not used when using -t/--task.",
- action="store", nargs='?')
+ help="First signature file to compare (or signature file to dump, if second not specified). Not used when using -t/--task.",
+ action="store", nargs='?')
parser.add_argument("sigdatafile2",
- help="Second signature file to compare",
- action="store", nargs='?')
+ help="Second signature file to compare",
+ action="store", nargs='?')
options = parser.parse_args()
if is_dump:
@@ -156,7 +164,8 @@ if options.taskargs:
with bb.tinfoil.Tinfoil() as tinfoil:
tinfoil.prepare(config_only=True)
if not options.dump and options.sigargs:
- files = find_siginfo_task(tinfoil, options.taskargs[0], options.taskargs[1], options.sigargs[0], options.sigargs[1])
+ files = find_siginfo_task(tinfoil, options.taskargs[0], options.taskargs[1], options.sigargs[0],
+ options.sigargs[1])
else:
files = find_siginfo_task(tinfoil, options.taskargs[0], options.taskargs[1])
@@ -165,7 +174,8 @@ if options.taskargs:
output = bb.siggen.dump_sigfile(files[-1])
else:
if len(files) < 2:
- logger.error('Only one matching sigdata file found for the specified task (%s %s)' % (options.taskargs[0], options.taskargs[1]))
+ logger.error('Only one matching sigdata file found for the specified task (%s %s)' % (
+ options.taskargs[0], options.taskargs[1]))
sys.exit(1)
# Recurse into signature comparison
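The find_siginfo() result format changes in this file from a path-to-mtime mapping to a mapping keyed by signature hash, each entry carrying 'path' and 'time' fields. A minimal sketch of the new "latest two" selection, using made-up sample data:

    sigfiles = {
        "a1b2": {"path": "/sstate/a1b2.siginfo", "time": 1700000100},
        "c3d4": {"path": "/sstate/c3d4.siginfo", "time": 1700000200},
        "e5f6": {"path": "/sstate/e5f6.siginfo", "time": 1700000300},
    }

    # pick the two most recent signatures by their 'time' field
    latestsigs = sorted(sigfiles, key=lambda h: sigfiles[h]["time"])[-2:]
    latestfiles = [sigfiles[h]["path"] for h in latestsigs]
    print(latestfiles)  # ['/sstate/c3d4.siginfo', '/sstate/e5f6.siginfo']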
diff --git a/bin/bitbake-getvar b/bin/bitbake-getvar
new file mode 100755
index 000000000..8901f99ae
--- /dev/null
+++ b/bin/bitbake-getvar
@@ -0,0 +1,60 @@
+#! /usr/bin/env python3
+#
+# Copyright (C) 2021 Richard Purdie
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import argparse
+import io
+import os
+import sys
+import warnings
+warnings.simplefilter("default")
+
+bindir = os.path.dirname(__file__)
+topdir = os.path.dirname(bindir)
+sys.path[0:0] = [os.path.join(topdir, 'lib')]
+
+import bb.tinfoil
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(description="Bitbake Query Variable")
+ parser.add_argument("variable", help="variable name to query")
+ parser.add_argument("-r", "--recipe", help="Recipe name to query", default=None, required=False)
+ parser.add_argument('-u', '--unexpand', help='Do not expand the value (with --value)', action="store_true")
+ parser.add_argument('-f', '--flag', help='Specify a variable flag to query (with --value)', default=None)
+ parser.add_argument('--value', help='Only report the value, no history and no variable name', action="store_true")
+ parser.add_argument('-q', '--quiet', help='Silence bitbake server logging', action="store_true")
+ parser.add_argument('--ignore-undefined', help='Suppress any errors related to undefined variables', action="store_true")
+ args = parser.parse_args()
+
+ if not args.value:
+ if args.unexpand:
+ sys.exit("--unexpand only makes sense with --value")
+
+ if args.flag:
+ sys.exit("--flag only makes sense with --value")
+
+ quiet = args.quiet or args.value
+ with bb.tinfoil.Tinfoil(tracking=True, setup_logging=not quiet) as tinfoil:
+ if args.recipe:
+ tinfoil.prepare(quiet=3 if quiet else 2)
+ d = tinfoil.parse_recipe(args.recipe)
+ else:
+ tinfoil.prepare(quiet=2, config_only=True)
+ d = tinfoil.config_data
+
+ value = None
+ if args.flag:
+ value = d.getVarFlag(args.variable, args.flag, expand=not args.unexpand)
+ if value is None and not args.ignore_undefined:
+ sys.exit(f"The flag '{args.flag}' is not defined for variable '{args.variable}'")
+ else:
+ value = d.getVar(args.variable, expand=not args.unexpand)
+ if value is None and not args.ignore_undefined:
+ sys.exit(f"The variable '{args.variable}' is not defined")
+ if args.value:
+ print(str(value if value is not None else ""))
+ else:
+ bb.data.emit_var(args.variable, d=d, all=True)
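bitbake-getvar resolves either a variable or one of its flags, honouring --unexpand and --ignore-undefined. The toy class below mimics only the getVar(expand=...) behaviour the script relies on; it is a hypothetical stand-in, not BitBake's DataSmart:

    import re

    class ToyDataStore:
        # hypothetical stand-in for the datastore API used above
        def __init__(self, values):
            self.values = values

        def expand(self, s):
            def repl(m):
                val = self.values.get(m.group(1))
                return self.expand(val) if val is not None else m.group(0)
            return re.sub(r"\$\{(\w+)\}", repl, s)

        def getVar(self, name, expand=True):
            val = self.values.get(name)
            return self.expand(val) if (val is not None and expand) else val

    d = ToyDataStore({"PN": "busybox", "WORKDIR": "/tmp/work/${PN}"})
    print(d.getVar("WORKDIR"))                # /tmp/work/busybox
    print(d.getVar("WORKDIR", expand=False))  # /tmp/work/${PN}
    print(d.getVar("MISSING"))                # None -> the script exits unless --ignore-undefined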
diff --git a/bin/bitbake-hashclient b/bin/bitbake-hashclient
index 29ab65f17..a50701a88 100755
--- a/bin/bitbake-hashclient
+++ b/bin/bitbake-hashclient
@@ -13,6 +13,12 @@ import pprint
import sys
import threading
import time
+import warnings
+import netrc
+import json
+import statistics
+import textwrap
+warnings.simplefilter("default")
try:
import tqdm
@@ -34,18 +40,42 @@ except ImportError:
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), 'lib'))
import hashserv
+import bb.asyncrpc
DEFAULT_ADDRESS = 'unix://./hashserve.sock'
METHOD = 'stress.test.method'
+def print_user(u):
+ print(f"Username: {u['username']}")
+ if "permissions" in u:
+ print("Permissions: " + " ".join(u["permissions"]))
+ if "token" in u:
+ print(f"Token: {u['token']}")
+
def main():
+ def handle_get(args, client):
+ result = client.get_taskhash(args.method, args.taskhash, all_properties=True)
+ if not result:
+ return 0
+
+ print(json.dumps(result, sort_keys=True, indent=4))
+ return 0
+
+ def handle_get_outhash(args, client):
+ result = client.get_outhash(args.method, args.outhash, args.taskhash)
+ if not result:
+ return 0
+
+ print(json.dumps(result, sort_keys=True, indent=4))
+ return 0
+
def handle_stats(args, client):
if args.reset:
s = client.reset_stats()
else:
s = client.get_stats()
- pprint.pprint(s)
+ print(json.dumps(s, sort_keys=True, indent=4))
return 0
def handle_stress(args, client):
@@ -53,47 +83,59 @@ def main():
nonlocal found_hashes
nonlocal missed_hashes
nonlocal max_time
+ nonlocal times
- client = hashserv.create_client(args.address)
-
- for i in range(args.requests):
- taskhash = hashlib.sha256()
- taskhash.update(args.taskhash_seed.encode('utf-8'))
- taskhash.update(str(i).encode('utf-8'))
+ with hashserv.create_client(args.address) as client:
+ for i in range(args.requests):
+ taskhash = hashlib.sha256()
+ taskhash.update(args.taskhash_seed.encode('utf-8'))
+ taskhash.update(str(i).encode('utf-8'))
- start_time = time.perf_counter()
- l = client.get_unihash(METHOD, taskhash.hexdigest())
- elapsed = time.perf_counter() - start_time
+ start_time = time.perf_counter()
+ l = client.get_unihash(METHOD, taskhash.hexdigest())
+ elapsed = time.perf_counter() - start_time
- with lock:
- if l:
- found_hashes += 1
- else:
- missed_hashes += 1
+ with lock:
+ if l:
+ found_hashes += 1
+ else:
+ missed_hashes += 1
- max_time = max(elapsed, max_time)
- pbar.update()
+ times.append(elapsed)
+ pbar.update()
max_time = 0
found_hashes = 0
missed_hashes = 0
lock = threading.Lock()
- total_requests = args.clients * args.requests
+ times = []
start_time = time.perf_counter()
- with ProgressBar(total=total_requests) as pbar:
+ with ProgressBar(total=args.clients * args.requests) as pbar:
threads = [threading.Thread(target=thread_main, args=(pbar, lock), daemon=False) for _ in range(args.clients)]
for t in threads:
t.start()
for t in threads:
t.join()
+ total_elapsed = time.perf_counter() - start_time
- elapsed = time.perf_counter() - start_time
with lock:
- print("%d requests in %.1fs. %.1f requests per second" % (total_requests, elapsed, total_requests / elapsed))
- print("Average request time %.8fs" % (elapsed / total_requests))
- print("Max request time was %.8fs" % max_time)
- print("Found %d hashes, missed %d" % (found_hashes, missed_hashes))
+ mean = statistics.mean(times)
+ median = statistics.median(times)
+ stddev = statistics.pstdev(times)
+
+ print(f"Number of clients: {args.clients}")
+ print(f"Requests per client: {args.requests}")
+ print(f"Number of requests: {len(times)}")
+ print(f"Total elapsed time: {total_elapsed:.3f}s")
+ print(f"Total request rate: {len(times)/total_elapsed:.3f} req/s")
+ print(f"Average request time: {mean:.3f}s")
+ print(f"Median request time: {median:.3f}s")
+ print(f"Request time std dev: {stddev:.3f}s")
+ print(f"Maximum request time: {max(times):.3f}s")
+ print(f"Minimum request time: {min(times):.3f}s")
+ print(f"Hashes found: {found_hashes}")
+ print(f"Hashes missed: {missed_hashes}")
if args.report:
with ProgressBar(total=args.requests) as pbar:
@@ -111,12 +153,152 @@ def main():
with lock:
pbar.update()
- parser = argparse.ArgumentParser(description='Hash Equivalence Client')
+ def handle_remove(args, client):
+ where = {k: v for k, v in args.where}
+ if where:
+ result = client.remove(where)
+ print("Removed %d row(s)" % (result["count"]))
+ else:
+ print("No query specified")
+
+ def handle_clean_unused(args, client):
+ result = client.clean_unused(args.max_age)
+ print("Removed %d rows" % (result["count"]))
+ return 0
+
+ def handle_refresh_token(args, client):
+ r = client.refresh_token(args.username)
+ print_user(r)
+
+ def handle_set_user_permissions(args, client):
+ r = client.set_user_perms(args.username, args.permissions)
+ print_user(r)
+
+ def handle_get_user(args, client):
+ r = client.get_user(args.username)
+ print_user(r)
+
+ def handle_get_all_users(args, client):
+ users = client.get_all_users()
+ print("{username:20}| {permissions}".format(username="Username", permissions="Permissions"))
+ print(("-" * 20) + "+" + ("-" * 20))
+ for u in users:
+ print("{username:20}| {permissions}".format(username=u["username"], permissions=" ".join(u["permissions"])))
+
+ def handle_new_user(args, client):
+ r = client.new_user(args.username, args.permissions)
+ print_user(r)
+
+ def handle_delete_user(args, client):
+ r = client.delete_user(args.username)
+ print_user(r)
+
+ def handle_get_db_usage(args, client):
+ usage = client.get_db_usage()
+ print(usage)
+ tables = sorted(usage.keys())
+ print("{name:20}| {rows:20}".format(name="Table name", rows="Rows"))
+ print(("-" * 20) + "+" + ("-" * 20))
+ for t in tables:
+ print("{name:20}| {rows:<20}".format(name=t, rows=usage[t]["rows"]))
+ print()
+
+ total_rows = sum(t["rows"] for t in usage.values())
+ print(f"Total rows: {total_rows}")
+
+ def handle_get_db_query_columns(args, client):
+ columns = client.get_db_query_columns()
+ print("\n".join(sorted(columns)))
+
+ def handle_gc_status(args, client):
+ result = client.gc_status()
+ if not result["mark"]:
+ print("No Garbage collection in progress")
+ return 0
+
+ print("Current Mark: %s" % result["mark"])
+ print("Total hashes to keep: %d" % result["keep"])
+ print("Total hashes to remove: %s" % result["remove"])
+ return 0
+
+ def handle_gc_mark(args, client):
+ where = {k: v for k, v in args.where}
+ result = client.gc_mark(args.mark, where)
+ print("New hashes marked: %d" % result["count"])
+ return 0
+
+ def handle_gc_sweep(args, client):
+ result = client.gc_sweep(args.mark)
+ print("Removed %d rows" % result["count"])
+ return 0
+
+ def handle_unihash_exists(args, client):
+ result = client.unihash_exists(args.unihash)
+ if args.quiet:
+ return 0 if result else 1
+
+ print("true" if result else "false")
+ return 0
+
+ def handle_ping(args, client):
+ times = []
+ for i in range(1, args.count + 1):
+ if not args.quiet:
+ print(f"Ping {i} of {args.count}... ", end="")
+ start_time = time.perf_counter()
+ client.ping()
+ elapsed = time.perf_counter() - start_time
+ times.append(elapsed)
+ if not args.quiet:
+ print(f"{elapsed:.3f}s")
+
+ mean = statistics.mean(times)
+ median = statistics.median(times)
+ std_dev = statistics.pstdev(times)
+
+ if not args.quiet:
+ print("------------------------")
+ print(f"Number of pings: {len(times)}")
+ print(f"Average round trip time: {mean:.3f}s")
+ print(f"Median round trip time: {median:.3f}s")
+ print(f"Round trip time std dev: {std_dev:.3f}s")
+ print(f"Min time is: {min(times):.3f}s")
+ print(f"Max time is: {max(times):.3f}s")
+ return 0
+
+ parser = argparse.ArgumentParser(
+ formatter_class=argparse.RawDescriptionHelpFormatter,
+ description='Hash Equivalence Client',
+ epilog=textwrap.dedent(
+ """
+ Possible ADDRESS options are:
+ unix://PATH Connect to UNIX domain socket at PATH
+ ws://HOST[:PORT] Connect to websocket at HOST:PORT (default port is 80)
+ wss://HOST[:PORT] Connect to secure websocket at HOST:PORT (default port is 443)
+ HOST:PORT Connect to TCP server at HOST:PORT
+ """
+ ),
+ )
parser.add_argument('--address', default=DEFAULT_ADDRESS, help='Server address (default "%(default)s")')
parser.add_argument('--log', default='WARNING', help='Set logging level')
+ parser.add_argument('--login', '-l', metavar="USERNAME", help="Authenticate as USERNAME")
+ parser.add_argument('--password', '-p', metavar="TOKEN", help="Authenticate using token TOKEN")
+ parser.add_argument('--become', '-b', metavar="USERNAME", help="Impersonate user USERNAME (if allowed) when performing actions")
+ parser.add_argument('--no-netrc', '-n', action="store_false", dest="netrc", help="Do not use .netrc")
subparsers = parser.add_subparsers()
+ get_parser = subparsers.add_parser('get', help="Get the unihash for a taskhash")
+ get_parser.add_argument("method", help="Method to query")
+ get_parser.add_argument("taskhash", help="Task hash to query")
+ get_parser.set_defaults(func=handle_get)
+
+ get_outhash_parser = subparsers.add_parser('get-outhash', help="Get output hash information")
+ get_outhash_parser.add_argument("method", help="Method to query")
+ get_outhash_parser.add_argument("outhash", help="Output hash to query")
+ get_outhash_parser.add_argument("taskhash", help="Task hash to query")
+ get_outhash_parser.set_defaults(func=handle_get_outhash)
+
stats_parser = subparsers.add_parser('stats', help='Show server stats')
stats_parser.add_argument('--reset', action='store_true',
help='Reset server stats')
@@ -135,6 +317,69 @@ def main():
help='Include string in outhash')
stress_parser.set_defaults(func=handle_stress)
+ remove_parser = subparsers.add_parser('remove', help="Remove hash entries")
+ remove_parser.add_argument("--where", "-w", metavar="KEY VALUE", nargs=2, action="append", default=[],
+ help="Remove entries from table where KEY == VALUE")
+ remove_parser.set_defaults(func=handle_remove)
+
+ clean_unused_parser = subparsers.add_parser('clean-unused', help="Remove unused database entries")
+ clean_unused_parser.add_argument("max_age", metavar="SECONDS", type=int, help="Remove unused entries older than SECONDS old")
+ clean_unused_parser.set_defaults(func=handle_clean_unused)
+
+ refresh_token_parser = subparsers.add_parser('refresh-token', help="Refresh auth token")
+ refresh_token_parser.add_argument("--username", "-u", help="Refresh the token for another user (if authorized)")
+ refresh_token_parser.set_defaults(func=handle_refresh_token)
+
+ set_user_perms_parser = subparsers.add_parser('set-user-perms', help="Set new permissions for user")
+ set_user_perms_parser.add_argument("--username", "-u", help="Username", required=True)
+ set_user_perms_parser.add_argument("permissions", metavar="PERM", nargs="*", default=[], help="New permissions")
+ set_user_perms_parser.set_defaults(func=handle_set_user_permissions)
+
+ get_user_parser = subparsers.add_parser('get-user', help="Get user")
+ get_user_parser.add_argument("--username", "-u", help="Username")
+ get_user_parser.set_defaults(func=handle_get_user)
+
+ get_all_users_parser = subparsers.add_parser('get-all-users', help="List all users")
+ get_all_users_parser.set_defaults(func=handle_get_all_users)
+
+ new_user_parser = subparsers.add_parser('new-user', help="Create new user")
+ new_user_parser.add_argument("--username", "-u", help="Username", required=True)
+ new_user_parser.add_argument("permissions", metavar="PERM", nargs="*", default=[], help="New permissions")
+ new_user_parser.set_defaults(func=handle_new_user)
+
+ delete_user_parser = subparsers.add_parser('delete-user', help="Delete user")
+ delete_user_parser.add_argument("--username", "-u", help="Username", required=True)
+ delete_user_parser.set_defaults(func=handle_delete_user)
+
+ db_usage_parser = subparsers.add_parser('get-db-usage', help="Database Usage")
+ db_usage_parser.set_defaults(func=handle_get_db_usage)
+
+ db_query_columns_parser = subparsers.add_parser('get-db-query-columns', help="Show columns that can be used in database queries")
+ db_query_columns_parser.set_defaults(func=handle_get_db_query_columns)
+
+ gc_status_parser = subparsers.add_parser("gc-status", help="Show garbage collection status")
+ gc_status_parser.set_defaults(func=handle_gc_status)
+
+ gc_mark_parser = subparsers.add_parser('gc-mark', help="Mark hashes to be kept for garbage collection")
+ gc_mark_parser.add_argument("mark", help="Mark for this garbage collection operation")
+ gc_mark_parser.add_argument("--where", "-w", metavar="KEY VALUE", nargs=2, action="append", default=[],
+ help="Keep entries in table where KEY == VALUE")
+ gc_mark_parser.set_defaults(func=handle_gc_mark)
+
+ gc_sweep_parser = subparsers.add_parser('gc-sweep', help="Perform garbage collection and delete any entries that are not marked")
+ gc_sweep_parser.add_argument("mark", help="Mark for this garbage collection operation")
+ gc_sweep_parser.set_defaults(func=handle_gc_sweep)
+
+ unihash_exists_parser = subparsers.add_parser('unihash-exists', help="Check if a unihash is known to the server")
+ unihash_exists_parser.add_argument("--quiet", action="store_true", help="Don't print status. Instead, exit with 0 if unihash exists and 1 if it does not")
+ unihash_exists_parser.add_argument("unihash", help="Unihash to check")
+ unihash_exists_parser.set_defaults(func=handle_unihash_exists)
+
+ ping_parser = subparsers.add_parser('ping', help="Ping server")
+ ping_parser.add_argument("-n", "--count", type=int, help="Number of pings. Default is %(default)s", default=10)
+ ping_parser.add_argument("-q", "--quiet", action="store_true", help="Don't print each ping; only print results")
+ ping_parser.set_defaults(func=handle_ping)
+
args = parser.parse_args()
logger = logging.getLogger('hashserv')
@@ -148,14 +393,30 @@ def main():
console.setLevel(level)
logger.addHandler(console)
+ login = args.login
+ password = args.password
+
+ if login is None and args.netrc:
+ try:
+ n = netrc.netrc()
+ auth = n.authenticators(args.address)
+ if auth is not None:
+ login, _, password = auth
+ except FileNotFoundError:
+ pass
+ except netrc.NetrcParseError as e:
+ sys.stderr.write(f"Error parsing {e.filename}:{e.lineno}: {e.msg}\n")
+
func = getattr(args, 'func', None)
if func:
- client = hashserv.create_client(args.address)
- # Try to establish a connection to the server now to detect failures
- # early
- client.connect()
-
- return func(args, client)
+ try:
+ with hashserv.create_client(args.address, login, password) as client:
+ if args.become:
+ client.become_user(args.become)
+ return func(args, client)
+ except bb.asyncrpc.InvokeError as e:
+ print(f"ERROR: {e}")
+ return 1
return 0
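When --login is not given, the client now falls back to .netrc for credentials, keyed by the server address. The lookup in isolation (the machine name is hypothetical):

    import netrc
    import sys

    login = password = None
    try:
        auth = netrc.netrc().authenticators("hashserv.example.com")
        if auth is not None:
            login, _, password = auth  # netrc entries are (login, account, password)
    except FileNotFoundError:
        pass  # no ~/.netrc; continue unauthenticated
    except netrc.NetrcParseError as e:
        sys.stderr.write(f"Error parsing {e.filename}:{e.lineno}: {e.msg}\n")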
diff --git a/bin/bitbake-hashserv b/bin/bitbake-hashserv
index 1bc1f91f3..01503736b 100755
--- a/bin/bitbake-hashserv
+++ b/bin/bitbake-hashserv
@@ -10,53 +10,170 @@ import sys
import logging
import argparse
import sqlite3
+import warnings
-sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), 'lib'))
+warnings.simplefilter("default")
+
+sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), "lib"))
import hashserv
+from hashserv.server import DEFAULT_ANON_PERMS
VERSION = "1.0.0"
-DEFAULT_BIND = 'unix://./hashserve.sock'
+DEFAULT_BIND = "unix://./hashserve.sock"
def main():
- parser = argparse.ArgumentParser(description='Hash Equivalence Reference Server. Version=%s' % VERSION,
- epilog='''The bind address is the path to a unix domain socket if it is
- prefixed with "unix://". Otherwise, it is an IP address
- and port in form ADDRESS:PORT. To bind to all addresses, leave
- the ADDRESS empty, e.g. "--bind :8686". To bind to a specific
- IPv6 address, enclose the address in "[]", e.g.
- "--bind [::1]:8686"'''
- )
-
- parser.add_argument('--bind', default=DEFAULT_BIND, help='Bind address (default "%(default)s")')
- parser.add_argument('--database', default='./hashserv.db', help='Database file (default "%(default)s")')
- parser.add_argument('--log', default='WARNING', help='Set logging level')
+ parser = argparse.ArgumentParser(
+ description="Hash Equivalence Reference Server. Version=%s" % VERSION,
+ formatter_class=argparse.RawTextHelpFormatter,
+ epilog="""
+The bind address may take one of the following formats:
+ unix://PATH - Bind to unix domain socket at PATH
+ ws://ADDRESS:PORT - Bind to websocket on ADDRESS:PORT
+ ADDRESS:PORT - Bind to raw TCP socket on ADDRESS:PORT
+
+To bind to all addresses, leave the ADDRESS empty, e.g. "--bind :8686" or
+"--bind ws://:8686". To bind to a specific IPv6 address, enclose the address in
+"[]", e.g. "--bind [::1]:8686" or "--bind ws://[::1]:8686"
+
+Note that the default Anonymous permissions are designed to not break existing
+server instances when upgrading, but are not particularly secure defaults. If
+you want to use authentication, it is recommended that you use "--anon-perms
+@read" to only give anonymous users read access, or "--anon-perms @none" to
+give un-authenticated users no access at all.
+
+Setting "--anon-perms @all" or "--anon-perms @user-admin" is not allowed, since
+this would allow anonymous users to manage all user accounts, which is a bad
+idea.
+
+If you are using user authentication, you should run your server in websockets
+mode with an SSL terminating load balancer in front of it (as this server does
+not implement SSL). Otherwise all usernames and passwords will be transmitted
+in the clear. When configured this way, clients can connect using a secure
+websocket, as in "wss://SERVER:PORT"
+
+The following permissions are supported by the server:
+
+ @none - No permissions
+ @read - The ability to read equivalent hashes from the server
+ @report - The ability to report equivalent hashes to the server
+ @db-admin - Manage the hash database(s). This includes cleaning the
+ database, removing hashes, etc.
+ @user-admin - The ability to manage user accounts. This includes creating
+ users, deleting users, resetting login tokens, and assigning
+ permissions.
+ @all - All possible permissions, including any that may be added
+ in the future
+ """,
+ )
+
+ parser.add_argument(
+ "-b",
+ "--bind",
+ default=os.environ.get("HASHSERVER_BIND", DEFAULT_BIND),
+ help='Bind address (default $HASHSERVER_BIND, "%(default)s")',
+ )
+ parser.add_argument(
+ "-d",
+ "--database",
+ default=os.environ.get("HASHSERVER_DB", "./hashserv.db"),
+ help='Database file (default $HASHSERVER_DB, "%(default)s")',
+ )
+ parser.add_argument(
+ "-l",
+ "--log",
+ default=os.environ.get("HASHSERVER_LOG_LEVEL", "WARNING"),
+ help='Set logging level (default $HASHSERVER_LOG_LEVEL, "%(default)s")',
+ )
+ parser.add_argument(
+ "-u",
+ "--upstream",
+ default=os.environ.get("HASHSERVER_UPSTREAM", None),
+ help="Upstream hashserv to pull hashes from ($HASHSERVER_UPSTREAM)",
+ )
+ parser.add_argument(
+ "-r",
+ "--read-only",
+ action="store_true",
+ help="Disallow write operations from clients ($HASHSERVER_READ_ONLY)",
+ )
+ parser.add_argument(
+ "--db-username",
+ default=os.environ.get("HASHSERVER_DB_USERNAME", None),
+ help="Database username ($HASHSERVER_DB_USERNAME)",
+ )
+ parser.add_argument(
+ "--db-password",
+ default=os.environ.get("HASHSERVER_DB_PASSWORD", None),
+ help="Database password ($HASHSERVER_DB_PASSWORD)",
+ )
+ parser.add_argument(
+ "--anon-perms",
+ metavar="PERM[,PERM[,...]]",
+ default=os.environ.get("HASHSERVER_ANON_PERMS", ",".join(DEFAULT_ANON_PERMS)),
+ help='Permissions to give anonymous users (default $HASHSERVER_ANON_PERMS, "%(default)s")',
+ )
+ parser.add_argument(
+ "--admin-user",
+ default=os.environ.get("HASHSERVER_ADMIN_USER", None),
+ help="Create default admin user with name ADMIN_USER ($HASHSERVER_ADMIN_USER)",
+ )
+ parser.add_argument(
+ "--admin-password",
+ default=os.environ.get("HASHSERVER_ADMIN_PASSWORD", None),
+ help="Create default admin user with password ADMIN_PASSWORD ($HASHSERVER_ADMIN_PASSWORD)",
+ )
+ parser.add_argument(
+ "--reuseport",
+ action="store_true",
+ help="Enable SO_REUSEPORT, allowing multiple servers to bind to the same port for load balancing",
+ )
args = parser.parse_args()
- logger = logging.getLogger('hashserv')
+ logger = logging.getLogger("hashserv")
level = getattr(logging, args.log.upper(), None)
if not isinstance(level, int):
- raise ValueError('Invalid log level: %s' % args.log)
+ raise ValueError(
+ "Invalid log level: %s (Try ERROR/WARNING/INFO/DEBUG)" % args.log
+ )
logger.setLevel(level)
console = logging.StreamHandler()
console.setLevel(level)
logger.addHandler(console)
- server = hashserv.create_server(args.bind, args.database)
+ read_only = (os.environ.get("HASHSERVER_READ_ONLY", "0") == "1") or args.read_only
+ if "," in args.anon_perms:
+ anon_perms = args.anon_perms.split(",")
+ else:
+ anon_perms = args.anon_perms.split()
+
+ server = hashserv.create_server(
+ args.bind,
+ args.database,
+ upstream=args.upstream,
+ read_only=read_only,
+ db_username=args.db_username,
+ db_password=args.db_password,
+ anon_perms=anon_perms,
+ admin_username=args.admin_user,
+ admin_password=args.admin_password,
+ reuseport=args.reuseport,
+ )
server.serve_forever()
return 0
-if __name__ == '__main__':
+if __name__ == "__main__":
try:
ret = main()
except Exception:
ret = 1
import traceback
+
traceback.print_exc()
sys.exit(ret)
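Every option above now takes its default from an environment variable via os.environ.get(), so a containerised server can be configured without command-line flags; an explicit flag still wins over the environment. The pattern reduced to one option:

    import argparse
    import os

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-b", "--bind",
        default=os.environ.get("HASHSERVER_BIND", "unix://./hashserve.sock"),
        help='Bind address (default $HASHSERVER_BIND, "%(default)s")',
    )

    args = parser.parse_args([])  # precedence: CLI flag > $HASHSERVER_BIND > built-in default
    print(args.bind)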
diff --git a/bin/bitbake-layers b/bin/bitbake-layers
index 149f1b1ac..aebb5100c 100755
--- a/bin/bitbake-layers
+++ b/bin/bitbake-layers
@@ -14,7 +14,8 @@ import logging
import os
import sys
import argparse
-import signal
+import warnings
+warnings.simplefilter("default")
bindir = os.path.dirname(__file__)
topdir = os.path.dirname(bindir)
@@ -26,14 +27,13 @@ import bb.msg
logger = bb.msg.logger_create('bitbake-layers', sys.stdout)
def main():
- signal.signal(signal.SIGPIPE, signal.SIG_DFL)
parser = argparse.ArgumentParser(
description="BitBake layers utility",
epilog="Use %(prog)s <subcommand> --help to get help on a specific command",
add_help=False)
parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true')
parser.add_argument('-q', '--quiet', help='Print only errors', action='store_true')
- parser.add_argument('-F', '--force', help='Force add without recipe parse verification', action='store_true')
+ parser.add_argument('-F', '--force', help='Forced execution: can be specified multiple times. -F will force add without recipe parse verification and -FF will additionally force the run without layer parsing.', action='count', default=0)
parser.add_argument('--color', choices=['auto', 'always', 'never'], default='auto', help='Colorize output (where %(metavar)s is %(choices)s)', metavar='COLOR')
global_args, unparsed_args = parser.parse_known_args()
@@ -59,20 +59,24 @@ def main():
plugins = []
tinfoil = bb.tinfoil.Tinfoil(tracking=True)
tinfoil.logger.setLevel(logger.getEffectiveLevel())
- try:
+ if global_args.force > 1:
+ bbpaths = []
+ else:
tinfoil.prepare(True)
- for path in ([topdir] +
- tinfoil.config_data.getVar('BBPATH').split(':')):
+ bbpaths = tinfoil.config_data.getVar('BBPATH').split(':')
+
+ try:
+ for path in ([topdir] + bbpaths):
pluginpath = os.path.join(path, 'lib', 'bblayers')
bb.utils.load_plugins(logger, plugins, pluginpath)
registered = False
for plugin in plugins:
+ if hasattr(plugin, 'tinfoil_init') and global_args.force <= 1:
+ plugin.tinfoil_init(tinfoil)
if hasattr(plugin, 'register_commands'):
registered = True
plugin.register_commands(subparsers)
- if hasattr(plugin, 'tinfoil_init'):
- plugin.tinfoil_init(tinfoil)
if not registered:
logger.error("No commands registered - missing plugins?")
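The -F option moves from store_true to action="count", so the force level is an integer: -F (1) skips recipe parse verification, -FF (2) additionally skips layer parsing. The argparse behaviour in isolation:

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("-F", "--force", action="count", default=0,
                        help="may be repeated; each use raises the force level")

    print(parser.parse_args([]).force)       # 0
    print(parser.parse_args(["-F"]).force)   # 1
    print(parser.parse_args(["-FF"]).force)  # 2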
diff --git a/bin/bitbake-prserv b/bin/bitbake-prserv
index 1e9b6cbc1..3992e84ea 100755
--- a/bin/bitbake-prserv
+++ b/bin/bitbake-prserv
@@ -1,49 +1,103 @@
#!/usr/bin/env python3
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
import os
import sys,logging
-import optparse
+import argparse
+import warnings
+warnings.simplefilter("default")
-sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)),'lib'))
+sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), "lib"))
import prserv
import prserv.serv
-__version__="1.0.0"
+VERSION = "2.0.0"
-PRHOST_DEFAULT='0.0.0.0'
+PRHOST_DEFAULT="0.0.0.0"
PRPORT_DEFAULT=8585
+def init_logger(logfile, loglevel):
+ numeric_level = getattr(logging, loglevel.upper(), None)
+ if not isinstance(numeric_level, int):
+ raise ValueError("Invalid log level: %s" % loglevel)
+ FORMAT = "%(asctime)-15s %(message)s"
+ logging.basicConfig(level=numeric_level, filename=logfile, format=FORMAT)
+
def main():
- parser = optparse.OptionParser(
- version="Bitbake PR Service Core version %s, %%prog version %s" % (prserv.__version__, __version__),
- usage = "%prog < --start | --stop > [options]")
-
- parser.add_option("-f", "--file", help="database filename(default: prserv.sqlite3)", action="store",
- dest="dbfile", type="string", default="prserv.sqlite3")
- parser.add_option("-l", "--log", help="log filename(default: prserv.log)", action="store",
- dest="logfile", type="string", default="prserv.log")
- parser.add_option("--loglevel", help="logging level, i.e. CRITICAL, ERROR, WARNING, INFO, DEBUG",
- action = "store", type="string", dest="loglevel", default = "INFO")
- parser.add_option("--start", help="start daemon",
- action="store_true", dest="start")
- parser.add_option("--stop", help="stop daemon",
- action="store_true", dest="stop")
- parser.add_option("--host", help="ip address to bind", action="store",
- dest="host", type="string", default=PRHOST_DEFAULT)
- parser.add_option("--port", help="port number(default: 8585)", action="store",
- dest="port", type="int", default=PRPORT_DEFAULT)
-
- options, args = parser.parse_args(sys.argv)
- prserv.init_logger(os.path.abspath(options.logfile),options.loglevel)
-
- if options.start:
- ret=prserv.serv.start_daemon(options.dbfile, options.host, options.port,os.path.abspath(options.logfile))
- elif options.stop:
- ret=prserv.serv.stop_daemon(options.host, options.port)
+ parser = argparse.ArgumentParser(
+ description="BitBake PR Server. Version=%s" % VERSION,
+ formatter_class=argparse.RawTextHelpFormatter)
+
+ parser.add_argument(
+ "-f",
+ "--file",
+ default="prserv.sqlite3",
+ help="database filename (default: prserv.sqlite3)",
+ )
+ parser.add_argument(
+ "-l",
+ "--log",
+ default="prserv.log",
+ help="log filename (default: prserv.log)",
+ )
+ parser.add_argument(
+ "--loglevel",
+ default="INFO",
+ help="logging level, i.e. CRITICAL, ERROR, WARNING, INFO, DEBUG",
+ )
+ parser.add_argument(
+ "--start",
+ action="store_true",
+ help="start daemon",
+ )
+ parser.add_argument(
+ "--stop",
+ action="store_true",
+ help="stop daemon",
+ )
+ parser.add_argument(
+ "--host",
+ help="ip address to bind",
+ default=PRHOST_DEFAULT,
+ )
+ parser.add_argument(
+ "--port",
+ type=int,
+ default=PRPORT_DEFAULT,
+ help="port number (default: 8585)",
+ )
+ parser.add_argument(
+ "-r",
+ "--read-only",
+ action="store_true",
+ help="open database in read-only mode",
+ )
+ parser.add_argument(
+ "-u",
+ "--upstream",
+ default=os.environ.get("PRSERV_UPSTREAM", None),
+ help="Upstream PR service (host:port)",
+ )
+
+ args = parser.parse_args()
+ init_logger(os.path.abspath(args.log), args.loglevel)
+
+ if args.start:
+ ret=prserv.serv.start_daemon(
+ args.file,
+ args.host,
+ args.port,
+ os.path.abspath(args.log),
+ args.read_only,
+ args.upstream
+ )
+ elif args.stop:
+ ret=prserv.serv.stop_daemon(args.host, args.port)
else:
ret=parser.print_help()
return ret
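The new init_logger() resolves the level name through getattr(), the same idiom bitbake-hashserv uses; an unknown name comes back as None and is rejected. Reduced, with a hypothetical log path:

    import logging

    def init_logger(logfile, loglevel):
        numeric_level = getattr(logging, loglevel.upper(), None)  # "info" -> logging.INFO
        if not isinstance(numeric_level, int):
            raise ValueError("Invalid log level: %s" % loglevel)
        logging.basicConfig(level=numeric_level, filename=logfile,
                            format="%(asctime)-15s %(message)s")

    init_logger("/tmp/prserv-demo.log", "INFO")
    logging.info("PR server starting")  # written to the log file with a timestamp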
diff --git a/bin/bitbake-selftest b/bin/bitbake-selftest
index 041a2719f..1b7a783fd 100755
--- a/bin/bitbake-selftest
+++ b/bin/bitbake-selftest
@@ -7,27 +7,33 @@
import os
import sys, logging
+import warnings
+warnings.simplefilter("default")
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), 'lib'))
import unittest
try:
import bb
import hashserv
+ import prserv
import layerindexlib
except RuntimeError as exc:
sys.exit(str(exc))
tests = ["bb.tests.codeparser",
+ "bb.tests.color",
"bb.tests.cooker",
"bb.tests.cow",
"bb.tests.data",
"bb.tests.event",
"bb.tests.fetch",
"bb.tests.parse",
- "bb.tests.persist_data",
"bb.tests.runqueue",
+ "bb.tests.siggen",
"bb.tests.utils",
+ "bb.tests.compression",
"hashserv.tests",
+ "prserv.tests",
"layerindexlib.tests.layerindexobj",
"layerindexlib.tests.restapi",
"layerindexlib.tests.cooker"]
diff --git a/bin/bitbake-server b/bin/bitbake-server
new file mode 100755
index 000000000..e367ec2c1
--- /dev/null
+++ b/bin/bitbake-server
@@ -0,0 +1,55 @@
+#!/usr/bin/env python3
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+# Copyright (C) 2020 Richard Purdie
+#
+
+import os
+import sys
+import warnings
+warnings.simplefilter("default")
+import logging
+sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
+
+import bb
+
+bb.utils.check_system_locale()
+
+# Users shouldn't be running this code directly
+if len(sys.argv) != 11 or not sys.argv[1].startswith("decafbad"):
+ print("bitbake-server is meant for internal execution by bitbake itself, please don't use it standalone.")
+ sys.exit(1)
+
+import bb.server.process
+
+lockfd = int(sys.argv[2])
+readypipeinfd = int(sys.argv[3])
+logfile = sys.argv[4]
+lockname = sys.argv[5]
+sockname = sys.argv[6]
+timeout = float(sys.argv[7])
+profile = bool(int(sys.argv[8]))
+xmlrpcinterface = (sys.argv[9], int(sys.argv[10]))
+if xmlrpcinterface[0] == "None":
+ xmlrpcinterface = (None, xmlrpcinterface[1])
+
+# Replace standard fds with our own
+with open('/dev/null', 'r') as si:
+ os.dup2(si.fileno(), sys.stdin.fileno())
+
+with open(logfile, 'a+') as so:
+ os.dup2(so.fileno(), sys.stdout.fileno())
+ os.dup2(so.fileno(), sys.stderr.fileno())
+
+# Have stdout and stderr be the same so log output matches chronologically
+# and there aren't two separate buffers
+sys.stderr = sys.stdout
+
+logger = logging.getLogger("BitBake")
+# Ensure logging messages get sent to the UI as events
+handler = bb.event.LogHandler()
+logger.addHandler(handler)
+
+bb.server.process.execServer(lockfd, readypipeinfd, lockname, sockname, timeout, xmlrpcinterface, profile)
+
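The dup2() calls above rebind the process's real file descriptors, so output written by C extensions and child processes also lands in the log, and stdout/stderr share one stream so ordering is preserved. A standalone sketch (the log path is hypothetical):

    import os
    import sys

    logfile = "/tmp/demo.log"

    with open(os.devnull, "r") as si:
        os.dup2(si.fileno(), sys.stdin.fileno())   # detach stdin

    with open(logfile, "a+") as so:
        os.dup2(so.fileno(), sys.stdout.fileno())  # fd 1 -> log file
        os.dup2(so.fileno(), sys.stderr.fileno())  # fd 2 -> same file
    # closing `so` here is safe: the duplicated descriptors stay open

    print("this line goes to the log, not the terminal")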
diff --git a/bin/bitbake-worker b/bin/bitbake-worker
index 97cc0fd60..e8073f2ac 100755
--- a/bin/bitbake-worker
+++ b/bin/bitbake-worker
@@ -1,11 +1,14 @@
#!/usr/bin/env python3
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
import os
import sys
import warnings
+warnings.simplefilter("default")
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
from bb import fetch2
import logging
@@ -16,11 +19,12 @@ import signal
import pickle
import traceback
import queue
+import shlex
+import subprocess
from multiprocessing import Lock
from threading import Thread
-if sys.getfilesystemencoding() != "utf-8":
- sys.exit("Please use a locale setting which supports UTF-8 (such as LANG=en_US.UTF-8).\nPython can't change the filesystem locale after loading so we need a UTF-8 when Python starts or things won't work.")
+bb.utils.check_system_locale()
# Users shouldn't be running this code directly
if len(sys.argv) != 2 or not sys.argv[1].startswith("decafbad"):
@@ -87,19 +91,19 @@ def worker_fire_prepickled(event):
worker_thread_exit = False
def worker_flush(worker_queue):
- worker_queue_int = b""
+ worker_queue_int = bytearray()
global worker_pipe, worker_thread_exit
while True:
try:
- worker_queue_int = worker_queue_int + worker_queue.get(True, 1)
+ worker_queue_int.extend(worker_queue.get(True, 1))
except queue.Empty:
pass
while (worker_queue_int or not worker_queue.empty()):
try:
(_, ready, _) = select.select([], [worker_pipe], [], 1)
if not worker_queue.empty():
- worker_queue_int = worker_queue_int + worker_queue.get()
+ worker_queue_int.extend(worker_queue.get())
written = os.write(worker_pipe, worker_queue_int)
worker_queue_int = worker_queue_int[written:]
except (IOError, OSError) as e:
@@ -117,9 +121,10 @@ def worker_child_fire(event, d):
data = b"<event>" + pickle.dumps(event) + b"</event>"
try:
- worker_pipe_lock.acquire()
- worker_pipe.write(data)
- worker_pipe_lock.release()
+ with bb.utils.lock_timeout(worker_pipe_lock):
+ while(len(data)):
+ written = worker_pipe.write(data)
+ data = data[written:]
except IOError:
sigterm_handler(None, None)
raise
@@ -138,40 +143,59 @@ def sigterm_handler(signum, frame):
os.killpg(0, signal.SIGTERM)
sys.exit()
-def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, taskhash, unihash, appends, taskdepdata, extraconfigdata, quieterrors=False, dry_run_exec=False):
+def fork_off_task(cfg, data, databuilder, workerdata, extraconfigdata, runtask):
+
+ fn = runtask['fn']
+ task = runtask['task']
+ taskname = runtask['taskname']
+ taskhash = runtask['taskhash']
+ unihash = runtask['unihash']
+ appends = runtask['appends']
+ layername = runtask['layername']
+ taskdepdata = runtask['taskdepdata']
+ quieterrors = runtask['quieterrors']
# We need to setup the environment BEFORE the fork, since
# a fork() or exec*() activates PSEUDO...
envbackup = {}
+ fakeroot = False
fakeenv = {}
umask = None
- taskdep = workerdata["taskdeps"][fn]
+ uid = os.getuid()
+ gid = os.getgid()
+
+ taskdep = runtask['taskdep']
if 'umask' in taskdep and taskname in taskdep['umask']:
+ umask = taskdep['umask'][taskname]
+ elif workerdata["umask"]:
+ umask = workerdata["umask"]
+ if umask:
# umask might come in as a number or text string..
try:
- umask = int(taskdep['umask'][taskname],8)
+ umask = int(umask, 8)
except TypeError:
- umask = taskdep['umask'][taskname]
+ pass
- dry_run = cfg.dry_run or dry_run_exec
+ dry_run = cfg.dry_run or runtask['dry_run']
# We can't use the fakeroot environment in a dry run as it possibly hasn't been built
if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not dry_run:
- envvars = (workerdata["fakerootenv"][fn] or "").split()
- for key, value in (var.split('=') for var in envvars):
+ fakeroot = True
+ envvars = (runtask['fakerootenv'] or "").split()
+ for key, value in (var.split('=',1) for var in envvars):
envbackup[key] = os.environ.get(key)
os.environ[key] = value
fakeenv[key] = value
- fakedirs = (workerdata["fakerootdirs"][fn] or "").split()
+ fakedirs = (runtask['fakerootdirs'] or "").split()
for p in fakedirs:
bb.utils.mkdirhier(p)
- logger.debug(2, 'Running %s:%s under fakeroot, fakedirs: %s' %
+ logger.debug2('Running %s:%s under fakeroot, fakedirs: %s' %
(fn, taskname, ', '.join(fakedirs)))
else:
- envvars = (workerdata["fakerootnoenv"][fn] or "").split()
- for key, value in (var.split('=') for var in envvars):
+ envvars = (runtask['fakerootnoenv'] or "").split()
+ for key, value in (var.split('=',1) for var in envvars):
envbackup[key] = os.environ.get(key)
os.environ[key] = value
fakeenv[key] = value
@@ -213,19 +237,21 @@ def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, taskha
# Let SIGHUP exit as SIGTERM
signal.signal(signal.SIGHUP, sigterm_handler)
- # No stdin
- newsi = os.open(os.devnull, os.O_RDWR)
- os.dup2(newsi, sys.stdin.fileno())
+ # No stdin & stdout
+ # stdout is used as a status report channel and must not be used by child processes.
+ dumbio = os.open(os.devnull, os.O_RDWR)
+ os.dup2(dumbio, sys.stdin.fileno())
+ os.dup2(dumbio, sys.stdout.fileno())
- if umask:
+ if umask is not None:
os.umask(umask)
try:
- bb_cache = bb.cache.NoCache(databuilder)
(realfn, virtual, mc) = bb.cache.virtualfn2realfn(fn)
the_data = databuilder.mcdata[mc]
the_data.setVar("BB_WORKERCONTEXT", "1")
the_data.setVar("BB_TASKDEPDATA", taskdepdata)
+ the_data.setVar('BB_CURRENTTASK', taskname.replace("do_", ""))
if cfg.limited_deps:
the_data.setVar("BB_LIMITEDDEPS", "1")
the_data.setVar("BUILDNAME", workerdata["buildname"])
@@ -239,12 +265,20 @@ def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, taskha
bb.parse.siggen.set_taskhashes(workerdata["newhashes"])
ret = 0
- the_data = bb_cache.loadDataFull(fn, appends)
+ the_data = databuilder.parseRecipe(fn, appends, layername)
the_data.setVar('BB_TASKHASH', taskhash)
the_data.setVar('BB_UNIHASH', unihash)
+ bb.parse.siggen.setup_datacache_from_datastore(fn, the_data)
bb.utils.set_process_name("%s:%s" % (the_data.getVar("PN"), taskname.replace("do_", "")))
+ if not bb.utils.to_boolean(the_data.getVarFlag(taskname, 'network')):
+ if bb.utils.is_local_uid(uid):
+ logger.debug("Attempting to disable network for %s" % taskname)
+ bb.utils.disable_network(uid, gid)
+ else:
+ logger.debug("Skipping disable network for %s since %s is not a local uid." % (taskname, uid))
+
# exported_vars() returns a generator which *cannot* be passed to os.environ.update()
# successfully. We also need to unset anything from the environment which shouldn't be there
exports = bb.data.exported_vars(the_data)
@@ -273,10 +307,20 @@ def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, taskha
if not quieterrors:
logger.critical(traceback.format_exc())
os._exit(1)
+
+ sys.stdout.flush()
+ sys.stderr.flush()
+
try:
if dry_run:
return 0
- return bb.build.exec_task(fn, taskname, the_data, cfg.profile)
+ try:
+ ret = bb.build.exec_task(fn, taskname, the_data, cfg.profile)
+ finally:
+ if fakeroot:
+ fakerootcmd = shlex.split(the_data.getVar("FAKEROOTCMD"))
+ subprocess.run(fakerootcmd + ['-S'], check=True, stdout=subprocess.PIPE)
+ return ret
except:
os._exit(1)
if not profiling:
@@ -308,12 +352,12 @@ class runQueueWorkerPipe():
if pipeout:
pipeout.close()
bb.utils.nonblockingfd(self.input)
- self.queue = b""
+ self.queue = bytearray()
def read(self):
start = len(self.queue)
try:
- self.queue = self.queue + (self.input.read(102400) or b"")
+ self.queue.extend(self.input.read(102400) or b"")
except (OSError, IOError) as e:
if e.errno != errno.EAGAIN:
raise
@@ -321,7 +365,9 @@ class runQueueWorkerPipe():
end = len(self.queue)
index = self.queue.find(b"</event>")
while index != -1:
- worker_fire_prepickled(self.queue[:index+8])
+ msg = self.queue[:index+8]
+ assert msg.startswith(b"<event>") and msg.count(b"<event>") == 1
+ worker_fire_prepickled(msg)
self.queue = self.queue[index+8:]
index = self.queue.find(b"</event>")
return (end > start)
@@ -339,7 +385,7 @@ class BitbakeWorker(object):
def __init__(self, din):
self.input = din
bb.utils.nonblockingfd(self.input)
- self.queue = b""
+ self.queue = bytearray()
self.cookercfg = None
self.databuilder = None
self.data = None
@@ -373,7 +419,7 @@ class BitbakeWorker(object):
if len(r) == 0:
# EOF on pipe, server must have terminated
self.sigterm_exception(signal.SIGTERM, None)
- self.queue = self.queue + r
+ self.queue.extend(r)
except (OSError, IOError):
pass
if len(self.queue):
@@ -393,19 +439,35 @@ class BitbakeWorker(object):
while self.process_waitpid():
continue
-
def handle_item(self, item, func):
- if self.queue.startswith(b"<" + item + b">"):
- index = self.queue.find(b"</" + item + b">")
- while index != -1:
- func(self.queue[(len(item) + 2):index])
- self.queue = self.queue[(index + len(item) + 3):]
- index = self.queue.find(b"</" + item + b">")
+ opening_tag = b"<" + item + b">"
+ if not self.queue.startswith(opening_tag):
+ return
+
+ tag_len = len(opening_tag)
+ if len(self.queue) < tag_len + 4:
+ # we need to receive more data
+ return
+ header = self.queue[tag_len:tag_len + 4]
+ payload_len = int.from_bytes(header, 'big')
+ # closing tag has length (tag_len + 1)
+ if len(self.queue) < tag_len * 2 + 1 + payload_len:
+ # we need to receive more data
+ return
+
+ index = self.queue.find(b"</" + item + b">")
+ if index != -1:
+ try:
+ func(self.queue[(tag_len + 4):index])
+ except pickle.UnpicklingError:
+ workerlog_write("Unable to unpickle data: %s\n" % ":".join("{:02x}".format(c) for c in self.queue))
+ raise
+ self.queue = self.queue[(index + len(b"</") + len(item) + len(b">")):]
def handle_cookercfg(self, data):
self.cookercfg = pickle.loads(data)
self.databuilder = bb.cookerdata.CookerDataBuilder(self.cookercfg, worker=True)
- self.databuilder.parseBaseConfiguration()
+ self.databuilder.parseBaseConfiguration(worker=True)
self.data = self.databuilder.data
def handle_extraconfigdata(self, data):
@@ -413,13 +475,14 @@ class BitbakeWorker(object):
def handle_workerdata(self, data):
self.workerdata = pickle.loads(data)
+ bb.build.verboseShellLogging = self.workerdata["build_verbose_shell"]
+ bb.build.verboseStdoutLogging = self.workerdata["build_verbose_stdout"]
bb.msg.loggerDefaultLogLevel = self.workerdata["logdefaultlevel"]
- bb.msg.loggerDefaultVerbose = self.workerdata["logdefaultverbose"]
- bb.msg.loggerVerboseLogs = self.workerdata["logdefaultverboselogs"]
bb.msg.loggerDefaultDomains = self.workerdata["logdefaultdomain"]
for mc in self.databuilder.mcdata:
self.databuilder.mcdata[mc].setVar("PRSERV_HOST", self.workerdata["prhost"])
self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.workerdata["hashservaddr"])
+ self.databuilder.mcdata[mc].setVar("__bbclasstype", "recipe")
def handle_newtaskhashes(self, data):
self.workerdata["newhashes"] = pickle.loads(data)
@@ -437,11 +500,15 @@ class BitbakeWorker(object):
sys.exit(0)
def handle_runtask(self, data):
- fn, task, taskname, taskhash, unihash, quieterrors, appends, taskdepdata, dry_run_exec = pickle.loads(data)
- workerlog_write("Handling runtask %s %s %s\n" % (task, fn, taskname))
+ runtask = pickle.loads(data)
- pid, pipein, pipeout = fork_off_task(self.cookercfg, self.data, self.databuilder, self.workerdata, fn, task, taskname, taskhash, unihash, appends, taskdepdata, self.extraconfigdata, quieterrors, dry_run_exec)
+ fn = runtask['fn']
+ task = runtask['task']
+ taskname = runtask['taskname']
+ workerlog_write("Handling runtask %s %s %s\n" % (task, fn, taskname))
+
+ pid, pipein, pipeout = fork_off_task(self.cookercfg, self.data, self.databuilder, self.workerdata, self.extraconfigdata, runtask)
self.build_pids[pid] = task
self.build_pipes[pid] = runQueueWorkerPipe(pipein, pipeout)
@@ -505,9 +572,11 @@ except BaseException as e:
import traceback
sys.stderr.write(traceback.format_exc())
sys.stderr.write(str(e))
+finally:
+ worker_thread_exit = True
+ worker_thread.join()
-worker_thread_exit = True
-worker_thread.join()
-
-workerlog_write("exitting")
+workerlog_write("exiting")
+if not normalexit:
+ sys.exit(1)
sys.exit(0)
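The reworked handle_item() expects a 4-byte big-endian payload length directly after the opening tag, so the worker can wait for a complete message rather than trusting the first closing tag it finds (pickled payloads may contain tag-like byte sequences). A self-contained sketch of that framing; the helper names are hypothetical:

    def frame(item: bytes, payload: bytes) -> bytes:
        # <item> + 4-byte big-endian length + payload + </item>
        header = len(payload).to_bytes(4, "big")
        return b"<" + item + b">" + header + payload + b"</" + item + b">"

    def try_parse(queue: bytearray, item: bytes):
        opening = b"<" + item + b">"
        if not queue.startswith(opening):
            return None
        tag_len = len(opening)
        if len(queue) < tag_len + 4:
            return None  # header incomplete, wait for more data
        payload_len = int.from_bytes(queue[tag_len:tag_len + 4], "big")
        total = tag_len + 4 + payload_len + tag_len + 1  # closing tag is one byte longer
        if len(queue) < total:
            return None  # message incomplete, wait for more data
        payload = bytes(queue[tag_len + 4:tag_len + 4 + payload_len])
        del queue[:total]
        return payload

    buf = bytearray(frame(b"runtask", b"pickled-bytes-here"))
    print(try_parse(buf, b"runtask"))  # b'pickled-bytes-here'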
diff --git a/bin/git-make-shallow b/bin/git-make-shallow
index 57069f7ed..9de557c10 100755
--- a/bin/git-make-shallow
+++ b/bin/git-make-shallow
@@ -1,5 +1,7 @@
#!/usr/bin/env python3
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -16,19 +18,23 @@ import itertools
import os
import subprocess
import sys
+import warnings
+warnings.simplefilter("default")
version = 1.0
+git_cmd = ['git', '-c', 'safe.bareRepository=all']
+
def main():
if sys.version_info < (3, 4, 0):
sys.exit('Python 3.4 or greater is required')
- git_dir = check_output(['git', 'rev-parse', '--git-dir']).rstrip()
+ git_dir = check_output(git_cmd + ['rev-parse', '--git-dir']).rstrip()
shallow_file = os.path.join(git_dir, 'shallow')
if os.path.exists(shallow_file):
try:
- check_output(['git', 'fetch', '--unshallow'])
+ check_output(git_cmd + ['fetch', '--unshallow'])
except subprocess.CalledProcessError:
try:
os.unlink(shallow_file)
@@ -37,21 +43,21 @@ def main():
raise
args = process_args()
- revs = check_output(['git', 'rev-list'] + args.revisions).splitlines()
+ revs = check_output(git_cmd + ['rev-list'] + args.revisions).splitlines()
make_shallow(shallow_file, args.revisions, args.refs)
- ref_revs = check_output(['git', 'rev-list'] + args.refs).splitlines()
+ ref_revs = check_output(git_cmd + ['rev-list'] + args.refs).splitlines()
remaining_history = set(revs) & set(ref_revs)
for rev in remaining_history:
- if check_output(['git', 'rev-parse', '{}^@'.format(rev)]):
+ if check_output(git_cmd + ['rev-parse', '{}^@'.format(rev)]):
sys.exit('Error: %s was not made shallow' % rev)
filter_refs(args.refs)
if args.shrink:
shrink_repo(git_dir)
- subprocess.check_call(['git', 'fsck', '--unreachable'])
+ subprocess.check_call(git_cmd + ['fsck', '--unreachable'])
def process_args():
@@ -68,12 +74,12 @@ def process_args():
args = parser.parse_args()
if args.refs:
- args.refs = check_output(['git', 'rev-parse', '--symbolic-full-name'] + args.refs).splitlines()
+ args.refs = check_output(git_cmd + ['rev-parse', '--symbolic-full-name'] + args.refs).splitlines()
else:
args.refs = get_all_refs(lambda r, t, tt: t == 'commit' or tt == 'commit')
args.refs = list(filter(lambda r: not r.endswith('/HEAD'), args.refs))
- args.revisions = check_output(['git', 'rev-parse'] + ['%s^{}' % i for i in args.revisions]).splitlines()
+ args.revisions = check_output(git_cmd + ['rev-parse'] + ['%s^{}' % i for i in args.revisions]).splitlines()
return args
@@ -91,7 +97,7 @@ def make_shallow(shallow_file, revisions, refs):
def get_all_refs(ref_filter=None):
"""Return all the existing refs in this repository, optionally filtering the refs."""
- ref_output = check_output(['git', 'for-each-ref', '--format=%(refname)\t%(objecttype)\t%(*objecttype)'])
+ ref_output = check_output(git_cmd + ['for-each-ref', '--format=%(refname)\t%(objecttype)\t%(*objecttype)'])
ref_split = [tuple(iter_extend(l.rsplit('\t'), 3)) for l in ref_output.splitlines()]
if ref_filter:
ref_split = (e for e in ref_split if ref_filter(*e))
@@ -109,7 +115,7 @@ def filter_refs(refs):
all_refs = get_all_refs()
to_remove = set(all_refs) - set(refs)
if to_remove:
- check_output(['xargs', '-0', '-n', '1', 'git', 'update-ref', '-d', '--no-deref'],
+ check_output(['xargs', '-0', '-n', '1'] + git_cmd + ['update-ref', '-d', '--no-deref'],
input=''.join(l + '\0' for l in to_remove))
@@ -122,7 +128,7 @@ def follow_history_intersections(revisions, refs):
if rev in seen:
continue
- parents = check_output(['git', 'rev-parse', '%s^@' % rev]).splitlines()
+ parents = check_output(git_cmd + ['rev-parse', '%s^@' % rev]).splitlines()
yield rev
seen.add(rev)
@@ -130,12 +136,12 @@ def follow_history_intersections(revisions, refs):
if not parents:
continue
- check_refs = check_output(['git', 'merge-base', '--independent'] + sorted(refs)).splitlines()
+ check_refs = check_output(git_cmd + ['merge-base', '--independent'] + sorted(refs)).splitlines()
for parent in parents:
for ref in check_refs:
print("Checking %s vs %s" % (parent, ref))
try:
- merge_base = check_output(['git', 'merge-base', parent, ref]).rstrip()
+ merge_base = check_output(git_cmd + ['merge-base', parent, ref]).rstrip()
except subprocess.CalledProcessError:
continue
else:
@@ -155,14 +161,14 @@ def iter_except(func, exception, start=None):
def shrink_repo(git_dir):
"""Shrink the newly shallow repository, removing the unreachable objects."""
- subprocess.check_call(['git', 'reflog', 'expire', '--expire-unreachable=now', '--all'])
- subprocess.check_call(['git', 'repack', '-ad'])
+ subprocess.check_call(git_cmd + ['reflog', 'expire', '--expire-unreachable=now', '--all'])
+ subprocess.check_call(git_cmd + ['repack', '-ad'])
try:
os.unlink(os.path.join(git_dir, 'objects', 'info', 'alternates'))
except OSError as exc:
if exc.errno != errno.ENOENT:
raise
- subprocess.check_call(['git', 'prune', '--expire', 'now'])
+ subprocess.check_call(git_cmd + ['prune', '--expire', 'now'])
if __name__ == '__main__':
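Prefixing every invocation with a shared git_cmd list threads -c safe.bareRepository=all through uniformly, keeping the script usable inside bare repositories under newer git versions. The pattern, isolated (check_output here is a thin wrapper like the script's own helper):

    import subprocess

    GIT_CMD = ["git", "-c", "safe.bareRepository=all"]

    def check_output(cmd, input=None):
        # text mode, raises CalledProcessError on a non-zero exit
        return subprocess.check_output(cmd, text=True, input=input)

    head = check_output(GIT_CMD + ["rev-parse", "HEAD"]).rstrip()
    print(head)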
diff --git a/bin/toaster b/bin/toaster
index 6b90ee187..f002c8c15 100755
--- a/bin/toaster
+++ b/bin/toaster
@@ -33,7 +33,7 @@ databaseCheck()
$MANAGE migrate --noinput || retval=1
if [ $retval -eq 1 ]; then
- echo "Failed migrations, aborting system start" 1>&2
+ echo "Failed migrations, halting system start" 1>&2
return $retval
fi
# Make sure that checksettings can pick up any value for TEMPLATECONF
@@ -41,7 +41,7 @@ databaseCheck()
$MANAGE checksettings --traceback || retval=1
if [ $retval -eq 1 ]; then
- printf "\nError while checking settings; aborting\n"
+ printf "\nError while checking settings; exiting\n"
return $retval
fi
@@ -84,7 +84,7 @@ webserverStartAll()
echo "Starting webserver..."
$MANAGE runserver --noreload "$ADDR_PORT" \
- </dev/null >>${BUILDDIR}/toaster_web.log 2>&1 \
+ </dev/null >>${TOASTER_LOGS_DIR}/web.log 2>&1 \
& echo $! >${BUILDDIR}/.toastermain.pid
sleep 1
@@ -181,6 +181,14 @@ WEBSERVER=1
export TOASTER_BUILDSERVER=1
ADDR_PORT="localhost:8000"
TOASTERDIR=`dirname $BUILDDIR`
+# ${BUILDDIR}/toaster_logs/ became the default location for toaster logs
+# This is needed for the django-log-viewer implementation: https://pypi.org/project/django-log-viewer/
+# If the directory does not exist, create it.
+TOASTER_LOGS_DIR="${BUILDDIR}/toaster_logs/"
+if [ ! -d $TOASTER_LOGS_DIR ]
+then
+ mkdir $TOASTER_LOGS_DIR
+fi
unset CMD
for param in $*; do
case $param in
@@ -248,7 +256,7 @@ fi
# 3) the sqlite db if that is being used.
# 4) pid's we need to clean up on exit/shutdown
export TOASTER_DIR=$TOASTERDIR
-export BB_ENV_EXTRAWHITE="$BB_ENV_EXTRAWHITE TOASTER_DIR"
+export BB_ENV_PASSTHROUGH_ADDITIONS="$BB_ENV_PASSTHROUGH_ADDITIONS TOASTER_DIR"
# Determine the action. If specified by arguments, fine, if not, toggle it
if [ "$CMD" = "start" ] ; then
@@ -299,7 +307,7 @@ case $CMD in
export BITBAKE_UI='toasterui'
if [ $TOASTER_BUILDSERVER -eq 1 ] ; then
$MANAGE runbuilds \
- </dev/null >>${BUILDDIR}/toaster_runbuilds.log 2>&1 \
+ </dev/null >>${TOASTER_LOGS_DIR}/toaster_runbuilds.log 2>&1 \
& echo $! >${BUILDDIR}/.runbuilds.pid
else
echo "Toaster build server not started."
diff --git a/bin/toaster-eventreplay b/bin/toaster-eventreplay
index 8fa4ab711..74a319320 100755
--- a/bin/toaster-eventreplay
+++ b/bin/toaster-eventreplay
@@ -19,6 +19,8 @@ import sys
import json
import pickle
import codecs
+import warnings
+warnings.simplefilter("default")
from collections import namedtuple
@@ -28,79 +30,23 @@ sys.path.insert(0, join(dirname(dirname(abspath(__file__))), 'lib'))
import bb.cooker
from bb.ui import toasterui
-
-class EventPlayer:
- """Emulate a connection to a bitbake server."""
-
- def __init__(self, eventfile, variables):
- self.eventfile = eventfile
- self.variables = variables
- self.eventmask = []
-
- def waitEvent(self, _timeout):
- """Read event from the file."""
- line = self.eventfile.readline().strip()
- if not line:
- return
- try:
- event_str = json.loads(line)['vars'].encode('utf-8')
- event = pickle.loads(codecs.decode(event_str, 'base64'))
- event_name = "%s.%s" % (event.__module__, event.__class__.__name__)
- if event_name not in self.eventmask:
- return
- return event
- except ValueError as err:
- print("Failed loading ", line)
- raise err
-
- def runCommand(self, command_line):
- """Emulate running a command on the server."""
- name = command_line[0]
-
- if name == "getVariable":
- var_name = command_line[1]
- variable = self.variables.get(var_name)
- if variable:
- return variable['v'], None
- return None, "Missing variable %s" % var_name
-
- elif name == "getAllKeysWithFlags":
- dump = {}
- flaglist = command_line[1]
- for key, val in self.variables.items():
- try:
- if not key.startswith("__"):
- dump[key] = {
- 'v': val['v'],
- 'history' : val['history'],
- }
- for flag in flaglist:
- dump[key][flag] = val[flag]
- except Exception as err:
- print(err)
- return (dump, None)
-
- elif name == 'setEventMask':
- self.eventmask = command_line[-1]
- return True, None
-
- else:
- raise Exception("Command %s not implemented" % command_line[0])
-
- def getEventHandle(self):
- """
- This method is called by toasterui.
- The return value is passed to self.runCommand but not used there.
- """
- pass
+from bb.ui import eventreplay
def main(argv):
with open(argv[-1]) as eventfile:
# load variables from the first line
- variables = json.loads(eventfile.readline().strip())['allvariables']
-
+ variables = None
+ while line := eventfile.readline().strip():
+ try:
+ variables = json.loads(line)['allvariables']
+ break
+ except (KeyError, json.JSONDecodeError):
+ continue
+ if not variables:
+ sys.exit("Cannot find allvariables entry in event log file %s" % argv[-1])
+ eventfile.seek(0)
params = namedtuple('ConfigParams', ['observe_only'])(True)
- player = EventPlayer(eventfile, variables)
+ player = eventreplay.EventPlayer(eventfile, variables)
return toasterui.main(player, player, params)
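Instead of assuming the variable dump is on line one, the loop now scans forward until a JSON object with an 'allvariables' key parses, then rewinds so the replay still sees the whole file. The same pattern against an in-memory file (the sample log lines are made up):

    import io
    import json

    eventfile = io.StringIO(
        'stray header line\n'
        '{"allvariables": {"PN": {"v": "busybox"}}}\n'
        '{"vars": "...base64 pickle..."}\n'
    )

    variables = None
    while line := eventfile.readline().strip():
        try:
            variables = json.loads(line)["allvariables"]
            break
        except (KeyError, json.JSONDecodeError):
            continue

    eventfile.seek(0)   # replay from the top once the dump is located
    print(variables)    # {'PN': {'v': 'busybox'}}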