-rw-r--r--  README | 43
-rw-r--r--  SECURITY.md | 24
-rwxr-xr-x  bin/bitbake | 7
-rwxr-xr-x  bin/bitbake-diffsigs | 67
-rwxr-xr-x  bin/bitbake-getvar | 34
-rwxr-xr-x  bin/bitbake-hashclient | 246
-rwxr-xr-x  bin/bitbake-hashserv | 147
-rwxr-xr-x  bin/bitbake-layers | 6
-rwxr-xr-x  bin/bitbake-prserv | 94
-rwxr-xr-x  bin/bitbake-selftest | 3
-rwxr-xr-x  bin/bitbake-server | 13
-rwxr-xr-x  bin/bitbake-worker | 149
-rwxr-xr-x  bin/git-make-shallow | 38
-rwxr-xr-x  bin/toaster | 18
-rwxr-xr-x  bin/toaster-eventreplay | 82
-rw-r--r--  conf/bitbake.conf | 8
-rw-r--r--  contrib/hashserv/Dockerfile | 6
-rw-r--r--  contrib/prserv/Dockerfile | 62
-rw-r--r--  contrib/vim/indent/bitbake.vim | 6
-rw-r--r--  contrib/vim/plugin/newbbappend.vim | 2
-rw-r--r--  contrib/vim/syntax/bitbake.vim | 19
-rw-r--r--  doc/Makefile | 2
-rw-r--r--  doc/README | 4
-rw-r--r--  doc/_templates/footer.html | 9
-rw-r--r--  doc/bitbake-user-manual/bitbake-user-manual-execution.rst | 151
-rw-r--r--  doc/bitbake-user-manual/bitbake-user-manual-fetching.rst | 228
-rw-r--r--  doc/bitbake-user-manual/bitbake-user-manual-hello.rst | 155
-rw-r--r--  doc/bitbake-user-manual/bitbake-user-manual-intro.rst | 26
-rw-r--r--  doc/bitbake-user-manual/bitbake-user-manual-metadata.rst | 422
-rw-r--r--  doc/bitbake-user-manual/bitbake-user-manual-ref-variables-context.rst | 91
-rw-r--r--  doc/bitbake-user-manual/bitbake-user-manual-ref-variables.rst | 654
-rw-r--r--  doc/index.rst | 1
-rw-r--r--  doc/releases.rst | 92
-rw-r--r--  lib/bb/COW.py | 2
-rw-r--r--  lib/bb/__init__.py | 30
-rwxr-xr-x  lib/bb/acl.py | 215
-rw-r--r--  lib/bb/asyncrpc/__init__.py | 35
-rw-r--r--  lib/bb/asyncrpc/client.py | 276
-rw-r--r--  lib/bb/asyncrpc/connection.py | 146
-rw-r--r--  lib/bb/asyncrpc/exceptions.py | 21
-rw-r--r--  lib/bb/asyncrpc/serv.py | 415
-rw-r--r--  lib/bb/build.py | 271
-rw-r--r--  lib/bb/cache.py | 337
-rw-r--r--  lib/bb/checksum.py | 22
-rw-r--r--  lib/bb/codeparser.py | 104
-rw-r--r--  lib/bb/command.py | 100
-rw-r--r--  lib/bb/compress/_pipecompress.py | 196
-rw-r--r--  lib/bb/compress/lz4.py | 19
-rw-r--r--  lib/bb/compress/zstd.py | 30
-rw-r--r--  lib/bb/cooker.py | 724
-rw-r--r--  lib/bb/cookerdata.py | 197
-rw-r--r--  lib/bb/daemonize.py | 44
-rw-r--r--  lib/bb/data.py | 147
-rw-r--r--  lib/bb/data_smart.py | 268
-rw-r--r--  lib/bb/event.py | 148
-rw-r--r--  lib/bb/exceptions.py | 2
-rw-r--r--  lib/bb/fetch2/README | 57
-rw-r--r--  lib/bb/fetch2/__init__.py | 363
-rw-r--r--  lib/bb/fetch2/crate.py | 141
-rw-r--r--  lib/bb/fetch2/gcp.py | 101
-rw-r--r--  lib/bb/fetch2/git.py | 286
-rw-r--r--  lib/bb/fetch2/gitsm.py | 49
-rw-r--r--  lib/bb/fetch2/hg.py | 1
-rw-r--r--  lib/bb/fetch2/local.py | 16
-rw-r--r--  lib/bb/fetch2/npm.py | 63
-rw-r--r--  lib/bb/fetch2/npmsw.py | 113
-rw-r--r--  lib/bb/fetch2/osc.py | 52
-rw-r--r--  lib/bb/fetch2/perforce.py | 2
-rw-r--r--  lib/bb/fetch2/sftp.py | 2
-rw-r--r--  lib/bb/fetch2/ssh.py | 47
-rw-r--r--  lib/bb/fetch2/svn.py | 10
-rw-r--r--  lib/bb/fetch2/wget.py | 181
-rwxr-xr-x  lib/bb/main.py | 407
-rw-r--r--  lib/bb/monitordisk.py | 24
-rw-r--r--  lib/bb/msg.py | 34
-rw-r--r--  lib/bb/parse/__init__.py | 14
-rw-r--r--  lib/bb/parse/ast.py | 85
-rw-r--r--  lib/bb/parse/parse_py/BBHandler.py | 72
-rw-r--r--  lib/bb/parse/parse_py/ConfHandler.py | 38
-rw-r--r--  lib/bb/persist_data.py | 80
-rw-r--r--  lib/bb/process.py | 9
-rw-r--r--  lib/bb/progress.py | 2
-rw-r--r--  lib/bb/providers.py | 14
-rw-r--r--  lib/bb/runqueue.py | 917
-rw-r--r--  lib/bb/server/process.py | 380
-rw-r--r--  lib/bb/server/xmlrpcserver.py | 3
-rw-r--r--  lib/bb/siggen.py | 744
-rw-r--r--  lib/bb/taskdata.py | 14
-rw-r--r--  lib/bb/tests/codeparser.py | 66
-rw-r--r--  lib/bb/tests/color.py | 2
-rw-r--r--  lib/bb/tests/compression.py | 100
-rw-r--r--  lib/bb/tests/cooker.py | 2
-rw-r--r--  lib/bb/tests/data.py | 132
-rw-r--r--  lib/bb/tests/event.py | 62
-rw-r--r--  lib/bb/tests/fetch-testdata/debian/pool/main/m/minicom/index.html | 59
-rw-r--r--  lib/bb/tests/fetch-testdata/npm-local-link-sources/inner/npm-shrinkwrap-v1.json | 11
-rw-r--r--  lib/bb/tests/fetch-testdata/npm-local-link-sources/inner/npm-shrinkwrap-v2.json | 28
-rw-r--r--  lib/bb/tests/fetch-testdata/npm-local-link-sources/inner/npm-shrinkwrap-v3.json | 23
-rw-r--r--  lib/bb/tests/fetch-testdata/npm-local-link-sources/inner/package.json | 8
-rw-r--r--  lib/bb/tests/fetch-testdata/npm-local-link-sources/package.json | 5
-rw-r--r--  lib/bb/tests/fetch-testdata/software/libxml2/2.10/index.html | 20
-rw-r--r--  lib/bb/tests/fetch-testdata/software/libxml2/2.9/index.html | 40
-rw-r--r--  lib/bb/tests/fetch-testdata/software/libxml2/index.html | 19
-rw-r--r--  lib/bb/tests/fetch.py | 1151
-rw-r--r--  lib/bb/tests/parse.py | 179
-rw-r--r--  lib/bb/tests/runqueue-tests/conf/bitbake.conf | 2
-rw-r--r--  lib/bb/tests/runqueue.py | 54
-rw-r--r--  lib/bb/tests/siggen.py | 77
-rw-r--r--  lib/bb/tests/utils.py | 20
-rw-r--r--  lib/bb/tinfoil.py | 26
-rw-r--r--  lib/bb/ui/buildinfohelper.py | 96
-rw-r--r--  lib/bb/ui/eventreplay.py | 86
-rw-r--r--  lib/bb/ui/knotty.py | 185
-rw-r--r--  lib/bb/ui/ncurses.py | 3
-rw-r--r--  lib/bb/ui/taskexp.py | 7
-rwxr-xr-x  lib/bb/ui/taskexp_ncurses.py | 1511
-rw-r--r--  lib/bb/ui/toasterui.py | 2
-rw-r--r--  lib/bb/ui/uievent.py | 32
-rw-r--r--  lib/bb/ui/uihelper.py | 6
-rw-r--r--  lib/bb/utils.py | 304
-rwxr-xr-x  lib/bb/xattr.py | 126
-rw-r--r--  lib/bblayers/__init__.py | 2
-rw-r--r--  lib/bblayers/action.py | 18
-rw-r--r--  lib/bblayers/common.py | 2
-rw-r--r--  lib/bblayers/layerindex.py | 27
-rw-r--r--  lib/bblayers/query.py | 58
-rw-r--r--  lib/bs4/tests/test_tree.py | 2
-rw-r--r--  lib/codegen.py | 10
-rw-r--r--  lib/hashserv/__init__.py | 177
-rw-r--r--  lib/hashserv/client.py | 312
-rw-r--r--  lib/hashserv/server.py | 946
-rw-r--r--  lib/hashserv/sqlalchemy.py | 598
-rw-r--r--  lib/hashserv/sqlite.py | 562
-rw-r--r--  lib/hashserv/tests.py | 1270
-rw-r--r--  lib/layerindexlib/__init__.py | 24
-rw-r--r--  lib/layerindexlib/cooker.py | 2
-rw-r--r--  lib/layerindexlib/restapi.py | 4
-rw-r--r--  lib/layerindexlib/tests/restapi.py | 2
-rw-r--r--  lib/ply/yacc.py | 10
-rw-r--r--  lib/progressbar/progressbar.py | 2
-rw-r--r--  lib/prserv/__init__.py | 8
-rw-r--r--  lib/prserv/client.py | 71
-rw-r--r--  lib/prserv/db.py | 193
-rw-r--r--  lib/prserv/serv.py | 315
-rw-r--r--  lib/pyinotify.py | 44
-rw-r--r--  lib/toaster/bldcollector/urls.py | 2
-rw-r--r--  lib/toaster/bldcollector/views.py | 3
-rw-r--r--  lib/toaster/bldcontrol/localhostbecontroller.py | 4
-rw-r--r--  lib/toaster/bldcontrol/management/commands/runbuilds.py | 83
-rw-r--r--  lib/toaster/bldcontrol/migrations/0008_models_bigautofield.py | 48
-rw-r--r--  lib/toaster/bldcontrol/models.py | 4
-rw-r--r--  lib/toaster/logs/.gitignore | 1
-rwxr-xr-x  lib/toaster/manage.py | 2
-rw-r--r--  lib/toaster/orm/fixtures/README | 2
-rwxr-xr-x  lib/toaster/orm/fixtures/gen_fixtures.py | 447
-rw-r--r--  lib/toaster/orm/fixtures/oe-core.xml | 48
-rw-r--r--  lib/toaster/orm/fixtures/poky.xml | 118
-rw-r--r--  lib/toaster/orm/fixtures/settings.xml | 4
-rw-r--r--  lib/toaster/orm/management/commands/lsupdates.py | 16
-rw-r--r--  lib/toaster/orm/migrations/0020_models_bigautofield.py | 173
-rw-r--r--  lib/toaster/orm/migrations/0021_eventlogsimports.py | 22
-rw-r--r--  lib/toaster/orm/models.py | 49
-rw-r--r--  lib/toaster/pytest.ini | 16
-rw-r--r--  lib/toaster/tests/browser/selenium_helpers_base.py | 76
-rw-r--r--  lib/toaster/tests/browser/test_all_builds_page.py | 315
-rw-r--r--  lib/toaster/tests/browser/test_all_projects_page.py | 162
-rw-r--r--  lib/toaster/tests/browser/test_builddashboard_page.py | 15
-rw-r--r--  lib/toaster/tests/browser/test_builddashboard_page_artifacts.py | 8
-rw-r--r--  lib/toaster/tests/browser/test_delete_project.py | 103
-rw-r--r--  lib/toaster/tests/browser/test_landing_page.py | 131
-rw-r--r--  lib/toaster/tests/browser/test_layerdetails_page.py | 39
-rw-r--r--  lib/toaster/tests/browser/test_most_recent_builds_states.py | 24
-rw-r--r--  lib/toaster/tests/browser/test_new_custom_image_page.py | 14
-rw-r--r--  lib/toaster/tests/browser/test_new_project_page.py | 16
-rw-r--r--  lib/toaster/tests/browser/test_project_builds_page.py | 4
-rw-r--r--  lib/toaster/tests/browser/test_project_config_page.py | 33
-rw-r--r--  lib/toaster/tests/browser/test_sample.py | 10
-rw-r--r--  lib/toaster/tests/browser/test_toastertable_ui.py | 11
-rw-r--r--  lib/toaster/tests/builds/buildtest.py | 13
-rw-r--r--  lib/toaster/tests/builds/test_core_image_min.py | 20
-rw-r--r--  lib/toaster/tests/commands/test_loaddata.py | 4
-rw-r--r--  lib/toaster/tests/commands/test_lsupdates.py | 3
-rw-r--r--  lib/toaster/tests/commands/test_runbuilds.py | 13
-rw-r--r--  lib/toaster/tests/db/test_db.py | 3
-rw-r--r--  lib/toaster/tests/functional/functional_helpers.py | 82
-rw-r--r--  lib/toaster/tests/functional/test_create_new_project.py | 179
-rw-r--r--  lib/toaster/tests/functional/test_functional_basic.py | 195
-rw-r--r--  lib/toaster/tests/functional/test_project_config.py | 341
-rw-r--r--  lib/toaster/tests/functional/test_project_page.py | 792
-rw-r--r--  lib/toaster/tests/functional/test_project_page_tab_config.py | 528
-rw-r--r--  lib/toaster/tests/functional/utils.py | 89
-rw-r--r--  lib/toaster/tests/toaster-tests-requirements.txt | 8
-rw-r--r--  lib/toaster/tests/views/test_views.py | 20
-rw-r--r--  lib/toaster/toastergui/api.py | 26
-rw-r--r--  lib/toaster/toastergui/fixtures/toastergui-unittest-data.xml | 24
-rw-r--r--  lib/toaster/toastergui/forms.py | 14
-rw-r--r--  lib/toaster/toastergui/static/css/default.css | 28
-rw-r--r--  lib/toaster/toastergui/static/css/jquery.dataTables-1.13.8.min.css | 1
-rw-r--r--  lib/toaster/toastergui/static/js/bootstrap-3.4.1.js (renamed from lib/toaster/toastergui/static/js/bootstrap.js) | 431
-rw-r--r--  lib/toaster/toastergui/static/js/bootstrap-3.4.1.min.js | 6
-rw-r--r--  lib/toaster/toastergui/static/js/bootstrap.min.js | 7
-rw-r--r--  lib/toaster/toastergui/static/js/jquery-3.7.1.min.js | 2
-rw-r--r--  lib/toaster/toastergui/static/js/jquery-3.7.1.min.map | 1
-rw-r--r--  lib/toaster/toastergui/static/js/jquery.dataTables-1.13.8.min.js | 4
-rw-r--r--  lib/toaster/toastergui/static/js/libtoaster.js | 2
-rw-r--r--  lib/toaster/toastergui/static/js/projectpage.js | 2
-rw-r--r--  lib/toaster/toastergui/templates/base.html | 11
-rw-r--r--  lib/toaster/toastergui/templates/base_specific.html | 4
-rw-r--r--  lib/toaster/toastergui/templates/command_line_builds.html | 209
-rw-r--r--  lib/toaster/toastergui/templates/configvars.html | 2
-rw-r--r--  lib/toaster/toastergui/templates/js-unit-tests.html | 2
-rw-r--r--  lib/toaster/toastergui/templates/landing.html | 16
-rw-r--r--  lib/toaster/toastergui/templates/landing_not_managed.html | 34
-rw-r--r--  lib/toaster/toastergui/templates/layerdetails.html | 2
-rw-r--r--  lib/toaster/toastergui/templates/mrb_section.html | 2
-rw-r--r--  lib/toaster/toastergui/templates/package_built_dependencies.html | 8
-rw-r--r--  lib/toaster/toastergui/templates/package_detail_base.html | 2
-rw-r--r--  lib/toaster/toastergui/templates/package_included_dependencies.html | 8
-rw-r--r--  lib/toaster/toastergui/templates/package_included_reverse_dependencies.html | 4
-rw-r--r--  lib/toaster/toastergui/templates/project.html | 2
-rw-r--r--  lib/toaster/toastergui/templates/project_specific.html | 2
-rw-r--r--  lib/toaster/toastergui/templates/projectconf.html | 34
-rw-r--r--  lib/toaster/toastergui/templates/recipe.html | 4
-rw-r--r--  lib/toaster/toastergui/templates/target.html | 4
-rw-r--r--  lib/toaster/toastergui/templatetags/projecttags.py | 4
-rw-r--r--  lib/toaster/toastergui/urls.py | 6
-rw-r--r--  lib/toaster/toastergui/views.py | 218
-rw-r--r--  lib/toaster/toastergui/widgets.py | 10
-rw-r--r--  lib/toaster/toastermain/logs.py | 158
-rw-r--r--  lib/toaster/toastermain/management/commands/buildimport.py | 4
-rw-r--r--  lib/toaster/toastermain/management/commands/checksocket.py | 4
-rw-r--r--  lib/toaster/toastermain/settings.py | 85
-rw-r--r--  lib/toaster/toastermain/settings_test.py | 4
-rw-r--r--  lib/toaster/toastermain/urls.py | 4
-rw-r--r--  lib/toaster/tox.ini | 24
-rw-r--r--  toaster-requirements.txt | 3
236 files changed, 20384 insertions, 5413 deletions
diff --git a/README b/README
index 64206de8a..e9f4c858e 100644
--- a/README
+++ b/README
@@ -7,22 +7,22 @@ One of BitBake's main users, OpenEmbedded, takes this core and builds embedded Linux software
stacks using a task-oriented approach.
For information about Bitbake, see the OpenEmbedded website:
- http://www.openembedded.org/
+ https://www.openembedded.org/
Bitbake plain documentation can be found under the doc directory or its integrated
html version at the Yocto Project website:
https://docs.yoctoproject.org
+Bitbake requires Python version 3.8 or newer.
+
Contributing
------------
-Please refer to
-http://www.openembedded.org/wiki/How_to_submit_a_patch_to_OpenEmbedded
-for guidelines on how to submit patches, just note that the latter documentation is intended
-for OpenEmbedded (and its core) not bitbake patches (bitbake-devel@lists.openembedded.org)
-but in general main guidelines apply. Once the commit(s) have been created, the way to send
-the patch is through git-send-email. For example, to send the last commit (HEAD) on current
-branch, type:
+Please refer to our contributor guide here: https://docs.yoctoproject.org/contributor-guide/
+for full details on how to submit changes.
+
+As a quick guide, patches should be sent to bitbake-devel@lists.openembedded.org
+The git command to do that would be:
git send-email -M -1 --to bitbake-devel@lists.openembedded.org
@@ -33,8 +33,31 @@ the Yocto Project documentation mailing list:
Mailing list:
- http://lists.openembedded.org/mailman/listinfo/bitbake-devel
+ https://lists.openembedded.org/g/bitbake-devel
Source code:
- http://git.openembedded.org/bitbake/
+ https://git.openembedded.org/bitbake/
+
+Testing
+-------
+
+Bitbake has a testsuite located in lib/bb/tests/ which aims to prevent regressions.
+You can run this with "bitbake-selftest". The fetcher in particular is well covered since
+it has so many corner cases, and the datastore has many tests too. Running the testsuite is
+recommended before submitting patches, particularly ones touching the fetcher or datastore. We also
+appreciate new test cases and may require them for more obscure issues.
+
+To run the tests, "zstd" and "git" must be installed.
+
+The assumption is made that this testsuite is run from an initialized OpenEmbedded build
+environment (i.e. `source oe-init-build-env` is used). If this is not the case, run the
+testsuite as follows:
+
+ export PATH=$(pwd)/bin:$PATH
+ bin/bitbake-selftest
+
+The testsuite can alternatively be executed using pytest, e.g. obtained from PyPI (in this
+case, the PATH is configured automatically):
+
+ pytest
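
[Editor's note: bitbake-selftest is a thin wrapper around Python's unittest, so the listed suites can also be driven directly. A minimal sketch, assuming bitbake's lib/ directory is on sys.path; "bb.tests.data" is one of the registered test modules:]

```python
# Minimal sketch: run one registered test module through unittest
# directly, assuming bitbake's lib/ directory is on sys.path.
import unittest

# Equivalent to "bitbake-selftest bb.tests.data"; the first argv entry
# is just the program name unittest reports.
unittest.main(module=None, argv=["bitbake-selftest", "bb.tests.data"])
```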
diff --git a/SECURITY.md b/SECURITY.md
new file mode 100644
index 000000000..7d2ce1f63
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1,24 @@
+How to Report a Potential Vulnerability?
+========================================
+
+If you would like to report a public issue (for example, one with a released
+CVE number), please report it using the
+[Security Bugzilla](https://bugzilla.yoctoproject.org/enter_bug.cgi?product=Security).
+If you have a patch ready, submit it following the same procedure as any other
+patch as described in README.md.
+
+If you are dealing with a not-yet released or urgent issue, please send a
+message to security AT yoctoproject DOT org, including as many details as
+possible: the layer or software module affected, the recipe and its version,
+and any example code, if available.
+
+Branches maintained with security fixes
+---------------------------------------
+
+See [Stable release and LTS](https://wiki.yoctoproject.org/wiki/Stable_Release_and_LTS)
+for detailed info regarding the policies and maintenance of Stable branches.
+
+The [Release page](https://wiki.yoctoproject.org/wiki/Releases) contains a list of all
+releases of the Yocto Project. Versions in grey are no longer actively maintained with
+security patches, but well-tested patches may still be accepted for them for
+significant issues.
diff --git a/bin/bitbake b/bin/bitbake
index f0ff2400f..382983e08 100755
--- a/bin/bitbake
+++ b/bin/bitbake
@@ -12,6 +12,8 @@
import os
import sys
+import warnings
+warnings.simplefilter("default")
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)),
'lib'))
@@ -23,10 +25,9 @@ except RuntimeError as exc:
from bb import cookerdata
from bb.main import bitbake_main, BitBakeConfigParameters, BBMainException
-if sys.getfilesystemencoding() != "utf-8":
- sys.exit("Please use a locale setting which supports UTF-8 (such as LANG=en_US.UTF-8).\nPython can't change the filesystem locale after loading so we need a UTF-8 when Python starts or things won't work.")
+bb.utils.check_system_locale()
-__version__ = "1.51.0"
+__version__ = "2.9.0"
if __name__ == "__main__":
if __version__ != bb.__version__:
diff --git a/bin/bitbake-diffsigs b/bin/bitbake-diffsigs
index 19420a2df..8202c7862 100755
--- a/bin/bitbake-diffsigs
+++ b/bin/bitbake-diffsigs
@@ -11,6 +11,8 @@
import os
import sys
import warnings
+
+warnings.simplefilter("default")
import argparse
import logging
import pickle
@@ -26,6 +28,7 @@ logger = bb.msg.logger_create(myname)
is_dump = myname == 'bitbake-dumpsig'
+
def find_siginfo(tinfoil, pn, taskname, sigs=None):
result = None
tinfoil.set_event_mask(['bb.event.FindSigInfoResult',
@@ -51,6 +54,7 @@ def find_siginfo(tinfoil, pn, taskname, sigs=None):
sys.exit(2)
return result
+
def find_siginfo_task(bbhandler, pn, taskname, sig1=None, sig2=None):
""" Find the most recent signature files for the specified PN/task """
@@ -59,22 +63,25 @@ def find_siginfo_task(bbhandler, pn, taskname, sig1=None, sig2=None):
if sig1 and sig2:
sigfiles = find_siginfo(bbhandler, pn, taskname, [sig1, sig2])
- if len(sigfiles) == 0:
+ if not sigfiles:
logger.error('No sigdata files found matching %s %s matching either %s or %s' % (pn, taskname, sig1, sig2))
sys.exit(1)
- elif not sig1 in sigfiles:
+ elif sig1 not in sigfiles:
logger.error('No sigdata files found matching %s %s with signature %s' % (pn, taskname, sig1))
sys.exit(1)
- elif not sig2 in sigfiles:
+ elif sig2 not in sigfiles:
logger.error('No sigdata files found matching %s %s with signature %s' % (pn, taskname, sig2))
sys.exit(1)
- latestfiles = [sigfiles[sig1], sigfiles[sig2]]
else:
- filedates = find_siginfo(bbhandler, pn, taskname)
- latestfiles = sorted(filedates.keys(), key=lambda f: filedates[f])[-2:]
- if not latestfiles:
+ sigfiles = find_siginfo(bbhandler, pn, taskname)
+ latestsigs = sorted(sigfiles.keys(), key=lambda h: sigfiles[h]['time'])[-2:]
+ if not latestsigs:
logger.error('No sigdata files found matching %s %s' % (pn, taskname))
sys.exit(1)
+ sig1 = latestsigs[0]
+ sig2 = latestsigs[1]
+
+ latestfiles = [sigfiles[sig1]['path'], sigfiles[sig2]['path']]
return latestfiles
@@ -85,14 +92,14 @@ def recursecb(key, hash1, hash2):
hashfiles = find_siginfo(tinfoil, key, None, hashes)
recout = []
- if len(hashfiles) == 0:
+ if not hashfiles:
recout.append("Unable to find matching sigdata for %s with hashes %s or %s" % (key, hash1, hash2))
- elif not hash1 in hashfiles:
+ elif hash1 not in hashfiles:
recout.append("Unable to find matching sigdata for %s with hash %s" % (key, hash1))
- elif not hash2 in hashfiles:
+ elif hash2 not in hashfiles:
recout.append("Unable to find matching sigdata for %s with hash %s" % (key, hash2))
else:
- out2 = bb.siggen.compare_sigfiles(hashfiles[hash1], hashfiles[hash2], recursecb, color=color)
+ out2 = bb.siggen.compare_sigfiles(hashfiles[hash1]['path'], hashfiles[hash2]['path'], recursecb, color=color)
for change in out2:
for line in change.splitlines():
recout.append(' ' + line)
@@ -109,36 +116,36 @@ parser.add_argument('-D', '--debug',
if is_dump:
parser.add_argument("-t", "--task",
- help="find the signature data file for the last run of the specified task",
- action="store", dest="taskargs", nargs=2, metavar=('recipename', 'taskname'))
+ help="find the signature data file for the last run of the specified task",
+ action="store", dest="taskargs", nargs=2, metavar=('recipename', 'taskname'))
parser.add_argument("sigdatafile1",
- help="Signature file to dump. Not used when using -t/--task.",
- action="store", nargs='?', metavar="sigdatafile")
+ help="Signature file to dump. Not used when using -t/--task.",
+ action="store", nargs='?', metavar="sigdatafile")
else:
parser.add_argument('-c', '--color',
- help='Colorize the output (where %(metavar)s is %(choices)s)',
- choices=['auto', 'always', 'never'], default='auto', metavar='color')
+ help='Colorize the output (where %(metavar)s is %(choices)s)',
+ choices=['auto', 'always', 'never'], default='auto', metavar='color')
parser.add_argument('-d', '--dump',
- help='Dump the last signature data instead of comparing (equivalent to using bitbake-dumpsig)',
- action='store_true')
+ help='Dump the last signature data instead of comparing (equivalent to using bitbake-dumpsig)',
+ action='store_true')
parser.add_argument("-t", "--task",
- help="find the signature data files for the last two runs of the specified task and compare them",
- action="store", dest="taskargs", nargs=2, metavar=('recipename', 'taskname'))
+ help="find the signature data files for the last two runs of the specified task and compare them",
+ action="store", dest="taskargs", nargs=2, metavar=('recipename', 'taskname'))
parser.add_argument("-s", "--signature",
- help="With -t/--task, specify the signatures to look for instead of taking the last two",
- action="store", dest="sigargs", nargs=2, metavar=('fromsig', 'tosig'))
+ help="With -t/--task, specify the signatures to look for instead of taking the last two",
+ action="store", dest="sigargs", nargs=2, metavar=('fromsig', 'tosig'))
parser.add_argument("sigdatafile1",
- help="First signature file to compare (or signature file to dump, if second not specified). Not used when using -t/--task.",
- action="store", nargs='?')
+ help="First signature file to compare (or signature file to dump, if second not specified). Not used when using -t/--task.",
+ action="store", nargs='?')
parser.add_argument("sigdatafile2",
- help="Second signature file to compare",
- action="store", nargs='?')
+ help="Second signature file to compare",
+ action="store", nargs='?')
options = parser.parse_args()
if is_dump:
@@ -156,7 +163,8 @@ if options.taskargs:
with bb.tinfoil.Tinfoil() as tinfoil:
tinfoil.prepare(config_only=True)
if not options.dump and options.sigargs:
- files = find_siginfo_task(tinfoil, options.taskargs[0], options.taskargs[1], options.sigargs[0], options.sigargs[1])
+ files = find_siginfo_task(tinfoil, options.taskargs[0], options.taskargs[1], options.sigargs[0],
+ options.sigargs[1])
else:
files = find_siginfo_task(tinfoil, options.taskargs[0], options.taskargs[1])
@@ -165,7 +173,8 @@ if options.taskargs:
output = bb.siggen.dump_sigfile(files[-1])
else:
if len(files) < 2:
- logger.error('Only one matching sigdata file found for the specified task (%s %s)' % (options.taskargs[0], options.taskargs[1]))
+ logger.error('Only one matching sigdata file found for the specified task (%s %s)' % (
+ options.taskargs[0], options.taskargs[1]))
sys.exit(1)
# Recurse into signature comparison
diff --git a/bin/bitbake-getvar b/bin/bitbake-getvar
index 942321925..8901f99ae 100755
--- a/bin/bitbake-getvar
+++ b/bin/bitbake-getvar
@@ -9,6 +9,8 @@ import argparse
import io
import os
import sys
+import warnings
+warnings.simplefilter("default")
bindir = os.path.dirname(__file__)
topdir = os.path.dirname(bindir)
@@ -23,26 +25,36 @@ if __name__ == "__main__":
parser.add_argument('-u', '--unexpand', help='Do not expand the value (with --value)', action="store_true")
parser.add_argument('-f', '--flag', help='Specify a variable flag to query (with --value)', default=None)
parser.add_argument('--value', help='Only report the value, no history and no variable name', action="store_true")
+ parser.add_argument('-q', '--quiet', help='Silence bitbake server logging', action="store_true")
+ parser.add_argument('--ignore-undefined', help='Suppress any errors related to undefined variables', action="store_true")
args = parser.parse_args()
- if args.unexpand and not args.value:
- print("--unexpand only makes sense with --value")
- sys.exit(1)
+ if not args.value:
+ if args.unexpand:
+ sys.exit("--unexpand only makes sense with --value")
- if args.flag and not args.value:
- print("--flag only makes sense with --value")
- sys.exit(1)
+ if args.flag:
+ sys.exit("--flag only makes sense with --value")
- with bb.tinfoil.Tinfoil(tracking=True) as tinfoil:
+ quiet = args.quiet or args.value
+ with bb.tinfoil.Tinfoil(tracking=True, setup_logging=not quiet) as tinfoil:
if args.recipe:
- tinfoil.prepare(quiet=2)
+ tinfoil.prepare(quiet=3 if quiet else 2)
d = tinfoil.parse_recipe(args.recipe)
else:
tinfoil.prepare(quiet=2, config_only=True)
d = tinfoil.config_data
+
+ value = None
if args.flag:
- print(str(d.getVarFlag(args.variable, args.flag, expand=(not args.unexpand))))
- elif args.value:
- print(str(d.getVar(args.variable, expand=(not args.unexpand))))
+ value = d.getVarFlag(args.variable, args.flag, expand=not args.unexpand)
+ if value is None and not args.ignore_undefined:
+ sys.exit(f"The flag '{args.flag}' is not defined for variable '{args.variable}'")
+ else:
+ value = d.getVar(args.variable, expand=not args.unexpand)
+ if value is None and not args.ignore_undefined:
+ sys.exit(f"The variable '{args.variable}' is not defined")
+ if args.value:
+ print(str(value if value is not None else ""))
else:
bb.data.emit_var(args.variable, d=d, all=True)
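
[Editor's note: the rewritten bitbake-getvar above boils down to a short Tinfoil session. A minimal sketch of the same lookup path, assuming an initialized build environment; the variable name is illustrative:]

```python
# Minimal sketch of the lookup bitbake-getvar now performs; the
# variable name "BB_VERSION" is illustrative, not part of the patch.
import bb.tinfoil

with bb.tinfoil.Tinfoil(tracking=True, setup_logging=False) as tinfoil:
    # config_only: read global configuration without parsing a recipe
    tinfoil.prepare(quiet=2, config_only=True)
    d = tinfoil.config_data
    value = d.getVar("BB_VERSION", expand=True)
    if value is None:
        # mirrors the new behaviour when --ignore-undefined is not set
        raise SystemExit("The variable 'BB_VERSION' is not defined")
    print(value)
```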
diff --git a/bin/bitbake-hashclient b/bin/bitbake-hashclient
index a89290217..610787ed2 100755
--- a/bin/bitbake-hashclient
+++ b/bin/bitbake-hashclient
@@ -13,6 +13,10 @@ import pprint
import sys
import threading
import time
+import warnings
+import netrc
+import json
+warnings.simplefilter("default")
try:
import tqdm
@@ -34,18 +38,42 @@ except ImportError:
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), 'lib'))
import hashserv
+import bb.asyncrpc
DEFAULT_ADDRESS = 'unix://./hashserve.sock'
METHOD = 'stress.test.method'
+def print_user(u):
+ print(f"Username: {u['username']}")
+ if "permissions" in u:
+ print("Permissions: " + " ".join(u["permissions"]))
+ if "token" in u:
+ print(f"Token: {u['token']}")
+
def main():
+ def handle_get(args, client):
+ result = client.get_taskhash(args.method, args.taskhash, all_properties=True)
+ if not result:
+ return 0
+
+ print(json.dumps(result, sort_keys=True, indent=4))
+ return 0
+
+ def handle_get_outhash(args, client):
+ result = client.get_outhash(args.method, args.outhash, args.taskhash)
+ if not result:
+ return 0
+
+ print(json.dumps(result, sort_keys=True, indent=4))
+ return 0
+
def handle_stats(args, client):
if args.reset:
s = client.reset_stats()
else:
s = client.get_stats()
- pprint.pprint(s)
+ print(json.dumps(s, sort_keys=True, indent=4))
return 0
def handle_stress(args, client):
@@ -54,25 +82,24 @@ def main():
nonlocal missed_hashes
nonlocal max_time
- client = hashserv.create_client(args.address)
-
- for i in range(args.requests):
- taskhash = hashlib.sha256()
- taskhash.update(args.taskhash_seed.encode('utf-8'))
- taskhash.update(str(i).encode('utf-8'))
+ with hashserv.create_client(args.address) as client:
+ for i in range(args.requests):
+ taskhash = hashlib.sha256()
+ taskhash.update(args.taskhash_seed.encode('utf-8'))
+ taskhash.update(str(i).encode('utf-8'))
- start_time = time.perf_counter()
- l = client.get_unihash(METHOD, taskhash.hexdigest())
- elapsed = time.perf_counter() - start_time
+ start_time = time.perf_counter()
+ l = client.get_unihash(METHOD, taskhash.hexdigest())
+ elapsed = time.perf_counter() - start_time
- with lock:
- if l:
- found_hashes += 1
- else:
- missed_hashes += 1
+ with lock:
+ if l:
+ found_hashes += 1
+ else:
+ missed_hashes += 1
- max_time = max(elapsed, max_time)
- pbar.update()
+ max_time = max(elapsed, max_time)
+ pbar.update()
max_time = 0
found_hashes = 0
@@ -111,12 +138,114 @@ def main():
with lock:
pbar.update()
+ def handle_remove(args, client):
+ where = {k: v for k, v in args.where}
+ if where:
+ result = client.remove(where)
+ print("Removed %d row(s)" % (result["count"]))
+ else:
+ print("No query specified")
+
+ def handle_clean_unused(args, client):
+ result = client.clean_unused(args.max_age)
+ print("Removed %d rows" % (result["count"]))
+ return 0
+
+ def handle_refresh_token(args, client):
+ r = client.refresh_token(args.username)
+ print_user(r)
+
+ def handle_set_user_permissions(args, client):
+ r = client.set_user_perms(args.username, args.permissions)
+ print_user(r)
+
+ def handle_get_user(args, client):
+ r = client.get_user(args.username)
+ print_user(r)
+
+ def handle_get_all_users(args, client):
+ users = client.get_all_users()
+ print("{username:20}| {permissions}".format(username="Username", permissions="Permissions"))
+ print(("-" * 20) + "+" + ("-" * 20))
+ for u in users:
+ print("{username:20}| {permissions}".format(username=u["username"], permissions=" ".join(u["permissions"])))
+
+ def handle_new_user(args, client):
+ r = client.new_user(args.username, args.permissions)
+ print_user(r)
+
+ def handle_delete_user(args, client):
+ r = client.delete_user(args.username)
+ print_user(r)
+
+ def handle_get_db_usage(args, client):
+ usage = client.get_db_usage()
+ print(usage)
+ tables = sorted(usage.keys())
+ print("{name:20}| {rows:20}".format(name="Table name", rows="Rows"))
+ print(("-" * 20) + "+" + ("-" * 20))
+ for t in tables:
+ print("{name:20}| {rows:<20}".format(name=t, rows=usage[t]["rows"]))
+ print()
+
+ total_rows = sum(t["rows"] for t in usage.values())
+ print(f"Total rows: {total_rows}")
+
+ def handle_get_db_query_columns(args, client):
+ columns = client.get_db_query_columns()
+ print("\n".join(sorted(columns)))
+
+ def handle_gc_status(args, client):
+ result = client.gc_status()
+ if not result["mark"]:
+ print("No Garbage collection in progress")
+ return 0
+
+ print("Current Mark: %s" % result["mark"])
+ print("Total hashes to keep: %d" % result["keep"])
+ print("Total hashes to remove: %s" % result["remove"])
+ return 0
+
+ def handle_gc_mark(args, client):
+ where = {k: v for k, v in args.where}
+ result = client.gc_mark(args.mark, where)
+ print("New hashes marked: %d" % result["count"])
+ return 0
+
+ def handle_gc_sweep(args, client):
+ result = client.gc_sweep(args.mark)
+ print("Removed %d rows" % result["count"])
+ return 0
+
+ def handle_unihash_exists(args, client):
+ result = client.unihash_exists(args.unihash)
+ if args.quiet:
+ return 0 if result else 1
+
+ print("true" if result else "false")
+ return 0
+
parser = argparse.ArgumentParser(description='Hash Equivalence Client')
parser.add_argument('--address', default=DEFAULT_ADDRESS, help='Server address (default "%(default)s")')
parser.add_argument('--log', default='WARNING', help='Set logging level')
+ parser.add_argument('--login', '-l', metavar="USERNAME", help="Authenticate as USERNAME")
+ parser.add_argument('--password', '-p', metavar="TOKEN", help="Authenticate using token TOKEN")
+ parser.add_argument('--become', '-b', metavar="USERNAME", help="Impersonate user USERNAME (if allowed) when performing actions")
+ parser.add_argument('--no-netrc', '-n', action="store_false", dest="netrc", help="Do not use .netrc")
subparsers = parser.add_subparsers()
+ get_parser = subparsers.add_parser('get', help="Get the unihash for a taskhash")
+ get_parser.add_argument("method", help="Method to query")
+ get_parser.add_argument("taskhash", help="Task hash to query")
+ get_parser.set_defaults(func=handle_get)
+
+ get_outhash_parser = subparsers.add_parser('get-outhash', help="Get output hash information")
+ get_outhash_parser.add_argument("method", help="Method to query")
+ get_outhash_parser.add_argument("outhash", help="Output hash to query")
+ get_outhash_parser.add_argument("taskhash", help="Task hash to query")
+ get_outhash_parser.set_defaults(func=handle_get_outhash)
+
stats_parser = subparsers.add_parser('stats', help='Show server stats')
stats_parser.add_argument('--reset', action='store_true',
help='Reset server stats')
@@ -135,6 +264,64 @@ def main():
help='Include string in outhash')
stress_parser.set_defaults(func=handle_stress)
+ remove_parser = subparsers.add_parser('remove', help="Remove hash entries")
+ remove_parser.add_argument("--where", "-w", metavar="KEY VALUE", nargs=2, action="append", default=[],
+ help="Remove entries from table where KEY == VALUE")
+ remove_parser.set_defaults(func=handle_remove)
+
+ clean_unused_parser = subparsers.add_parser('clean-unused', help="Remove unused database entries")
+ clean_unused_parser.add_argument("max_age", metavar="SECONDS", type=int, help="Remove unused entries older than SECONDS old")
+ clean_unused_parser.set_defaults(func=handle_clean_unused)
+
+ refresh_token_parser = subparsers.add_parser('refresh-token', help="Refresh auth token")
+ refresh_token_parser.add_argument("--username", "-u", help="Refresh the token for another user (if authorized)")
+ refresh_token_parser.set_defaults(func=handle_refresh_token)
+
+ set_user_perms_parser = subparsers.add_parser('set-user-perms', help="Set new permissions for user")
+ set_user_perms_parser.add_argument("--username", "-u", help="Username", required=True)
+ set_user_perms_parser.add_argument("permissions", metavar="PERM", nargs="*", default=[], help="New permissions")
+ set_user_perms_parser.set_defaults(func=handle_set_user_permissions)
+
+ get_user_parser = subparsers.add_parser('get-user', help="Get user")
+ get_user_parser.add_argument("--username", "-u", help="Username")
+ get_user_parser.set_defaults(func=handle_get_user)
+
+ get_all_users_parser = subparsers.add_parser('get-all-users', help="List all users")
+ get_all_users_parser.set_defaults(func=handle_get_all_users)
+
+ new_user_parser = subparsers.add_parser('new-user', help="Create new user")
+ new_user_parser.add_argument("--username", "-u", help="Username", required=True)
+ new_user_parser.add_argument("permissions", metavar="PERM", nargs="*", default=[], help="New permissions")
+ new_user_parser.set_defaults(func=handle_new_user)
+
+ delete_user_parser = subparsers.add_parser('delete-user', help="Delete user")
+ delete_user_parser.add_argument("--username", "-u", help="Username", required=True)
+ delete_user_parser.set_defaults(func=handle_delete_user)
+
+ db_usage_parser = subparsers.add_parser('get-db-usage', help="Database Usage")
+ db_usage_parser.set_defaults(func=handle_get_db_usage)
+
+ db_query_columns_parser = subparsers.add_parser('get-db-query-columns', help="Show columns that can be used in database queries")
+ db_query_columns_parser.set_defaults(func=handle_get_db_query_columns)
+
+ gc_status_parser = subparsers.add_parser("gc-status", help="Show garbage collection status")
+ gc_status_parser.set_defaults(func=handle_gc_status)
+
+ gc_mark_parser = subparsers.add_parser('gc-mark', help="Mark hashes to be kept for garbage collection")
+ gc_mark_parser.add_argument("mark", help="Mark for this garbage collection operation")
+ gc_mark_parser.add_argument("--where", "-w", metavar="KEY VALUE", nargs=2, action="append", default=[],
+ help="Keep entries in table where KEY == VALUE")
+ gc_mark_parser.set_defaults(func=handle_gc_mark)
+
+ gc_sweep_parser = subparsers.add_parser('gc-sweep', help="Perform garbage collection and delete any entries that are not marked")
+ gc_sweep_parser.add_argument("mark", help="Mark for this garbage collection operation")
+ gc_sweep_parser.set_defaults(func=handle_gc_sweep)
+
+ unihash_exists_parser = subparsers.add_parser('unihash-exists', help="Check if a unihash is known to the server")
+ unihash_exists_parser.add_argument("--quiet", action="store_true", help="Don't print status. Instead, exit with 0 if unihash exists and 1 if it does not")
+ unihash_exists_parser.add_argument("unihash", help="Unihash to check")
+ unihash_exists_parser.set_defaults(func=handle_unihash_exists)
+
args = parser.parse_args()
logger = logging.getLogger('hashserv')
@@ -148,11 +335,30 @@ def main():
console.setLevel(level)
logger.addHandler(console)
+ login = args.login
+ password = args.password
+
+ if login is None and args.netrc:
+ try:
+ n = netrc.netrc()
+ auth = n.authenticators(args.address)
+ if auth is not None:
+ login, _, password = auth
+ except FileNotFoundError:
+ pass
+ except netrc.NetrcParseError as e:
+ sys.stderr.write(f"Error parsing {e.filename}:{e.lineno}: {e.msg}\n")
+
func = getattr(args, 'func', None)
if func:
- client = hashserv.create_client(args.address)
-
- return func(args, client)
+ try:
+ with hashserv.create_client(args.address, login, password) as client:
+ if args.become:
+ client.become_user(args.become)
+ return func(args, client)
+ except bb.asyncrpc.InvokeError as e:
+ print(f"ERROR: {e}")
+ return 1
return 0
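
[Editor's note: bitbake-hashclient now opens the connection as a context manager and surfaces server-side failures as bb.asyncrpc.InvokeError, so the same pattern works for scripted queries. A minimal sketch; the address and task hash are placeholders, and authentication is omitted:]

```python
# Minimal sketch of the new client usage pattern; the socket address
# and the task hash are placeholders.
import hashserv
import bb.asyncrpc

try:
    # create_client() is used as a context manager so the connection
    # is closed on exit
    with hashserv.create_client("unix://./hashserve.sock") as client:
        unihash = client.get_unihash("stress.test.method", "0" * 64)
        print(unihash if unihash else "no match")
except bb.asyncrpc.InvokeError as e:
    print(f"ERROR: {e}")
```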
diff --git a/bin/bitbake-hashserv b/bin/bitbake-hashserv
index 153f65a37..4bfb7abfb 100755
--- a/bin/bitbake-hashserv
+++ b/bin/bitbake-hashserv
@@ -10,55 +10,162 @@ import sys
import logging
import argparse
import sqlite3
+import warnings
-sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), 'lib'))
+warnings.simplefilter("default")
+
+sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), "lib"))
import hashserv
+from hashserv.server import DEFAULT_ANON_PERMS
VERSION = "1.0.0"
-DEFAULT_BIND = 'unix://./hashserve.sock'
+DEFAULT_BIND = "unix://./hashserve.sock"
def main():
- parser = argparse.ArgumentParser(description='Hash Equivalence Reference Server. Version=%s' % VERSION,
- epilog='''The bind address is the path to a unix domain socket if it is
- prefixed with "unix://". Otherwise, it is an IP address
- and port in form ADDRESS:PORT. To bind to all addresses, leave
- the ADDRESS empty, e.g. "--bind :8686". To bind to a specific
- IPv6 address, enclose the address in "[]", e.g.
- "--bind [::1]:8686"'''
- )
-
- parser.add_argument('-b', '--bind', default=DEFAULT_BIND, help='Bind address (default "%(default)s")')
- parser.add_argument('-d', '--database', default='./hashserv.db', help='Database file (default "%(default)s")')
- parser.add_argument('-l', '--log', default='WARNING', help='Set logging level')
- parser.add_argument('-u', '--upstream', help='Upstream hashserv to pull hashes from')
- parser.add_argument('-r', '--read-only', action='store_true', help='Disallow write operations from clients')
+ parser = argparse.ArgumentParser(
+ description="Hash Equivalence Reference Server. Version=%s" % VERSION,
+ formatter_class=argparse.RawTextHelpFormatter,
+ epilog="""
+The bind address may take one of the following formats:
+ unix://PATH - Bind to unix domain socket at PATH
+ ws://ADDRESS:PORT - Bind to websocket on ADDRESS:PORT
+ ADDRESS:PORT - Bind to raw TCP socket on ADDRESS:PORT
+
+To bind to all addresses, leave the ADDRESS empty, e.g. "--bind :8686" or
+"--bind ws://:8686". To bind to a specific IPv6 address, enclose the address in
+"[]", e.g. "--bind [::1]:8686" or "--bind ws://[::1]:8686"
+
+Note that the default Anonymous permissions are designed to not break existing
+server instances when upgrading, but are not particularly secure defaults. If
+you want to use authentication, it is recommended that you use "--anon-perms
+@read" to only give anonymous users read access, or "--anon-perms @none" to
+give un-authenticated users no access at all.
+
+Setting "--anon-perms @all" or "--anon-perms @user-admin" is not allowed, since
+this would allow anonymous users to manage all user accounts, which is a bad
+idea.
+
+If you are using user authentication, you should run your server in websockets
+mode with an SSL terminating load balancer in front of it (as this server does
+not implement SSL). Otherwise all usernames and passwords will be transmitted
+in the clear. When configured this way, clients can connect using a secure
+websocket, as in "wss://SERVER:PORT"
+
+The following permissions are supported by the server:
+
+ @none - No permissions
+ @read - The ability to read equivalent hashes from the server
+ @report - The ability to report equivalent hashes to the server
+ @db-admin - Manage the hash database(s). This includes cleaning the
+ database, removing hashes, etc.
+ @user-admin - The ability to manage user accounts. This includes, creating
+ users, deleting users, resetting login tokens, and assigning
+ permissions.
+ @all - All possible permissions, including any that may be added
+ in the future
+ """,
+ )
+
+ parser.add_argument(
+ "-b",
+ "--bind",
+ default=os.environ.get("HASHSERVER_BIND", DEFAULT_BIND),
+ help='Bind address (default $HASHSERVER_BIND, "%(default)s")',
+ )
+ parser.add_argument(
+ "-d",
+ "--database",
+ default=os.environ.get("HASHSERVER_DB", "./hashserv.db"),
+ help='Database file (default $HASHSERVER_DB, "%(default)s")',
+ )
+ parser.add_argument(
+ "-l",
+ "--log",
+ default=os.environ.get("HASHSERVER_LOG_LEVEL", "WARNING"),
+ help='Set logging level (default $HASHSERVER_LOG_LEVEL, "%(default)s")',
+ )
+ parser.add_argument(
+ "-u",
+ "--upstream",
+ default=os.environ.get("HASHSERVER_UPSTREAM", None),
+ help="Upstream hashserv to pull hashes from ($HASHSERVER_UPSTREAM)",
+ )
+ parser.add_argument(
+ "-r",
+ "--read-only",
+ action="store_true",
+ help="Disallow write operations from clients ($HASHSERVER_READ_ONLY)",
+ )
+ parser.add_argument(
+ "--db-username",
+ default=os.environ.get("HASHSERVER_DB_USERNAME", None),
+ help="Database username ($HASHSERVER_DB_USERNAME)",
+ )
+ parser.add_argument(
+ "--db-password",
+ default=os.environ.get("HASHSERVER_DB_PASSWORD", None),
+ help="Database password ($HASHSERVER_DB_PASSWORD)",
+ )
+ parser.add_argument(
+ "--anon-perms",
+ metavar="PERM[,PERM[,...]]",
+ default=os.environ.get("HASHSERVER_ANON_PERMS", ",".join(DEFAULT_ANON_PERMS)),
+ help='Permissions to give anonymous users (default $HASHSERVER_ANON_PERMS, "%(default)s")',
+ )
+ parser.add_argument(
+ "--admin-user",
+ default=os.environ.get("HASHSERVER_ADMIN_USER", None),
+ help="Create default admin user with name ADMIN_USER ($HASHSERVER_ADMIN_USER)",
+ )
+ parser.add_argument(
+ "--admin-password",
+ default=os.environ.get("HASHSERVER_ADMIN_PASSWORD", None),
+ help="Create default admin user with password ADMIN_PASSWORD ($HASHSERVER_ADMIN_PASSWORD)",
+ )
args = parser.parse_args()
- logger = logging.getLogger('hashserv')
+ logger = logging.getLogger("hashserv")
level = getattr(logging, args.log.upper(), None)
if not isinstance(level, int):
- raise ValueError('Invalid log level: %s' % args.log)
+ raise ValueError("Invalid log level: %s (Try ERROR/WARNING/INFO/DEBUG)" % args.log)
logger.setLevel(level)
console = logging.StreamHandler()
console.setLevel(level)
logger.addHandler(console)
- server = hashserv.create_server(args.bind, args.database, upstream=args.upstream, read_only=args.read_only)
+ read_only = (os.environ.get("HASHSERVER_READ_ONLY", "0") == "1") or args.read_only
+ if "," in args.anon_perms:
+ anon_perms = args.anon_perms.split(",")
+ else:
+ anon_perms = args.anon_perms.split()
+
+ server = hashserv.create_server(
+ args.bind,
+ args.database,
+ upstream=args.upstream,
+ read_only=read_only,
+ db_username=args.db_username,
+ db_password=args.db_password,
+ anon_perms=anon_perms,
+ admin_username=args.admin_user,
+ admin_password=args.admin_password,
+ )
server.serve_forever()
return 0
-if __name__ == '__main__':
+if __name__ == "__main__":
try:
ret = main()
except Exception:
ret = 1
import traceback
+
traceback.print_exc()
sys.exit(ret)
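
[Editor's note: the CLI above is a wrapper around hashserv.create_server(). A minimal programmatic sketch using the documented defaults; keyword arguments not shown are assumed to keep their defaults:]

```python
# Minimal sketch of starting the reference server programmatically,
# mirroring the CLI defaults above (unauthenticated unix socket).
import hashserv
from hashserv.server import DEFAULT_ANON_PERMS

server = hashserv.create_server(
    "unix://./hashserve.sock",  # DEFAULT_BIND
    "./hashserv.db",            # default database file
    upstream=None,
    read_only=False,
    anon_perms=DEFAULT_ANON_PERMS,
)
server.serve_forever()
```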
diff --git a/bin/bitbake-layers b/bin/bitbake-layers
index ff085d674..d4b1d1aaf 100755
--- a/bin/bitbake-layers
+++ b/bin/bitbake-layers
@@ -14,6 +14,8 @@ import logging
import os
import sys
import argparse
+import warnings
+warnings.simplefilter("default")
bindir = os.path.dirname(__file__)
topdir = os.path.dirname(bindir)
@@ -66,11 +68,11 @@ def main():
registered = False
for plugin in plugins:
+ if hasattr(plugin, 'tinfoil_init'):
+ plugin.tinfoil_init(tinfoil)
if hasattr(plugin, 'register_commands'):
registered = True
plugin.register_commands(subparsers)
- if hasattr(plugin, 'tinfoil_init'):
- plugin.tinfoil_init(tinfoil)
if not registered:
logger.error("No commands registered - missing plugins?")
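
[Editor's note: the reordering above means a plugin's tinfoil_init() hook now runs before register_commands(), so command registration can rely on the prepared Tinfoil instance. A hypothetical plugin skeleton showing the two hooks the loop probes for; the command name and wiring are illustrative:]

```python
# Hypothetical bitbake-layers plugin skeleton; the loop above calls
# tinfoil_init() first, then register_commands(), both via hasattr().
class ExamplePlugin:
    def tinfoil_init(self, tinfoil):
        # Runs first after this change, so the instance is available
        # by the time commands are registered.
        self.tinfoil = tinfoil

    def register_commands(self, subparsers):
        # Illustrative wiring; real plugins derive from LayerPlugin in
        # lib/bblayers/common.py and use its registration helpers.
        parser = subparsers.add_parser("example", help="illustrative command")
        parser.set_defaults(func=self.do_example)

    def do_example(self, args):
        print(self.tinfoil.config_data.getVar("BBPATH"))

def plugin_init(plugins):
    return ExamplePlugin()
```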
diff --git a/bin/bitbake-prserv b/bin/bitbake-prserv
index 1e9b6cbc1..ad0a06940 100755
--- a/bin/bitbake-prserv
+++ b/bin/bitbake-prserv
@@ -1,49 +1,83 @@
#!/usr/bin/env python3
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
import os
import sys,logging
-import optparse
+import argparse
+import warnings
+warnings.simplefilter("default")
-sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)),'lib'))
+sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), "lib"))
import prserv
import prserv.serv
-__version__="1.0.0"
+VERSION = "1.1.0"
-PRHOST_DEFAULT='0.0.0.0'
+PRHOST_DEFAULT="0.0.0.0"
PRPORT_DEFAULT=8585
def main():
- parser = optparse.OptionParser(
- version="Bitbake PR Service Core version %s, %%prog version %s" % (prserv.__version__, __version__),
- usage = "%prog < --start | --stop > [options]")
-
- parser.add_option("-f", "--file", help="database filename(default: prserv.sqlite3)", action="store",
- dest="dbfile", type="string", default="prserv.sqlite3")
- parser.add_option("-l", "--log", help="log filename(default: prserv.log)", action="store",
- dest="logfile", type="string", default="prserv.log")
- parser.add_option("--loglevel", help="logging level, i.e. CRITICAL, ERROR, WARNING, INFO, DEBUG",
- action = "store", type="string", dest="loglevel", default = "INFO")
- parser.add_option("--start", help="start daemon",
- action="store_true", dest="start")
- parser.add_option("--stop", help="stop daemon",
- action="store_true", dest="stop")
- parser.add_option("--host", help="ip address to bind", action="store",
- dest="host", type="string", default=PRHOST_DEFAULT)
- parser.add_option("--port", help="port number(default: 8585)", action="store",
- dest="port", type="int", default=PRPORT_DEFAULT)
-
- options, args = parser.parse_args(sys.argv)
- prserv.init_logger(os.path.abspath(options.logfile),options.loglevel)
-
- if options.start:
- ret=prserv.serv.start_daemon(options.dbfile, options.host, options.port,os.path.abspath(options.logfile))
- elif options.stop:
- ret=prserv.serv.stop_daemon(options.host, options.port)
+ parser = argparse.ArgumentParser(
+ description="BitBake PR Server. Version=%s" % VERSION,
+ formatter_class=argparse.RawTextHelpFormatter)
+
+ parser.add_argument(
+ "-f",
+ "--file",
+ default="prserv.sqlite3",
+ help="database filename (default: prserv.sqlite3)",
+ )
+ parser.add_argument(
+ "-l",
+ "--log",
+ default="prserv.log",
+        help="log filename (default: prserv.log)",
+ )
+ parser.add_argument(
+ "--loglevel",
+ default="INFO",
+ help="logging level, i.e. CRITICAL, ERROR, WARNING, INFO, DEBUG",
+ )
+ parser.add_argument(
+ "--start",
+ action="store_true",
+ help="start daemon",
+ )
+ parser.add_argument(
+ "--stop",
+ action="store_true",
+ help="stop daemon",
+ )
+ parser.add_argument(
+ "--host",
+ help="ip address to bind",
+ default=PRHOST_DEFAULT,
+ )
+ parser.add_argument(
+ "--port",
+ type=int,
+ default=PRPORT_DEFAULT,
+ help="port number (default: 8585)",
+ )
+ parser.add_argument(
+ "-r",
+ "--read-only",
+ action="store_true",
+ help="open database in read-only mode",
+ )
+
+ args = parser.parse_args()
+ prserv.init_logger(os.path.abspath(args.log), args.loglevel)
+
+ if args.start:
+ ret=prserv.serv.start_daemon(args.file, args.host, args.port, os.path.abspath(args.log), args.read_only)
+ elif args.stop:
+ ret=prserv.serv.stop_daemon(args.host, args.port)
else:
ret=parser.print_help()
return ret
diff --git a/bin/bitbake-selftest b/bin/bitbake-selftest
index 6c0737416..f25f23b1a 100755
--- a/bin/bitbake-selftest
+++ b/bin/bitbake-selftest
@@ -7,6 +7,8 @@
import os
import sys, logging
+import warnings
+warnings.simplefilter("default")
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), 'lib'))
import unittest
@@ -29,6 +31,7 @@ tests = ["bb.tests.codeparser",
"bb.tests.runqueue",
"bb.tests.siggen",
"bb.tests.utils",
+ "bb.tests.compression",
"hashserv.tests",
"layerindexlib.tests.layerindexobj",
"layerindexlib.tests.restapi",
diff --git a/bin/bitbake-server b/bin/bitbake-server
index 8e5394736..454a3919a 100755
--- a/bin/bitbake-server
+++ b/bin/bitbake-server
@@ -8,14 +8,16 @@
import os
import sys
import warnings
+warnings.simplefilter("default")
import logging
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
-if sys.getfilesystemencoding() != "utf-8":
- sys.exit("Please use a locale setting which supports UTF-8 (such as LANG=en_US.UTF-8).\nPython can't change the filesystem locale after loading so we need a UTF-8 when Python starts or things won't work.")
+import bb
+
+bb.utils.check_system_locale()
# Users shouldn't be running this code directly
-if len(sys.argv) != 10 or not sys.argv[1].startswith("decafbad"):
+if len(sys.argv) != 11 or not sys.argv[1].startswith("decafbad"):
print("bitbake-server is meant for internal execution by bitbake itself, please don't use it standalone.")
sys.exit(1)
@@ -27,7 +29,8 @@ logfile = sys.argv[4]
lockname = sys.argv[5]
sockname = sys.argv[6]
timeout = float(sys.argv[7])
-xmlrpcinterface = (sys.argv[8], int(sys.argv[9]))
+profile = bool(int(sys.argv[8]))
+xmlrpcinterface = (sys.argv[9], int(sys.argv[10]))
if xmlrpcinterface[0] == "None":
xmlrpcinterface = (None, xmlrpcinterface[1])
@@ -48,5 +51,5 @@ logger = logging.getLogger("BitBake")
handler = bb.event.LogHandler()
logger.addHandler(handler)
-bb.server.process.execServer(lockfd, readypipeinfd, lockname, sockname, timeout, xmlrpcinterface)
+bb.server.process.execServer(lockfd, readypipeinfd, lockname, sockname, timeout, xmlrpcinterface, profile)
diff --git a/bin/bitbake-worker b/bin/bitbake-worker
index 7765b9368..e8073f2ac 100755
--- a/bin/bitbake-worker
+++ b/bin/bitbake-worker
@@ -1,11 +1,14 @@
#!/usr/bin/env python3
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
import os
import sys
import warnings
+warnings.simplefilter("default")
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
from bb import fetch2
import logging
@@ -16,11 +19,12 @@ import signal
import pickle
import traceback
import queue
+import shlex
+import subprocess
from multiprocessing import Lock
from threading import Thread
-if sys.getfilesystemencoding() != "utf-8":
- sys.exit("Please use a locale setting which supports UTF-8 (such as LANG=en_US.UTF-8).\nPython can't change the filesystem locale after loading so we need a UTF-8 when Python starts or things won't work.")
+bb.utils.check_system_locale()
# Users shouldn't be running this code directly
if len(sys.argv) != 2 or not sys.argv[1].startswith("decafbad"):
@@ -87,19 +91,19 @@ def worker_fire_prepickled(event):
worker_thread_exit = False
def worker_flush(worker_queue):
- worker_queue_int = b""
+ worker_queue_int = bytearray()
global worker_pipe, worker_thread_exit
while True:
try:
- worker_queue_int = worker_queue_int + worker_queue.get(True, 1)
+ worker_queue_int.extend(worker_queue.get(True, 1))
except queue.Empty:
pass
while (worker_queue_int or not worker_queue.empty()):
try:
(_, ready, _) = select.select([], [worker_pipe], [], 1)
if not worker_queue.empty():
- worker_queue_int = worker_queue_int + worker_queue.get()
+ worker_queue_int.extend(worker_queue.get())
written = os.write(worker_pipe, worker_queue_int)
worker_queue_int = worker_queue_int[written:]
except (IOError, OSError) as e:
@@ -117,11 +121,10 @@ def worker_child_fire(event, d):
data = b"<event>" + pickle.dumps(event) + b"</event>"
try:
- worker_pipe_lock.acquire()
- while(len(data)):
- written = worker_pipe.write(data)
- data = data[written:]
- worker_pipe_lock.release()
+ with bb.utils.lock_timeout(worker_pipe_lock):
+ while(len(data)):
+ written = worker_pipe.write(data)
+ data = data[written:]
except IOError:
sigterm_handler(None, None)
raise
@@ -140,15 +143,29 @@ def sigterm_handler(signum, frame):
os.killpg(0, signal.SIGTERM)
sys.exit()
-def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, taskhash, unihash, appends, taskdepdata, extraconfigdata, quieterrors=False, dry_run_exec=False):
+def fork_off_task(cfg, data, databuilder, workerdata, extraconfigdata, runtask):
+
+ fn = runtask['fn']
+ task = runtask['task']
+ taskname = runtask['taskname']
+ taskhash = runtask['taskhash']
+ unihash = runtask['unihash']
+ appends = runtask['appends']
+ layername = runtask['layername']
+ taskdepdata = runtask['taskdepdata']
+ quieterrors = runtask['quieterrors']
# We need to setup the environment BEFORE the fork, since
# a fork() or exec*() activates PSEUDO...
envbackup = {}
+ fakeroot = False
fakeenv = {}
umask = None
- taskdep = workerdata["taskdeps"][fn]
+ uid = os.getuid()
+ gid = os.getgid()
+
+ taskdep = runtask['taskdep']
if 'umask' in taskdep and taskname in taskdep['umask']:
umask = taskdep['umask'][taskname]
elif workerdata["umask"]:
@@ -160,24 +177,25 @@ def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, taskha
except TypeError:
pass
- dry_run = cfg.dry_run or dry_run_exec
+ dry_run = cfg.dry_run or runtask['dry_run']
# We can't use the fakeroot environment in a dry run as it possibly hasn't been built
if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not dry_run:
- envvars = (workerdata["fakerootenv"][fn] or "").split()
- for key, value in (var.split('=') for var in envvars):
+ fakeroot = True
+ envvars = (runtask['fakerootenv'] or "").split()
+ for key, value in (var.split('=',1) for var in envvars):
envbackup[key] = os.environ.get(key)
os.environ[key] = value
fakeenv[key] = value
- fakedirs = (workerdata["fakerootdirs"][fn] or "").split()
+ fakedirs = (runtask['fakerootdirs'] or "").split()
for p in fakedirs:
bb.utils.mkdirhier(p)
logger.debug2('Running %s:%s under fakeroot, fakedirs: %s' %
(fn, taskname, ', '.join(fakedirs)))
else:
- envvars = (workerdata["fakerootnoenv"][fn] or "").split()
- for key, value in (var.split('=') for var in envvars):
+ envvars = (runtask['fakerootnoenv'] or "").split()
+ for key, value in (var.split('=',1) for var in envvars):
envbackup[key] = os.environ.get(key)
os.environ[key] = value
fakeenv[key] = value
@@ -219,19 +237,21 @@ def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, taskha
# Let SIGHUP exit as SIGTERM
signal.signal(signal.SIGHUP, sigterm_handler)
- # No stdin
- newsi = os.open(os.devnull, os.O_RDWR)
- os.dup2(newsi, sys.stdin.fileno())
+ # No stdin & stdout
+ # stdout is used as a status report channel and must not be used by child processes.
+ dumbio = os.open(os.devnull, os.O_RDWR)
+ os.dup2(dumbio, sys.stdin.fileno())
+ os.dup2(dumbio, sys.stdout.fileno())
- if umask:
+ if umask is not None:
os.umask(umask)
try:
- bb_cache = bb.cache.NoCache(databuilder)
(realfn, virtual, mc) = bb.cache.virtualfn2realfn(fn)
the_data = databuilder.mcdata[mc]
the_data.setVar("BB_WORKERCONTEXT", "1")
the_data.setVar("BB_TASKDEPDATA", taskdepdata)
+ the_data.setVar('BB_CURRENTTASK', taskname.replace("do_", ""))
if cfg.limited_deps:
the_data.setVar("BB_LIMITEDDEPS", "1")
the_data.setVar("BUILDNAME", workerdata["buildname"])
@@ -245,12 +265,20 @@ def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, taskha
bb.parse.siggen.set_taskhashes(workerdata["newhashes"])
ret = 0
- the_data = bb_cache.loadDataFull(fn, appends)
+ the_data = databuilder.parseRecipe(fn, appends, layername)
the_data.setVar('BB_TASKHASH', taskhash)
the_data.setVar('BB_UNIHASH', unihash)
+ bb.parse.siggen.setup_datacache_from_datastore(fn, the_data)
bb.utils.set_process_name("%s:%s" % (the_data.getVar("PN"), taskname.replace("do_", "")))
+ if not bb.utils.to_boolean(the_data.getVarFlag(taskname, 'network')):
+ if bb.utils.is_local_uid(uid):
+ logger.debug("Attempting to disable network for %s" % taskname)
+ bb.utils.disable_network(uid, gid)
+ else:
+ logger.debug("Skipping disable network for %s since %s is not a local uid." % (taskname, uid))
+
# exported_vars() returns a generator which *cannot* be passed to os.environ.update()
# successfully. We also need to unset anything from the environment which shouldn't be there
exports = bb.data.exported_vars(the_data)
@@ -279,10 +307,20 @@ def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, taskha
if not quieterrors:
logger.critical(traceback.format_exc())
os._exit(1)
+
+ sys.stdout.flush()
+ sys.stderr.flush()
+
try:
if dry_run:
return 0
- return bb.build.exec_task(fn, taskname, the_data, cfg.profile)
+ try:
+ ret = bb.build.exec_task(fn, taskname, the_data, cfg.profile)
+ finally:
+ if fakeroot:
+ fakerootcmd = shlex.split(the_data.getVar("FAKEROOTCMD"))
+ subprocess.run(fakerootcmd + ['-S'], check=True, stdout=subprocess.PIPE)
+ return ret
except:
os._exit(1)
if not profiling:
@@ -314,12 +352,12 @@ class runQueueWorkerPipe():
if pipeout:
pipeout.close()
bb.utils.nonblockingfd(self.input)
- self.queue = b""
+ self.queue = bytearray()
def read(self):
start = len(self.queue)
try:
- self.queue = self.queue + (self.input.read(102400) or b"")
+ self.queue.extend(self.input.read(102400) or b"")
except (OSError, IOError) as e:
if e.errno != errno.EAGAIN:
raise
@@ -347,7 +385,7 @@ class BitbakeWorker(object):
def __init__(self, din):
self.input = din
bb.utils.nonblockingfd(self.input)
- self.queue = b""
+ self.queue = bytearray()
self.cookercfg = None
self.databuilder = None
self.data = None
@@ -381,7 +419,7 @@ class BitbakeWorker(object):
if len(r) == 0:
# EOF on pipe, server must have terminated
self.sigterm_exception(signal.SIGTERM, None)
- self.queue = self.queue + r
+ self.queue.extend(r)
except (OSError, IOError):
pass
if len(self.queue):
@@ -401,19 +439,35 @@ class BitbakeWorker(object):
while self.process_waitpid():
continue
-
def handle_item(self, item, func):
- if self.queue.startswith(b"<" + item + b">"):
- index = self.queue.find(b"</" + item + b">")
- while index != -1:
- func(self.queue[(len(item) + 2):index])
- self.queue = self.queue[(index + len(item) + 3):]
- index = self.queue.find(b"</" + item + b">")
+ opening_tag = b"<" + item + b">"
+ if not self.queue.startswith(opening_tag):
+ return
+
+ tag_len = len(opening_tag)
+ if len(self.queue) < tag_len + 4:
+ # we need to receive more data
+ return
+ header = self.queue[tag_len:tag_len + 4]
+ payload_len = int.from_bytes(header, 'big')
+ # closing tag has length (tag_len + 1)
+ if len(self.queue) < tag_len * 2 + 1 + payload_len:
+ # we need to receive more data
+ return
+
+ index = self.queue.find(b"</" + item + b">")
+ if index != -1:
+ try:
+ func(self.queue[(tag_len + 4):index])
+ except pickle.UnpicklingError:
+ workerlog_write("Unable to unpickle data: %s\n" % ":".join("{:02x}".format(c) for c in self.queue))
+ raise
+ self.queue = self.queue[(index + len(b"</") + len(item) + len(b">")):]
def handle_cookercfg(self, data):
self.cookercfg = pickle.loads(data)
self.databuilder = bb.cookerdata.CookerDataBuilder(self.cookercfg, worker=True)
- self.databuilder.parseBaseConfiguration()
+ self.databuilder.parseBaseConfiguration(worker=True)
self.data = self.databuilder.data
def handle_extraconfigdata(self, data):
@@ -428,6 +482,7 @@ class BitbakeWorker(object):
for mc in self.databuilder.mcdata:
self.databuilder.mcdata[mc].setVar("PRSERV_HOST", self.workerdata["prhost"])
self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.workerdata["hashservaddr"])
+ self.databuilder.mcdata[mc].setVar("__bbclasstype", "recipe")
def handle_newtaskhashes(self, data):
self.workerdata["newhashes"] = pickle.loads(data)
@@ -445,11 +500,15 @@ class BitbakeWorker(object):
sys.exit(0)
def handle_runtask(self, data):
- fn, task, taskname, taskhash, unihash, quieterrors, appends, taskdepdata, dry_run_exec = pickle.loads(data)
- workerlog_write("Handling runtask %s %s %s\n" % (task, fn, taskname))
+ runtask = pickle.loads(data)
- pid, pipein, pipeout = fork_off_task(self.cookercfg, self.data, self.databuilder, self.workerdata, fn, task, taskname, taskhash, unihash, appends, taskdepdata, self.extraconfigdata, quieterrors, dry_run_exec)
+ fn = runtask['fn']
+ task = runtask['task']
+ taskname = runtask['taskname']
+ workerlog_write("Handling runtask %s %s %s\n" % (task, fn, taskname))
+
+ pid, pipein, pipeout = fork_off_task(self.cookercfg, self.data, self.databuilder, self.workerdata, self.extraconfigdata, runtask)
self.build_pids[pid] = task
self.build_pipes[pid] = runQueueWorkerPipe(pipein, pipeout)
@@ -513,9 +572,11 @@ except BaseException as e:
import traceback
sys.stderr.write(traceback.format_exc())
sys.stderr.write(str(e))
+finally:
+ worker_thread_exit = True
+ worker_thread.join()
-worker_thread_exit = True
-worker_thread.join()
-
-workerlog_write("exitting")
+workerlog_write("exiting")
+if not normalexit:
+ sys.exit(1)
sys.exit(0)
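
The reworked ``handle_item`` above moves the worker pipe protocol from pure
substring scanning to length-prefixed framing: an opening tag, a four-byte
big-endian payload length, the payload, then the closing tag. A minimal
standalone sketch of that framing follows; the helper names are illustrative,
not BitBake APIs, and the real code additionally locates the closing tag
explicitly before slicing::

    def frame_message(tag: bytes, payload: bytes) -> bytes:
        # <tag> + 4-byte big-endian payload length + payload + </tag>
        return (b"<" + tag + b">" + len(payload).to_bytes(4, "big")
                + payload + b"</" + tag + b">")

    def try_unframe(queue: bytearray, tag: bytes):
        # Return (payload, remaining buffer) once a complete message is
        # buffered, or None when more data is needed -- mirroring the
        # early returns in handle_item above.
        opening = b"<" + tag + b">"
        if not queue.startswith(opening):
            return None
        header_end = len(opening) + 4
        if len(queue) < header_end:
            return None
        payload_len = int.from_bytes(queue[len(opening):header_end], "big")
        closing = b"</" + tag + b">"
        end = header_end + payload_len
        if len(queue) < end + len(closing):
            return None
        return bytes(queue[header_end:end]), queue[end + len(closing):]

    msg = bytearray(frame_message(b"runtask", b"hello"))
    payload, rest = try_unframe(msg, b"runtask")
    assert payload == b"hello" and not rest
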
diff --git a/bin/git-make-shallow b/bin/git-make-shallow
index 57069f7ed..9de557c10 100755
--- a/bin/git-make-shallow
+++ b/bin/git-make-shallow
@@ -1,5 +1,7 @@
#!/usr/bin/env python3
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -16,19 +18,23 @@ import itertools
import os
import subprocess
import sys
+import warnings
+warnings.simplefilter("default")
version = 1.0
+git_cmd = ['git', '-c', 'safe.bareRepository=all']
+
def main():
if sys.version_info < (3, 4, 0):
sys.exit('Python 3.4 or greater is required')
- git_dir = check_output(['git', 'rev-parse', '--git-dir']).rstrip()
+ git_dir = check_output(git_cmd + ['rev-parse', '--git-dir']).rstrip()
shallow_file = os.path.join(git_dir, 'shallow')
if os.path.exists(shallow_file):
try:
- check_output(['git', 'fetch', '--unshallow'])
+ check_output(git_cmd + ['fetch', '--unshallow'])
except subprocess.CalledProcessError:
try:
os.unlink(shallow_file)
@@ -37,21 +43,21 @@ def main():
raise
args = process_args()
- revs = check_output(['git', 'rev-list'] + args.revisions).splitlines()
+ revs = check_output(git_cmd + ['rev-list'] + args.revisions).splitlines()
make_shallow(shallow_file, args.revisions, args.refs)
- ref_revs = check_output(['git', 'rev-list'] + args.refs).splitlines()
+ ref_revs = check_output(git_cmd + ['rev-list'] + args.refs).splitlines()
remaining_history = set(revs) & set(ref_revs)
for rev in remaining_history:
- if check_output(['git', 'rev-parse', '{}^@'.format(rev)]):
+ if check_output(git_cmd + ['rev-parse', '{}^@'.format(rev)]):
sys.exit('Error: %s was not made shallow' % rev)
filter_refs(args.refs)
if args.shrink:
shrink_repo(git_dir)
- subprocess.check_call(['git', 'fsck', '--unreachable'])
+ subprocess.check_call(git_cmd + ['fsck', '--unreachable'])
def process_args():
@@ -68,12 +74,12 @@ def process_args():
args = parser.parse_args()
if args.refs:
- args.refs = check_output(['git', 'rev-parse', '--symbolic-full-name'] + args.refs).splitlines()
+ args.refs = check_output(git_cmd + ['rev-parse', '--symbolic-full-name'] + args.refs).splitlines()
else:
args.refs = get_all_refs(lambda r, t, tt: t == 'commit' or tt == 'commit')
args.refs = list(filter(lambda r: not r.endswith('/HEAD'), args.refs))
- args.revisions = check_output(['git', 'rev-parse'] + ['%s^{}' % i for i in args.revisions]).splitlines()
+ args.revisions = check_output(git_cmd + ['rev-parse'] + ['%s^{}' % i for i in args.revisions]).splitlines()
return args
@@ -91,7 +97,7 @@ def make_shallow(shallow_file, revisions, refs):
def get_all_refs(ref_filter=None):
"""Return all the existing refs in this repository, optionally filtering the refs."""
- ref_output = check_output(['git', 'for-each-ref', '--format=%(refname)\t%(objecttype)\t%(*objecttype)'])
+ ref_output = check_output(git_cmd + ['for-each-ref', '--format=%(refname)\t%(objecttype)\t%(*objecttype)'])
ref_split = [tuple(iter_extend(l.rsplit('\t'), 3)) for l in ref_output.splitlines()]
if ref_filter:
ref_split = (e for e in ref_split if ref_filter(*e))
@@ -109,7 +115,7 @@ def filter_refs(refs):
all_refs = get_all_refs()
to_remove = set(all_refs) - set(refs)
if to_remove:
- check_output(['xargs', '-0', '-n', '1', 'git', 'update-ref', '-d', '--no-deref'],
+ check_output(['xargs', '-0', '-n', '1'] + git_cmd + ['update-ref', '-d', '--no-deref'],
input=''.join(l + '\0' for l in to_remove))
@@ -122,7 +128,7 @@ def follow_history_intersections(revisions, refs):
if rev in seen:
continue
- parents = check_output(['git', 'rev-parse', '%s^@' % rev]).splitlines()
+ parents = check_output(git_cmd + ['rev-parse', '%s^@' % rev]).splitlines()
yield rev
seen.add(rev)
@@ -130,12 +136,12 @@ def follow_history_intersections(revisions, refs):
if not parents:
continue
- check_refs = check_output(['git', 'merge-base', '--independent'] + sorted(refs)).splitlines()
+ check_refs = check_output(git_cmd + ['merge-base', '--independent'] + sorted(refs)).splitlines()
for parent in parents:
for ref in check_refs:
print("Checking %s vs %s" % (parent, ref))
try:
- merge_base = check_output(['git', 'merge-base', parent, ref]).rstrip()
+ merge_base = check_output(git_cmd + ['merge-base', parent, ref]).rstrip()
except subprocess.CalledProcessError:
continue
else:
@@ -155,14 +161,14 @@ def iter_except(func, exception, start=None):
def shrink_repo(git_dir):
"""Shrink the newly shallow repository, removing the unreachable objects."""
- subprocess.check_call(['git', 'reflog', 'expire', '--expire-unreachable=now', '--all'])
- subprocess.check_call(['git', 'repack', '-ad'])
+ subprocess.check_call(git_cmd + ['reflog', 'expire', '--expire-unreachable=now', '--all'])
+ subprocess.check_call(git_cmd + ['repack', '-ad'])
try:
os.unlink(os.path.join(git_dir, 'objects', 'info', 'alternates'))
except OSError as exc:
if exc.errno != errno.ENOENT:
raise
- subprocess.check_call(['git', 'prune', '--expire', 'now'])
+ subprocess.check_call(git_cmd + ['prune', '--expire', 'now'])
if __name__ == '__main__':
diff --git a/bin/toaster b/bin/toaster
index 6b90ee187..f002c8c15 100755
--- a/bin/toaster
+++ b/bin/toaster
@@ -33,7 +33,7 @@ databaseCheck()
$MANAGE migrate --noinput || retval=1
if [ $retval -eq 1 ]; then
- echo "Failed migrations, aborting system start" 1>&2
+ echo "Failed migrations, halting system start" 1>&2
return $retval
fi
# Make sure that checksettings can pick up any value for TEMPLATECONF
@@ -41,7 +41,7 @@ databaseCheck()
$MANAGE checksettings --traceback || retval=1
if [ $retval -eq 1 ]; then
- printf "\nError while checking settings; aborting\n"
+ printf "\nError while checking settings; exiting\n"
return $retval
fi
@@ -84,7 +84,7 @@ webserverStartAll()
echo "Starting webserver..."
$MANAGE runserver --noreload "$ADDR_PORT" \
- </dev/null >>${BUILDDIR}/toaster_web.log 2>&1 \
+ </dev/null >>${TOASTER_LOGS_DIR}/web.log 2>&1 \
& echo $! >${BUILDDIR}/.toastermain.pid
sleep 1
@@ -181,6 +181,14 @@ WEBSERVER=1
export TOASTER_BUILDSERVER=1
ADDR_PORT="localhost:8000"
TOASTERDIR=`dirname $BUILDDIR`
+# ${BUILDDIR}/toaster_logs/ is now the default location for toaster logs
+# This is needed for the django-log-viewer implementation: https://pypi.org/project/django-log-viewer/
+# If the directory does not exist, create it.
+TOASTER_LOGS_DIR="${BUILDDIR}/toaster_logs/"
+if [ ! -d $TOASTER_LOGS_DIR ]
+then
+ mkdir $TOASTER_LOGS_DIR
+fi
unset CMD
for param in $*; do
case $param in
@@ -248,7 +256,7 @@ fi
# 3) the sqlite db if that is being used.
# 4) pid's we need to clean up on exit/shutdown
export TOASTER_DIR=$TOASTERDIR
-export BB_ENV_EXTRAWHITE="$BB_ENV_EXTRAWHITE TOASTER_DIR"
+export BB_ENV_PASSTHROUGH_ADDITIONS="$BB_ENV_PASSTHROUGH_ADDITIONS TOASTER_DIR"
# Determine the action. If specified by arguments, fine, if not, toggle it
if [ "$CMD" = "start" ] ; then
@@ -299,7 +307,7 @@ case $CMD in
export BITBAKE_UI='toasterui'
if [ $TOASTER_BUILDSERVER -eq 1 ] ; then
$MANAGE runbuilds \
- </dev/null >>${BUILDDIR}/toaster_runbuilds.log 2>&1 \
+ </dev/null >>${TOASTER_LOGS_DIR}/toaster_runbuilds.log 2>&1 \
& echo $! >${BUILDDIR}/.runbuilds.pid
else
echo "Toaster build server not started."
diff --git a/bin/toaster-eventreplay b/bin/toaster-eventreplay
index 8fa4ab711..74a319320 100755
--- a/bin/toaster-eventreplay
+++ b/bin/toaster-eventreplay
@@ -19,6 +19,8 @@ import sys
import json
import pickle
import codecs
+import warnings
+warnings.simplefilter("default")
from collections import namedtuple
@@ -28,79 +30,23 @@ sys.path.insert(0, join(dirname(dirname(abspath(__file__))), 'lib'))
import bb.cooker
from bb.ui import toasterui
-
-class EventPlayer:
- """Emulate a connection to a bitbake server."""
-
- def __init__(self, eventfile, variables):
- self.eventfile = eventfile
- self.variables = variables
- self.eventmask = []
-
- def waitEvent(self, _timeout):
- """Read event from the file."""
- line = self.eventfile.readline().strip()
- if not line:
- return
- try:
- event_str = json.loads(line)['vars'].encode('utf-8')
- event = pickle.loads(codecs.decode(event_str, 'base64'))
- event_name = "%s.%s" % (event.__module__, event.__class__.__name__)
- if event_name not in self.eventmask:
- return
- return event
- except ValueError as err:
- print("Failed loading ", line)
- raise err
-
- def runCommand(self, command_line):
- """Emulate running a command on the server."""
- name = command_line[0]
-
- if name == "getVariable":
- var_name = command_line[1]
- variable = self.variables.get(var_name)
- if variable:
- return variable['v'], None
- return None, "Missing variable %s" % var_name
-
- elif name == "getAllKeysWithFlags":
- dump = {}
- flaglist = command_line[1]
- for key, val in self.variables.items():
- try:
- if not key.startswith("__"):
- dump[key] = {
- 'v': val['v'],
- 'history' : val['history'],
- }
- for flag in flaglist:
- dump[key][flag] = val[flag]
- except Exception as err:
- print(err)
- return (dump, None)
-
- elif name == 'setEventMask':
- self.eventmask = command_line[-1]
- return True, None
-
- else:
- raise Exception("Command %s not implemented" % command_line[0])
-
- def getEventHandle(self):
- """
- This method is called by toasterui.
- The return value is passed to self.runCommand but not used there.
- """
- pass
+from bb.ui import eventreplay
def main(argv):
with open(argv[-1]) as eventfile:
# load variables from the first line
- variables = json.loads(eventfile.readline().strip())['allvariables']
-
+ variables = None
+ while line := eventfile.readline().strip():
+ try:
+ variables = json.loads(line)['allvariables']
+ break
+ except (KeyError, json.JSONDecodeError):
+ continue
+ if not variables:
+ sys.exit("Cannot find allvariables entry in event log file %s" % argv[-1])
+ eventfile.seek(0)
params = namedtuple('ConfigParams', ['observe_only'])(True)
- player = EventPlayer(eventfile, variables)
+ player = eventreplay.EventPlayer(eventfile, variables)
return toasterui.main(player, player, params)
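
Each line of the event log after the variable dump is a JSON object whose
``vars`` field carries a base64-encoded pickled event, as the removed
``EventPlayer.waitEvent`` above shows. A sketch of decoding a single line
(illustrative only; the live implementation now resides in
``bb.ui.eventreplay``)::

    import codecs
    import json
    import pickle

    def decode_event_line(line):
        # JSON wrapper -> base64 'vars' payload -> pickled BitBake event
        event_str = json.loads(line)['vars'].encode('utf-8')
        return pickle.loads(codecs.decode(event_str, 'base64'))
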
diff --git a/conf/bitbake.conf b/conf/bitbake.conf
index b3d5fab23..f5a5a333a 100644
--- a/conf/bitbake.conf
+++ b/conf/bitbake.conf
@@ -23,6 +23,7 @@ DEPENDS = ""
DEPLOY_DIR = "${TMPDIR}/deploy"
DEPLOY_DIR_IMAGE = "${DEPLOY_DIR}/images"
DL_DIR = "${TMPDIR}/downloads"
+CACHE = "${TMPDIR}/cache"
FILESPATH = "${FILE_DIRNAME}/${PF}:${FILE_DIRNAME}/${P}:${FILE_DIRNAME}/${PN}:${FILE_DIRNAME}/files:${FILE_DIRNAME}"
FILE_DIRNAME = "${@os.path.dirname(d.getVar('FILE', False))}"
IMAGE_CMD = "_NO_DEFINED_IMAGE_TYPES_"
@@ -31,10 +32,10 @@ OVERRIDES = "local:${MACHINE}:${TARGET_OS}:${TARGET_ARCH}"
P = "${PN}-${PV}"
PERSISTENT_DIR = "${TMPDIR}/cache"
PF = "${PN}-${PV}-${PR}"
-PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[0] or 'defaultpkgname'}"
-PR = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[2] or 'r0'}"
+PN = "${@bb.parse.vars_from_file(d.getVar('FILE', False),d)[0] or 'defaultpkgname'}"
+PR = "${@bb.parse.vars_from_file(d.getVar('FILE', False),d)[2] or 'r0'}"
PROVIDES = ""
-PV = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[1] or '1.0'}"
+PV = "${@bb.parse.vars_from_file(d.getVar('FILE', False),d)[1] or '1.0'}"
S = "${WORKDIR}/${P}"
SRC_URI = "file://${FILE}"
STAMP = "${TMPDIR}/stamps/${PF}"
@@ -42,5 +43,4 @@ T = "${WORKDIR}/temp"
TARGET_ARCH = "${BUILD_ARCH}"
TMPDIR = "${TOPDIR}/tmp"
WORKDIR = "${TMPDIR}/work/${PF}"
-PERSISTENT_DIR = "${TMPDIR}/cache"
GITPKGV = "${@bb.fetch2.get_srcrev(d, 'gitpkgv_revision')}"
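
The ``vars_from_file`` helper referenced above derives the package name,
version and revision from the recipe filename. A rough reimplementation for
illustration only (not the actual ``bb.parse`` code)::

    import os

    def vars_from_filename(path):
        # 'something_1.2.3.bb' -> ('something', '1.2.3', None); missing
        # fields stay None so bitbake.conf can apply its defaults.
        name = os.path.splitext(os.path.basename(path))[0]
        parts = name.split("_", 2)
        return tuple(parts + [None] * (3 - len(parts)))

    assert vars_from_filename("something_1.2.3.bb") == ("something", "1.2.3", None)
    # bitbake.conf then falls back to 'defaultpkgname', '1.0' and 'r0' as above.
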
diff --git a/contrib/hashserv/Dockerfile b/contrib/hashserv/Dockerfile
index d6fc728f3..74b4a3be1 100644
--- a/contrib/hashserv/Dockerfile
+++ b/contrib/hashserv/Dockerfile
@@ -1,7 +1,7 @@
# SPDX-License-Identifier: MIT
#
# Copyright (c) 2021 Joshua Watt <JPEWhacker@gmail.com>
-#
+#
# Dockerfile to build a bitbake hash equivalence server container
#
# From the root of the bitbake repository, run:
@@ -15,5 +15,9 @@ RUN apk add --no-cache python3
COPY bin/bitbake-hashserv /opt/bbhashserv/bin/
COPY lib/hashserv /opt/bbhashserv/lib/hashserv/
+COPY lib/bb /opt/bbhashserv/lib/bb/
+COPY lib/codegen.py /opt/bbhashserv/lib/codegen.py
+COPY lib/ply /opt/bbhashserv/lib/ply/
+COPY lib/bs4 /opt/bbhashserv/lib/bs4/
ENTRYPOINT ["/opt/bbhashserv/bin/bitbake-hashserv"]
diff --git a/contrib/prserv/Dockerfile b/contrib/prserv/Dockerfile
new file mode 100644
index 000000000..9585fe3f0
--- /dev/null
+++ b/contrib/prserv/Dockerfile
@@ -0,0 +1,62 @@
+# SPDX-License-Identifier: MIT
+#
+# Copyright (c) 2022 Daniel Gomez <daniel@qtec.com>
+#
+# Dockerfile to build a bitbake PR service container
+#
+# From the root of the bitbake repository, run:
+#
+# docker build -f contrib/prserv/Dockerfile . -t prserv
+#
+# Running examples:
+#
+# 1. PR Service in RW mode, port 18585:
+#
+# docker run --detach --tty \
+# --env PORT=18585 \
+# --publish 18585:18585 \
+# --volume $PWD:/var/lib/bbprserv \
+# prserv
+#
+# 2. PR Service in RO mode, default port (8585) and custom LOGFILE:
+#
+# docker run --detach --tty \
+# --env DBMODE="--read-only" \
+# --env LOGFILE=/var/lib/bbprserv/prservro.log \
+# --publish 8585:8585 \
+# --volume $PWD:/var/lib/bbprserv \
+# prserv
+#
+
+FROM alpine:3.14.4
+
+RUN apk add --no-cache python3
+
+COPY bin/bitbake-prserv /opt/bbprserv/bin/
+COPY lib/prserv /opt/bbprserv/lib/prserv/
+COPY lib/bb /opt/bbprserv/lib/bb/
+COPY lib/codegen.py /opt/bbprserv/lib/codegen.py
+COPY lib/ply /opt/bbprserv/lib/ply/
+COPY lib/bs4 /opt/bbprserv/lib/bs4/
+
+ENV PATH=$PATH:/opt/bbprserv/bin
+
+RUN mkdir -p /var/lib/bbprserv
+
+ENV DBFILE=/var/lib/bbprserv/prserv.sqlite3 \
+ LOGFILE=/var/lib/bbprserv/prserv.log \
+ LOGLEVEL=debug \
+ HOST=0.0.0.0 \
+ PORT=8585 \
+ DBMODE=""
+
+ENTRYPOINT [ "/bin/sh", "-c", \
+"bitbake-prserv \
+--file=$DBFILE \
+--log=$LOGFILE \
+--loglevel=$LOGLEVEL \
+--start \
+--host=$HOST \
+--port=$PORT \
+$DBMODE \
+&& tail -f $LOGFILE"]
diff --git a/contrib/vim/indent/bitbake.vim b/contrib/vim/indent/bitbake.vim
index 138103409..7ee9d6993 100644
--- a/contrib/vim/indent/bitbake.vim
+++ b/contrib/vim/indent/bitbake.vim
@@ -40,7 +40,7 @@ set cpo&vim
let s:maxoff = 50 " maximum number of lines to look backwards for ()
-function GetPythonIndent(lnum)
+function! GetBBPythonIndent(lnum)
" If this line is explicitly joined: If the previous line was also joined,
" line it up with that one, otherwise add two 'shiftwidth'
@@ -257,7 +257,7 @@ let b:did_indent = 1
setlocal indentkeys+=0\"
-function BitbakeIndent(lnum)
+function! BitbakeIndent(lnum)
if !has('syntax_items')
return -1
endif
@@ -315,7 +315,7 @@ function BitbakeIndent(lnum)
endif
if index(["bbPyDefRegion", "bbPyFuncRegion"], name) != -1
- let ret = GetPythonIndent(a:lnum)
+ let ret = GetBBPythonIndent(a:lnum)
" Should normally always be indented by at least one shiftwidth; but allow
" return of -1 (defer to autoindent) or -2 (force indent to 0)
if ret == 0
diff --git a/contrib/vim/plugin/newbbappend.vim b/contrib/vim/plugin/newbbappend.vim
index e04174cf6..3f65f79cd 100644
--- a/contrib/vim/plugin/newbbappend.vim
+++ b/contrib/vim/plugin/newbbappend.vim
@@ -20,7 +20,7 @@ fun! NewBBAppendTemplate()
set nopaste
" New bbappend template
- 0 put ='FILESEXTRAPATHS_prepend := \"${THISDIR}/${PN}:\"'
+ 0 put ='FILESEXTRAPATHS:prepend := \"${THISDIR}/${PN}:\"'
2
if paste == 1
diff --git a/contrib/vim/syntax/bitbake.vim b/contrib/vim/syntax/bitbake.vim
index f964621ae..8f39b8f95 100644
--- a/contrib/vim/syntax/bitbake.vim
+++ b/contrib/vim/syntax/bitbake.vim
@@ -51,9 +51,9 @@ syn region bbString matchgroup=bbQuote start=+'+ skip=+\\$+ end=+'+
syn match bbExport "^export" nextgroup=bbIdentifier skipwhite
syn keyword bbExportFlag export contained nextgroup=bbIdentifier skipwhite
syn match bbIdentifier "[a-zA-Z0-9\-_\.\/\+]\+" display contained
-syn match bbVarDeref "${[a-zA-Z0-9\-_\.\/\+]\+}" contained
+syn match bbVarDeref "${[a-zA-Z0-9\-_:\.\/\+]\+}" contained
syn match bbVarEq "\(:=\|+=\|=+\|\.=\|=\.\|?=\|??=\|=\)" contained nextgroup=bbVarValue
-syn match bbVarDef "^\(export\s*\)\?\([a-zA-Z0-9\-_\.\/\+]\+\(_[${}a-zA-Z0-9\-_\.\/\+]\+\)\?\)\s*\(:=\|+=\|=+\|\.=\|=\.\|?=\|??=\|=\)\@=" contains=bbExportFlag,bbIdentifier,bbVarDeref nextgroup=bbVarEq
+syn match bbVarDef "^\(export\s*\)\?\([a-zA-Z0-9\-_\.\/\+][${}a-zA-Z0-9\-_:\.\/\+]*\)\s*\(:=\|+=\|=+\|\.=\|=\.\|?=\|??=\|=\)\@=" contains=bbExportFlag,bbIdentifier,bbOverrideOperator,bbVarDeref nextgroup=bbVarEq
syn match bbVarValue ".*$" contained contains=bbString,bbVarDeref,bbVarPyValue
syn region bbVarPyValue start=+${@+ skip=+\\$+ end=+}+ contained contains=@python
@@ -63,13 +63,14 @@ syn region bbVarFlagFlag matchgroup=bbArrayBrackets start="\[" end="\]\s*
" Includes and requires
syn keyword bbInclude inherit include require contained
-syn match bbIncludeRest ".*$" contained contains=bbString,bbVarDeref
+syn match bbIncludeRest ".*$" contained contains=bbString,bbVarDeref,bbVarPyValue
syn match bbIncludeLine "^\(inherit\|include\|require\)\s\+" contains=bbInclude nextgroup=bbIncludeRest
" Add taks and similar
syn keyword bbStatement addtask deltask addhandler after before EXPORT_FUNCTIONS contained
-syn match bbStatementRest ".*$" skipwhite contained contains=bbStatement
-syn match bbStatementLine "^\(addtask\|deltask\|addhandler\|after\|before\|EXPORT_FUNCTIONS\)\s\+" contains=bbStatement nextgroup=bbStatementRest
+syn match bbStatementRest /[^\\]*$/ skipwhite contained contains=bbStatement,bbVarDeref,bbVarPyValue
+syn region bbStatementRestCont start=/.*\\$/ end=/^[^\\]*$/ contained contains=bbStatement,bbVarDeref,bbVarPyValue,bbContinue keepend
+syn match bbStatementLine "^\(addtask\|deltask\|addhandler\|after\|before\|EXPORT_FUNCTIONS\)\s\+" contains=bbStatement nextgroup=bbStatementRest,bbStatementRestCont
" OE Important Functions
syn keyword bbOEFunctions do_fetch do_unpack do_patch do_configure do_compile do_stage do_install do_package contained
@@ -77,13 +78,15 @@ syn keyword bbOEFunctions do_fetch do_unpack do_patch do_configure do_comp
" Generic Functions
syn match bbFunction "\h[0-9A-Za-z_\-\.]*" display contained contains=bbOEFunctions
+syn keyword bbOverrideOperator append prepend remove contained
+
" BitBake shell metadata
syn include @shell syntax/sh.vim
if exists("b:current_syntax")
unlet b:current_syntax
endif
syn keyword bbShFakeRootFlag fakeroot contained
-syn match bbShFuncDef "^\(fakeroot\s*\)\?\([\.0-9A-Za-z_${}\-\.]\+\)\(python\)\@<!\(\s*()\s*\)\({\)\@=" contains=bbShFakeRootFlag,bbFunction,bbVarDeref,bbDelimiter nextgroup=bbShFuncRegion skipwhite
+syn match bbShFuncDef "^\(fakeroot\s*\)\?\([\.0-9A-Za-z_:${}\-\.]\+\)\(python\)\@<!\(\s*()\s*\)\({\)\@=" contains=bbShFakeRootFlag,bbFunction,bbOverrideOperator,bbVarDeref,bbDelimiter nextgroup=bbShFuncRegion skipwhite
syn region bbShFuncRegion matchgroup=bbDelimiter start="{\s*$" end="^}\s*$" contained contains=@shell
" Python value inside shell functions
@@ -91,7 +94,7 @@ syn region shDeref start=+${@+ skip=+\\$+ excludenl end=+}+ contained co
" BitBake python metadata
syn keyword bbPyFlag python contained
-syn match bbPyFuncDef "^\(fakeroot\s*\)\?\(python\)\(\s\+[0-9A-Za-z_${}\-\.]\+\)\?\(\s*()\s*\)\({\)\@=" contains=bbShFakeRootFlag,bbPyFlag,bbFunction,bbVarDeref,bbDelimiter nextgroup=bbPyFuncRegion skipwhite
+syn match bbPyFuncDef "^\(fakeroot\s*\)\?\(python\)\(\s\+[0-9A-Za-z_:${}\-\.]\+\)\?\(\s*()\s*\)\({\)\@=" contains=bbShFakeRootFlag,bbPyFlag,bbFunction,bbOverrideOperator,bbVarDeref,bbDelimiter nextgroup=bbPyFuncRegion skipwhite
syn region bbPyFuncRegion matchgroup=bbDelimiter start="{\s*$" end="^}\s*$" contained contains=@python
" BitBake 'def'd python functions
@@ -120,7 +123,9 @@ hi def link bbPyFlag Type
hi def link bbPyDef Statement
hi def link bbStatement Statement
hi def link bbStatementRest Identifier
+hi def link bbStatementRestCont Identifier
hi def link bbOEFunctions Special
hi def link bbVarPyValue PreProc
+hi def link bbOverrideOperator Operator
let b:current_syntax = "bb"
diff --git a/doc/Makefile b/doc/Makefile
index d40f390e2..996f01b7d 100644
--- a/doc/Makefile
+++ b/doc/Makefile
@@ -3,7 +3,7 @@
# You can set these variables from the command line, and also
# from the environment for the first two.
-SPHINXOPTS ?= -j auto
+SPHINXOPTS ?= -W --keep-going -j auto
SPHINXBUILD ?= sphinx-build
SOURCEDIR = .
BUILDDIR = _build
diff --git a/doc/README b/doc/README
index 1f8e0f6eb..d4f56afa3 100644
--- a/doc/README
+++ b/doc/README
@@ -8,12 +8,12 @@ Manual Organization
Folders exist for individual manuals as follows:
-* bitbake-user-manual - The BitBake User Manual
+* bitbake-user-manual --- The BitBake User Manual
Each folder is self-contained regarding content and figures.
If you want to find HTML versions of the BitBake manuals on the web,
-go to http://www.openembedded.org/wiki/Documentation.
+go to https://www.openembedded.org/wiki/Documentation.
Sphinx
======
diff --git a/doc/_templates/footer.html b/doc/_templates/footer.html
new file mode 100644
index 000000000..1398f20d7
--- /dev/null
+++ b/doc/_templates/footer.html
@@ -0,0 +1,9 @@
+<footer>
+ <hr/>
+ <div role="contentinfo">
+ <p>&copy; Copyright {{ copyright }}
+ <br>Last updated on {{ last_updated }} from the <a href="https://git.openembedded.org/bitbake/">bitbake</a> git repository.
+ </p>
+ </div>
+</footer>
+
diff --git a/doc/bitbake-user-manual/bitbake-user-manual-execution.rst b/doc/bitbake-user-manual/bitbake-user-manual-execution.rst
index 14c342a6a..d58fbb32e 100644
--- a/doc/bitbake-user-manual/bitbake-user-manual-execution.rst
+++ b/doc/bitbake-user-manual/bitbake-user-manual-execution.rst
@@ -40,7 +40,7 @@ the BitBake command and its options, see ":ref:`The BitBake Command
the number of processors, which takes into account hyper-threading.
Thus, a quad-core build host with hyper-threading most likely shows
eight processors, which is the value you would then assign to
- ``BB_NUMBER_THREADS``.
+ :term:`BB_NUMBER_THREADS`.
A possibly simpler solution is that some Linux distributions (e.g.
Debian and Ubuntu) provide the ``ncpus`` command.
@@ -65,13 +65,13 @@ data itself is of various types:
The ``layer.conf`` files are used to construct key variables such as
:term:`BBPATH` and :term:`BBFILES`.
-``BBPATH`` is used to search for configuration and class files under the
-``conf`` and ``classes`` directories, respectively. ``BBFILES`` is used
+:term:`BBPATH` is used to search for configuration and class files under the
+``conf`` and ``classes`` directories, respectively. :term:`BBFILES` is used
to locate both recipe and recipe append files (``.bb`` and
``.bbappend``). If there is no ``bblayers.conf`` file, it is assumed the
-user has set the ``BBPATH`` and ``BBFILES`` directly in the environment.
+user has set the :term:`BBPATH` and :term:`BBFILES` directly in the environment.
-Next, the ``bitbake.conf`` file is located using the ``BBPATH`` variable
+Next, the ``bitbake.conf`` file is located using the :term:`BBPATH` variable
that was just constructed. The ``bitbake.conf`` file may also include
other configuration files using the ``include`` or ``require``
directives.
@@ -79,8 +79,8 @@ directives.
Prior to parsing configuration files, BitBake looks at certain
variables, including:
-- :term:`BB_ENV_WHITELIST`
-- :term:`BB_ENV_EXTRAWHITE`
+- :term:`BB_ENV_PASSTHROUGH`
+- :term:`BB_ENV_PASSTHROUGH_ADDITIONS`
- :term:`BB_PRESERVE_ENV`
- :term:`BB_ORIGENV`
- :term:`BITBAKE_UI`
@@ -104,7 +104,7 @@ BitBake first searches the current working directory for an optional
contain a :term:`BBLAYERS` variable that is a
space-delimited list of 'layer' directories. Recall that if BitBake
cannot find a ``bblayers.conf`` file, then it is assumed the user has
-set the ``BBPATH`` and ``BBFILES`` variables directly in the
+set the :term:`BBPATH` and :term:`BBFILES` variables directly in the
environment.
For each directory (layer) in this list, a ``conf/layer.conf`` file is
@@ -114,7 +114,7 @@ files automatically set up :term:`BBPATH` and other
variables correctly for a given build directory.
BitBake then expects to find the ``conf/bitbake.conf`` file somewhere in
-the user-specified ``BBPATH``. That configuration file generally has
+the user-specified :term:`BBPATH`. That configuration file generally has
include directives to pull in any other metadata such as files specific
to the architecture, the machine, the local environment, and so forth.
@@ -135,7 +135,7 @@ The ``base.bbclass`` file is always included. Other classes that are
specified in the configuration using the
:term:`INHERIT` variable are also included. BitBake
searches for class files in a ``classes`` subdirectory under the paths
-in ``BBPATH`` in the same way as configuration files.
+in :term:`BBPATH` in the same way as configuration files.
A good way to get an idea of the configuration files and the class files
used in your execution environment is to run the following BitBake
@@ -184,13 +184,13 @@ Locating and Parsing Recipes
During the configuration phase, BitBake will have set
:term:`BBFILES`. BitBake now uses it to construct a
list of recipes to parse, along with any append files (``.bbappend``) to
-apply. ``BBFILES`` is a space-separated list of available files and
+apply. :term:`BBFILES` is a space-separated list of available files and
supports wildcards. An example would be::
BBFILES = "/path/to/bbfiles/*.bb /path/to/appends/*.bbappend"
BitBake parses each
-recipe and append file located with ``BBFILES`` and stores the values of
+recipe and append file located with :term:`BBFILES` and stores the values of
various variables into the datastore.
.. note::
@@ -201,18 +201,18 @@ For each file, a fresh copy of the base configuration is made, then the
recipe is parsed line by line. Any inherit statements cause BitBake to
find and then parse class files (``.bbclass``) using
:term:`BBPATH` as the search path. Finally, BitBake
-parses in order any append files found in ``BBFILES``.
+parses in order any append files found in :term:`BBFILES`.
One common convention is to use the recipe filename to define pieces of
metadata. For example, in ``bitbake.conf`` the recipe name and version
are used to set the variables :term:`PN` and
:term:`PV`::
- PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[0] or 'defaultpkgname'}"
- PV = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[1] or '1.0'}"
+ PN = "${@bb.parse.vars_from_file(d.getVar('FILE', False),d)[0] or 'defaultpkgname'}"
+ PV = "${@bb.parse.vars_from_file(d.getVar('FILE', False),d)[1] or '1.0'}"
In this example, a recipe called "something_1.2.3.bb" would set
-``PN`` to "something" and ``PV`` to "1.2.3".
+:term:`PN` to "something" and :term:`PV` to "1.2.3".
By the time parsing is complete for a recipe, BitBake has a list of
tasks that the recipe defines and a set of data consisting of keys and
@@ -228,7 +228,7 @@ and then reload it.
Where possible, subsequent BitBake commands reuse this cache of recipe
information. The validity of this cache is determined by first computing
a checksum of the base configuration data (see
-:term:`BB_HASHCONFIG_WHITELIST`) and
+:term:`BB_HASHCONFIG_IGNORE_VARS`) and
then checking if the checksum matches. If that checksum matches what is
in the cache and the recipe and class files have not changed, BitBake is
able to use the cache. BitBake then reloads the cached information about
@@ -260,21 +260,21 @@ Providers
Assuming BitBake has been instructed to execute a target and that all
the recipe files have been parsed, BitBake starts to figure out how to
-build the target. BitBake looks through the ``PROVIDES`` list for each
-of the recipes. A ``PROVIDES`` list is the list of names by which the
-recipe can be known. Each recipe's ``PROVIDES`` list is created
+build the target. BitBake looks through the :term:`PROVIDES` list for each
+of the recipes. A :term:`PROVIDES` list is the list of names by which the
+recipe can be known. Each recipe's :term:`PROVIDES` list is created
implicitly through the recipe's :term:`PN` variable and
explicitly through the recipe's :term:`PROVIDES`
variable, which is optional.
-When a recipe uses ``PROVIDES``, that recipe's functionality can be
-found under an alternative name or names other than the implicit ``PN``
+When a recipe uses :term:`PROVIDES`, that recipe's functionality can be
+found under an alternative name or names other than the implicit :term:`PN`
name. As an example, suppose a recipe named ``keyboard_1.0.bb``
contained the following::
PROVIDES += "fullkeyboard"
-The ``PROVIDES``
+The :term:`PROVIDES`
list for this recipe becomes "keyboard", which is implicit, and
"fullkeyboard", which is explicit. Consequently, the functionality found
in ``keyboard_1.0.bb`` can be found under two different names.
@@ -284,12 +284,12 @@ in ``keyboard_1.0.bb`` can be found under two different names.
Preferences
===========
-The ``PROVIDES`` list is only part of the solution for figuring out a
+The :term:`PROVIDES` list is only part of the solution for figuring out a
target's recipes. Because targets might have multiple providers, BitBake
needs to prioritize providers by determining provider preferences.
A common example in which a target has multiple providers is
-"virtual/kernel", which is on the ``PROVIDES`` list for each kernel
+"virtual/kernel", which is on the :term:`PROVIDES` list for each kernel
recipe. Each machine often selects the best kernel provider by using a
line similar to the following in the machine configuration file::
@@ -309,10 +309,10 @@ specify a particular version. You can influence the order by using the
:term:`DEFAULT_PREFERENCE` variable.
By default, files have a preference of "0". Setting
-``DEFAULT_PREFERENCE`` to "-1" makes the recipe unlikely to be used
-unless it is explicitly referenced. Setting ``DEFAULT_PREFERENCE`` to
-"1" makes it likely the recipe is used. ``PREFERRED_VERSION`` overrides
-any ``DEFAULT_PREFERENCE`` setting. ``DEFAULT_PREFERENCE`` is often used
+:term:`DEFAULT_PREFERENCE` to "-1" makes the recipe unlikely to be used
+unless it is explicitly referenced. Setting :term:`DEFAULT_PREFERENCE` to
+"1" makes it likely the recipe is used. :term:`PREFERRED_VERSION` overrides
+any :term:`DEFAULT_PREFERENCE` setting. :term:`DEFAULT_PREFERENCE` is often used
to mark newer and more experimental recipe versions until they have
undergone sufficient testing to be considered stable.
@@ -394,7 +394,7 @@ ready to run, those tasks have all their dependencies met, and the
thread threshold has not been exceeded.
It is worth noting that you can greatly speed up the build time by
-properly setting the ``BB_NUMBER_THREADS`` variable.
+properly setting the :term:`BB_NUMBER_THREADS` variable.
As each task completes, a timestamp is written to the directory
specified by the :term:`STAMP` variable. On subsequent
@@ -435,7 +435,7 @@ BitBake writes a shell script to
executes the script. The generated shell script contains all the
exported variables, and the shell functions with all variables expanded.
Output from the shell script goes to the file
-``${T}/log.do_taskname.pid``. Looking at the expanded shell functions in
+``${``\ :term:`T`\ ``}/log.do_taskname.pid``. Looking at the expanded shell functions in
the run file and the output in the log files is a useful debugging
technique.
@@ -477,7 +477,7 @@ changes because it should not affect the output for target packages. The
simplistic approach for excluding the working directory is to set it to
some fixed value and create the checksum for the "run" script. BitBake
goes one step better and uses the
-:term:`BB_HASHBASE_WHITELIST` variable
+:term:`BB_BASEHASH_IGNORE_VARS` variable
to define a list of variables that should never be included when
generating the signatures.
@@ -523,7 +523,7 @@ it cannot figure out dependencies.
Thus far, this section has limited discussion to the direct inputs into
a task. Information based on direct inputs is referred to as the
"basehash" in the code. However, there is still the question of a task's
-indirect inputs - the things that were already built and present in the
+indirect inputs --- the things that were already built and present in the
build directory. The checksum (or signature) for a particular task needs
to add the hashes of all the tasks on which the particular task depends.
Choosing which dependencies to add is a policy decision. However, the
@@ -534,11 +534,11 @@ At the code level, there are a variety of ways both the basehash and the
dependent task hashes can be influenced. Within the BitBake
configuration file, we can give BitBake some extra information to help
it construct the basehash. The following statement effectively results
-in a list of global variable dependency excludes - variables never
+in a list of global variable dependency excludes --- variables never
included in any checksum. This example uses variables from OpenEmbedded
to help illustrate the concept::
- BB_HASHBASE_WHITELIST ?= "TMPDIR FILE PATH PWD BB_TASKHASH BBPATH DL_DIR \
+ BB_BASEHASH_IGNORE_VARS ?= "TMPDIR FILE PATH PWD BB_TASKHASH BBPATH DL_DIR \
SSTATE_DIR THISDIR FILESEXTRAPATHS FILE_DIRNAME HOME LOGNAME SHELL \
USER FILESPATH STAGING_DIR_HOST STAGING_DIR_TARGET COREBASE PRSERV_HOST \
PRSERV_DUMPDIR PRSERV_DUMPFILE PRSERV_LOCKDOWN PARALLEL_MAKE \
@@ -552,8 +552,8 @@ through dependency chains are more complex and are generally
accomplished with a Python function. The code in
``meta/lib/oe/sstatesig.py`` shows two examples of this and also
illustrates how you can insert your own policy into the system if so
-desired. This file defines the two basic signature generators
-OpenEmbedded-Core uses: "OEBasic" and "OEBasicHash". By default, there
+desired. This file defines the basic signature generator
+OpenEmbedded-Core uses: "OEBasicHash". By default, there
is a dummy "noop" signature handler enabled in BitBake. This means that
behavior is unchanged from previous versions. ``OE-Core`` uses the
"OEBasicHash" signature handler by default through this setting in the
@@ -561,14 +561,13 @@ behavior is unchanged from previous versions. ``OE-Core`` uses the
BB_SIGNATURE_HANDLER ?= "OEBasicHash"
-The "OEBasicHash" ``BB_SIGNATURE_HANDLER`` is the same as the "OEBasic"
-version but adds the task hash to the stamp files. This results in any
-metadata change that changes the task hash, automatically causing the
-task to be run again. This removes the need to bump
-:term:`PR` values, and changes to metadata automatically
-ripple across the build.
+The main feature of the "OEBasicHash" :term:`BB_SIGNATURE_HANDLER` is that
+it adds the task hash to the stamp files. Thanks to this, any metadata
+change will change the task hash, automatically causing the task to be run
+again. This removes the need to bump :term:`PR` values, and changes to
+metadata automatically ripple across the build.
-It is also worth noting that the end result of these signature
+It is also worth noting that the end result of signature
generators is to make some dependency and hash information available to
the build. This information includes:
@@ -578,10 +577,7 @@ the build. This information includes:
- ``BB_BASEHASH_``\ *filename:taskname*: The base hashes for each
dependent task.
-- ``BBHASHDEPS_``\ *filename:taskname*: The task dependencies for
- each task.
-
-- ``BB_TASKHASH``: The hash of the currently running task.
+- :term:`BB_TASKHASH`: The hash of the currently running task.
It is worth noting that BitBake's "-S" option lets you debug BitBake's
processing of signatures. The options passed to -S allow different
@@ -590,10 +586,11 @@ or possibly those defined in the metadata/signature handler itself. The
simplest parameter to pass is "none", which causes a set of signature
information to be written out into ``STAMPS_DIR`` corresponding to the
targets specified. The other currently available parameter is
-"printdiff", which causes BitBake to try to establish the closest
+"printdiff", which causes BitBake to try to establish the most recent
signature match it can (e.g. in the sstate cache) and then run
-``bitbake-diffsigs`` over the matches to determine the stamps and delta
-where these two stamp trees diverge.
+a comparison of the matched signatures to determine the stamps and delta
+where these two stamp trees diverge. This can be used to determine why
+tasks need to be re-run in situations where that is not expected.
.. note::
@@ -648,13 +645,6 @@ compiled binary. To handle this, BitBake calls the
each successful setscene task to know whether or not it needs to obtain
the dependencies of that task.
-Finally, after all the setscene tasks have executed, BitBake calls the
-function listed in
-:term:`BB_SETSCENE_VERIFY_FUNCTION2`
-with the list of tasks BitBake thinks has been "covered". The metadata
-can then ensure that this list is correct and can inform BitBake that it
-wants specific tasks to be run regardless of the setscene result.
-
You can find more information on setscene metadata in the
:ref:`bitbake-user-manual/bitbake-user-manual-metadata:task checksums and setscene`
section.
@@ -667,7 +657,7 @@ builds are when execute, bitbake also supports user defined
configuration of the `Python
logging <https://docs.python.org/3/library/logging.html>`__ facilities
through the :term:`BB_LOGCONFIG` variable. This
-variable defines a json or yaml `logging
+variable defines a JSON or YAML `logging
configuration <https://docs.python.org/3/library/logging.config.html>`__
that will be intelligently merged into the default configuration. The
logging configuration is merged using the following rules:
@@ -701,9 +691,9 @@ logging configuration is merged using the following rules:
adds a filter called ``BitBake.defaultFilter``, both filters will be
applied to the logger
-As an example, consider the following user logging configuration file
-which logs all Hash Equivalence related messages of VERBOSE or higher to
-a file called ``hashequiv.log`` ::
+As a first example, you can create a ``hashequiv.json`` user logging
+configuration file to log all Hash Equivalence related messages of ``VERBOSE``
+or higher priority to a file called ``hashequiv.log``::
{
"version": 1,
@@ -732,3 +722,40 @@ a file called ``hashequiv.log`` ::
}
}
}
+
+Then set the :term:`BB_LOGCONFIG` variable in ``conf/local.conf``::
+
+ BB_LOGCONFIG = "hashequiv.json"
+
+Another example is this ``warn.json`` file to log all ``WARNING`` and
+higher priority messages to a ``warn.log`` file::
+
+ {
+ "version": 1,
+ "formatters": {
+ "warnlogFormatter": {
+ "()": "bb.msg.BBLogFormatter",
+ "format": "%(levelname)s: %(message)s"
+ }
+ },
+
+ "handlers": {
+ "warnlog": {
+ "class": "logging.FileHandler",
+ "formatter": "warnlogFormatter",
+ "level": "WARNING",
+ "filename": "warn.log"
+ }
+ },
+
+ "loggers": {
+ "BitBake": {
+ "handlers": ["warnlog"]
+ }
+ },
+
+ "@disable_existing_loggers": false
+ }
+
+Note that BitBake's helper classes for structured logging are implemented in
+``lib/bb/msg.py``.
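
Both configuration files above are ordinary Python ``dictConfig``
dictionaries. As a standalone illustration, deliberately ignoring BitBake's
merging rules in ``lib/bb/msg.py``, applying ``warn.json`` by hand could look
like this (the ``@``-prefixed key is not part of the standard ``dictConfig``
schema, so it is dropped before calling the standard library)::

    import json
    import logging
    import logging.config

    with open("warn.json") as f:
        cfg = json.load(f)

    # Strip the non-standard "@" keys before using the stock machinery.
    cfg = {k: v for k, v in cfg.items() if not k.startswith("@")}

    logging.config.dictConfig(cfg)
    logging.getLogger("BitBake").warning("this line lands in warn.log")
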
diff --git a/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst b/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst
index bd1fb4fc7..fb4f0a23d 100644
--- a/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst
+++ b/doc/bitbake-user-manual/bitbake-user-manual-fetching.rst
@@ -51,7 +51,7 @@ This code unpacks the downloaded files to the specified by ``WORKDIR``.
examine the OpenEmbedded class file ``base.bbclass``
.
-The ``SRC_URI`` and ``WORKDIR`` variables are not hardcoded into the
+The :term:`SRC_URI` and ``WORKDIR`` variables are not hardcoded into the
fetcher, since those fetcher methods can be (and are) called with
different variable names. In OpenEmbedded for example, the shared state
(sstate) code uses the fetch module to fetch the sstate files.
@@ -64,17 +64,17 @@ URLs by looking for source files in a specific search order:
:term:`PREMIRRORS` variable.
- *Source URI:* If pre-mirrors fail, BitBake uses the original URL (e.g
- from ``SRC_URI``).
+ from :term:`SRC_URI`).
- *Mirror Sites:* If fetch failures occur, BitBake next uses mirror
locations as defined by the :term:`MIRRORS` variable.
For each URL passed to the fetcher, the fetcher calls the submodule that
handles that particular URL type. This behavior can be the source of
-some confusion when you are providing URLs for the ``SRC_URI`` variable.
+some confusion when you are providing URLs for the :term:`SRC_URI` variable.
Consider the following two URLs::
- http://git.yoctoproject.org/git/poky;protocol=git
+ https://git.yoctoproject.org/git/poky;protocol=git
git://git.yoctoproject.org/git/poky;protocol=http
In the former case, the URL is passed to the ``wget`` fetcher, which does not
@@ -84,18 +84,18 @@ fetcher does know how to use HTTP as a transport.
Here are some examples that show commonly used mirror definitions::
PREMIRRORS ?= "\
- bzr://.*/.\* http://somemirror.org/sources/ \\n \
- cvs://.*/.\* http://somemirror.org/sources/ \\n \
- git://.*/.\* http://somemirror.org/sources/ \\n \
- hg://.*/.\* http://somemirror.org/sources/ \\n \
- osc://.*/.\* http://somemirror.org/sources/ \\n \
- p4://.*/.\* http://somemirror.org/sources/ \\n \
- svn://.*/.\* http://somemirror.org/sources/ \\n"
+ bzr://.*/.\* http://somemirror.org/sources/ \
+ cvs://.*/.\* http://somemirror.org/sources/ \
+ git://.*/.\* http://somemirror.org/sources/ \
+ hg://.*/.\* http://somemirror.org/sources/ \
+ osc://.*/.\* http://somemirror.org/sources/ \
+ p4://.*/.\* http://somemirror.org/sources/ \
+ svn://.*/.\* http://somemirror.org/sources/"
MIRRORS =+ "\
- ftp://.*/.\* http://somemirror.org/sources/ \\n \
- http://.*/.\* http://somemirror.org/sources/ \\n \
- https://.*/.\* http://somemirror.org/sources/ \\n"
+ ftp://.*/.\* http://somemirror.org/sources/ \
+ http://.*/.\* http://somemirror.org/sources/ \
+ https://.*/.\* http://somemirror.org/sources/"
It is useful to note that BitBake
supports cross-URLs. It is possible to mirror a Git repository on an
@@ -110,14 +110,14 @@ which is specified by the :term:`DL_DIR` variable.
File integrity is of key importance for reproducing builds. For
non-local archive downloads, the fetcher code can verify SHA-256 and MD5
checksums to ensure the archives have been downloaded correctly. You can
-specify these checksums by using the ``SRC_URI`` variable with the
+specify these checksums by using the :term:`SRC_URI` variable with the
appropriate varflags as follows::
SRC_URI[md5sum] = "value"
SRC_URI[sha256sum] = "value"
You can also specify the checksums as
-parameters on the ``SRC_URI`` as shown below::
+parameters on the :term:`SRC_URI` as shown below::
SRC_URI = "http://example.com/foobar.tar.bz2;md5sum=4a8e0f237e961fd7785d19d07fdb994d"
@@ -129,7 +129,7 @@ shows how you name the URIs::
SRC_URI[foo.md5sum] = 4a8e0f237e961fd7785d19d07fdb994d
After a file has been downloaded and
-has had its checksum checked, a ".done" stamp is placed in ``DL_DIR``.
+has had its checksum checked, a ".done" stamp is placed in :term:`DL_DIR`.
BitBake uses this stamp during subsequent builds to avoid downloading or
comparing a checksum for the file again.
@@ -144,6 +144,10 @@ download without a checksum triggers an error message. The
make any attempted network access a fatal error, which is useful for
checking that mirrors are complete as well as other things.
+If :term:`BB_CHECK_SSL_CERTS` is set to ``0`` then SSL certificate checking will
+be disabled. This variable defaults to ``1`` so SSL certificates are normally
+checked.
+
.. _bb-the-unpack:
The Unpack
@@ -163,6 +167,9 @@ govern the behavior of the unpack stage:
- *dos:* Applies to ``.zip`` and ``.jar`` files and specifies whether
to use DOS line ending conversion on text files.
+- *striplevel:* Strip specified number of leading components (levels)
+ from file names on extraction
+
- *subdir:* Unpacks the specific URL to the specified subdirectory
within the root directory.
@@ -222,6 +229,11 @@ downloaded file is useful for avoiding collisions in
:term:`DL_DIR` when dealing with multiple files that
have the same name.
+If a username and password are specified in the ``SRC_URI``, a Basic
+Authorization header will be added to each request, including across redirects.
+To instead limit the Authorization header to the first request, add
+"redirectauth=0" to the list of parameters.
+
Some example URLs are as follows::
SRC_URI = "http://oe.handhelds.org/not_there.aac"
@@ -384,6 +396,19 @@ This fetcher supports the following parameters:
protocol is "file". You can also use "http", "https", "ssh" and
"rsync".
+ .. note::
+
+ When ``protocol`` is "ssh", the URL expected in :term:`SRC_URI` differs
+ from the one that is typically passed to ``git clone`` command and provided
+ by the Git server to fetch from. For example, the URL returned by GitLab
+ server for ``mesa`` when cloning over SSH is
+ ``git@gitlab.freedesktop.org:mesa/mesa.git``, however the expected URL in
+ :term:`SRC_URI` is the following::
+
+ SRC_URI = "git://git@gitlab.freedesktop.org/mesa/mesa.git;branch=main;protocol=ssh;..."
+
+ Note that the ``:`` character is replaced by a ``/`` before the path to the project.
+
- *"nocheckout":* Tells the fetcher to not checkout source code when
unpacking when set to "1". Set this option for the URL where there is
a custom routine to checkout code. The default is "0".
@@ -399,17 +424,17 @@ This fetcher supports the following parameters:
- *"nobranch":* Tells the fetcher to not check the SHA validation for
the branch when set to "1". The default is "0". Set this option for
- the recipe that refers to the commit that is valid for a tag instead
- of the branch.
+ the recipe that refers to the commit that is valid for any namespace
+ (branch, tag, ...) instead of the branch.
- *"bareclone":* Tells the fetcher to clone a bare clone into the
destination directory without checking out a working tree. Only the
raw Git metadata is provided. This parameter implies the "nocheckout"
parameter as well.
-- *"branch":* The branch(es) of the Git tree to clone. If unset, this
- is assumed to be "master". The number of branch parameters much match
- the number of name parameters.
+- *"branch":* The branch(es) of the Git tree to clone. Unless
+ "nobranch" is set to "1", this is a mandatory parameter. The number of
+ branch parameters must match the number of name parameters.
- *"rev":* The revision to use for the checkout. The default is
"master".
@@ -432,17 +457,33 @@ This fetcher supports the following parameters:
Here are some example URLs::
- SRC_URI = "git://git.oe.handhelds.org/git/vip.git;tag=version-1"
- SRC_URI = "git://git.oe.handhelds.org/git/vip.git;protocol=http"
+ SRC_URI = "git://github.com/fronteed/icheck.git;protocol=https;branch=${PV};tag=${PV}"
+ SRC_URI = "git://github.com/asciidoc/asciidoc-py;protocol=https;branch=main"
+ SRC_URI = "git://git@gitlab.freedesktop.org/mesa/mesa.git;branch=main;protocol=ssh;..."
+
+.. note::
+
+ When using ``git`` as the fetcher of the main source code of your software,
+ ``S`` should be set accordingly::
+
+ S = "${WORKDIR}/git"
.. note::
Specifying passwords directly in ``git://`` urls is not supported.
- There are several reasons: ``SRC_URI`` is often written out to logs and
+ There are several reasons: :term:`SRC_URI` is often written out to logs and
other places, and that could easily leak passwords; it is also all too
easy to share metadata without removing passwords. SSH keys, ``~/.netrc``
and ``~/.ssh/config`` files can be used as alternatives.
+Using tags with the git fetcher may cause surprising behaviour. BitBake needs to
+resolve the tag to a specific revision and to do that, it has to connect to and use
+the upstream repository. This is because the revision the tags point at can change and
+we've seen cases of this happening in well known public repositories. This can mean
+many more network connections than expected and recipes may be reparsed at every build.
+Source mirrors will also be bypassed as the upstream repository is the only source
+of truth to resolve the revision accurately. For these reasons, whilst the fetcher
+can support tags, we recommend being specific about revisions in recipes.
.. _gitsm-fetcher:
@@ -487,7 +528,7 @@ To use this fetcher, make sure your recipe has proper
The fetcher uses the ``rcleartool`` or
``cleartool`` remote client, depending on which one is available.
-Following are options for the ``SRC_URI`` statement:
+Following are options for the :term:`SRC_URI` statement:
- *vob*: The name, which must include the prepending "/" character,
of the ClearCase VOB. This option is required.
@@ -549,7 +590,7 @@ password if you do not wish to keep those values in a recipe itself. If
you choose not to use ``P4CONFIG``, or to explicitly set variables that
``P4CONFIG`` can contain, you can specify the ``P4PORT`` value, which is
the server's URL and port number, and you can specify a username and
-password directly in your recipe within ``SRC_URI``.
+password directly in your recipe within :term:`SRC_URI`.
Here is an example that relies on ``P4CONFIG`` to specify the server URL
and port, username, and password, and fetches the Head Revision::
@@ -655,6 +696,133 @@ Here is an example URL::
It can also be used when setting mirrors definitions using the :term:`PREMIRRORS` variable.
+.. _gcp-fetcher:
+
+GCP Fetcher (``gs://``)
+--------------------------
+
+This submodule fetches data from a
+`Google Cloud Storage Bucket <https://cloud.google.com/storage/docs/buckets>`__.
+It uses the `Google Cloud Storage Python Client <https://cloud.google.com/python/docs/reference/storage/latest>`__
+to check the status of objects in the bucket and download them.
+The use of the Python client makes it substantially faster than using command
+line tools such as gsutil.
+
+The fetcher requires the Google Cloud Storage Python Client to be installed, along
+with the gsutil tool.
+
+The fetcher requires that the machine has valid credentials for accessing the
+chosen bucket. Instructions for authentication can be found in the
+`Google Cloud documentation <https://cloud.google.com/docs/authentication/provide-credentials-adc#local-dev>`__.
+
+If used from the OpenEmbedded build system, the fetcher can be used for
+fetching sstate artifacts from a GCS bucket by specifying the
+``SSTATE_MIRRORS`` variable as shown below::
+
+ SSTATE_MIRRORS ?= "\
+ file://.* gs://<bucket name>/PATH \
+ "
+
+The fetcher can also be used in recipes::
+
+ SRC_URI = "gs://<bucket name>/<foo_container>/<bar_file>"
+
+However, the checksum of the file should also be provided::
+
+ SRC_URI[sha256sum] = "<sha256 string>"
+
+.. _crate-fetcher:
+
+Crate Fetcher (``crate://``)
+----------------------------
+
+This submodule fetches code for
+`Rust language "crates" <https://doc.rust-lang.org/reference/glossary.html?highlight=crate#crate>`__
+corresponding to Rust libraries and programs to compile. Such crates are typically shared
+on https://crates.io/ but this fetcher supports other crate registries too.
+
+The format for the :term:`SRC_URI` setting must be::
+
+ SRC_URI = "crate://REGISTRY/NAME/VERSION"
+
+Here is an example URL::
+
+ SRC_URI = "crate://crates.io/glob/0.2.11"
+
+.. _npm-fetcher:
+
+NPM Fetcher (``npm://``)
+------------------------
+
+This submodule fetches source code from an
+`NPM <https://en.wikipedia.org/wiki/Npm_(software)>`__
+Javascript package registry.
+
+The format for the :term:`SRC_URI` setting must be::
+
+ SRC_URI = "npm://some.registry.url;ParameterA=xxx;ParameterB=xxx;..."
+
+This fetcher supports the following parameters:
+
+- *"package":* The NPM package name. This is a mandatory parameter.
+
+- *"version":* The NPM package version. This is a mandatory parameter.
+
+- *"downloadfilename":* Specifies the filename used when storing the downloaded file.
+
+- *"destsuffix":* Specifies the directory to use to unpack the package (default: ``npm``).
+
+Note that the NPM fetcher only fetches the package source itself. The dependencies
+can be fetched through the `npmsw-fetcher`_.
+
+Here is an example URL with both fetchers::
+
+ SRC_URI = " \
+ npm://registry.npmjs.org/;package=cute-files;version=${PV} \
+ npmsw://${THISDIR}/${BPN}/npm-shrinkwrap.json \
+ "
+
+See :yocto_docs:`Creating Node Package Manager (NPM) Packages
+</dev-manual/packages.html#creating-node-package-manager-npm-packages>`
+in the Yocto Project manual for details about using
+:yocto_docs:`devtool <https://docs.yoctoproject.org/ref-manual/devtool-reference.html>`
+to automatically create a recipe from an NPM URL.
+
+.. _npmsw-fetcher:
+
+NPM shrinkwrap Fetcher (``npmsw://``)
+-------------------------------------
+
+This submodule fetches source code from an
+`NPM shrinkwrap <https://docs.npmjs.com/cli/v8/commands/npm-shrinkwrap>`__
+description file, which lists the dependencies
+of an NPM package while locking their versions.
+
+The format for the :term:`SRC_URI` setting must be::
+
+ SRC_URI = "npmsw://some.registry.url;ParameterA=xxx;ParameterB=xxx;..."
+
+This fetcher supports the following parameters:
+
+- *"dev":* Set this parameter to ``1`` to install "devDependencies".
+
+- *"destsuffix":* Specifies the directory to use to unpack the dependencies
+ (``${S}`` by default).
+
+Note that the shrinkwrap file can also be provided by the recipe for
+the package which has such dependencies, for example::
+
+ SRC_URI = " \
+ npm://registry.npmjs.org/;package=cute-files;version=${PV} \
+ npmsw://${THISDIR}/${BPN}/npm-shrinkwrap.json \
+ "
+
+Such a file can automatically be generated using
+:yocto_docs:`devtool <https://docs.yoctoproject.org/ref-manual/devtool-reference.html>`
+as described in the :yocto_docs:`Creating Node Package Manager (NPM) Packages
+</dev-manual/packages.html#creating-node-package-manager-npm-packages>`
+section of the Yocto Project.
+
Other Fetchers
--------------
@@ -664,10 +832,10 @@ Fetch submodules also exist for the following:
- Mercurial (``hg://``)
-- npm (``npm://``)
-
- OSC (``osc://``)
+- S3 (``s3://``)
+
- Secure FTP (``sftp://``)
- Secure Shell (``ssh://``)
@@ -680,4 +848,4 @@ submodules. However, you might find the code helpful and readable.
Auto Revisions
==============
-We need to document ``AUTOREV`` and ``SRCREV_FORMAT`` here.
+We need to document ``AUTOREV`` and :term:`SRCREV_FORMAT` here.
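
As a footnote to the checksum coverage in this file, the verification itself
is plain hashing. A minimal sketch of validating a download against the
sha256 recorded in a recipe (illustrative, not the fetcher's actual code
path)::

    import hashlib

    def verify_sha256(path, expected):
        # Stream the file so large archives need not fit in memory.
        h = hashlib.sha256()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(1 << 16), b""):
                h.update(chunk)
        return h.hexdigest() == expected

    # e.g. verify_sha256("downloads/foobar.tar.bz2", "<sha256 string>")
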
diff --git a/doc/bitbake-user-manual/bitbake-user-manual-hello.rst b/doc/bitbake-user-manual/bitbake-user-manual-hello.rst
index a9c33709a..654196ca2 100644
--- a/doc/bitbake-user-manual/bitbake-user-manual-hello.rst
+++ b/doc/bitbake-user-manual/bitbake-user-manual-hello.rst
@@ -18,28 +18,32 @@ it.
Obtaining BitBake
=================
-See the :ref:`bitbake-user-manual/bitbake-user-manual-hello:obtaining bitbake` section for
+See the :ref:`bitbake-user-manual/bitbake-user-manual-intro:obtaining bitbake` section for
information on how to obtain BitBake. Once you have the source code on
your machine, the BitBake directory appears as follows::
$ ls -al
- total 100
- drwxrwxr-x. 9 wmat wmat 4096 Jan 31 13:44 .
- drwxrwxr-x. 3 wmat wmat 4096 Feb 4 10:45 ..
- -rw-rw-r--. 1 wmat wmat 365 Nov 26 04:55 AUTHORS
- drwxrwxr-x. 2 wmat wmat 4096 Nov 26 04:55 bin
- drwxrwxr-x. 4 wmat wmat 4096 Jan 31 13:44 build
- -rw-rw-r--. 1 wmat wmat 16501 Nov 26 04:55 ChangeLog
- drwxrwxr-x. 2 wmat wmat 4096 Nov 26 04:55 classes
- drwxrwxr-x. 2 wmat wmat 4096 Nov 26 04:55 conf
- drwxrwxr-x. 3 wmat wmat 4096 Nov 26 04:55 contrib
- -rw-rw-r--. 1 wmat wmat 17987 Nov 26 04:55 COPYING
- drwxrwxr-x. 3 wmat wmat 4096 Nov 26 04:55 doc
- -rw-rw-r--. 1 wmat wmat 69 Nov 26 04:55 .gitignore
- -rw-rw-r--. 1 wmat wmat 849 Nov 26 04:55 HEADER
- drwxrwxr-x. 5 wmat wmat 4096 Jan 31 13:44 lib
- -rw-rw-r--. 1 wmat wmat 195 Nov 26 04:55 MANIFEST.in
- -rw-rw-r--. 1 wmat wmat 2887 Nov 26 04:55 TODO
+ total 108
+ drwxr-xr-x 9 fawkh 10000 4096 feb 24 12:10 .
+ drwx------ 36 fawkh 10000 4096 mar 2 17:00 ..
+ -rw-r--r-- 1 fawkh 10000 365 feb 24 12:10 AUTHORS
+ drwxr-xr-x 2 fawkh 10000 4096 feb 24 12:10 bin
+ -rw-r--r-- 1 fawkh 10000 16501 feb 24 12:10 ChangeLog
+ drwxr-xr-x 2 fawkh 10000 4096 feb 24 12:10 classes
+ drwxr-xr-x 2 fawkh 10000 4096 feb 24 12:10 conf
+ drwxr-xr-x 5 fawkh 10000 4096 feb 24 12:10 contrib
+ drwxr-xr-x 6 fawkh 10000 4096 feb 24 12:10 doc
+ drwxr-xr-x 8 fawkh 10000 4096 mar 2 16:26 .git
+ -rw-r--r-- 1 fawkh 10000 31 feb 24 12:10 .gitattributes
+ -rw-r--r-- 1 fawkh 10000 392 feb 24 12:10 .gitignore
+ drwxr-xr-x 13 fawkh 10000 4096 feb 24 12:11 lib
+ -rw-r--r-- 1 fawkh 10000 1224 feb 24 12:10 LICENSE
+ -rw-r--r-- 1 fawkh 10000 15394 feb 24 12:10 LICENSE.GPL-2.0-only
+ -rw-r--r-- 1 fawkh 10000 1286 feb 24 12:10 LICENSE.MIT
+ -rw-r--r-- 1 fawkh 10000 229 feb 24 12:10 MANIFEST.in
+ -rw-r--r-- 1 fawkh 10000 2413 feb 24 12:10 README
+ -rw-r--r-- 1 fawkh 10000 43 feb 24 12:10 toaster-requirements.txt
+ -rw-r--r-- 1 fawkh 10000 2887 feb 24 12:10 TODO
At this point, you should have BitBake cloned to a directory that
matches the previous listing except for dates and user names.
@@ -52,7 +56,7 @@ directory to where your local BitBake files are and run the following
command::
$ ./bin/bitbake --version
- BitBake Build Tool Core version 1.23.0, bitbake version 1.23.0
+ BitBake Build Tool Core version 2.3.1
The console output tells you what version
you are running.
@@ -99,7 +103,7 @@ discussion mailing list about the BitBake build tool.
This example was inspired by and drew heavily from
`Mailing List post - The BitBake equivalent of "Hello, World!"
- <http://www.mail-archive.com/yocto@yoctoproject.org/msg09379.html>`_.
+ <https://www.mail-archive.com/yocto@yoctoproject.org/msg09379.html>`_.
As stated earlier, the goal of this example is to eventually compile
"Hello World". However, it is unknown what BitBake needs and what you
@@ -130,38 +134,23 @@ Following is the complete "Hello World" example.
directory. Run the ``bitbake`` command and see what it does::
$ bitbake
- The BBPATH variable is not set and bitbake did not
- find a conf/bblayers.conf file in the expected location.
+ ERROR: The BBPATH variable is not set and bitbake did not find a conf/bblayers.conf file in the expected location.
Maybe you accidentally invoked bitbake from the wrong directory?
- DEBUG: Removed the following variables from the environment:
- GNOME_DESKTOP_SESSION_ID, XDG_CURRENT_DESKTOP,
- GNOME_KEYRING_CONTROL, DISPLAY, SSH_AGENT_PID, LANG, no_proxy,
- XDG_SESSION_PATH, XAUTHORITY, SESSION_MANAGER, SHLVL,
- MANDATORY_PATH, COMPIZ_CONFIG_PROFILE, WINDOWID, EDITOR,
- GPG_AGENT_INFO, SSH_AUTH_SOCK, GDMSESSION, GNOME_KEYRING_PID,
- XDG_SEAT_PATH, XDG_CONFIG_DIRS, LESSOPEN, DBUS_SESSION_BUS_ADDRESS,
- _, XDG_SESSION_COOKIE, DESKTOP_SESSION, LESSCLOSE, DEFAULTS_PATH,
- UBUNTU_MENUPROXY, OLDPWD, XDG_DATA_DIRS, COLORTERM, LS_COLORS
-
- The majority of this output is specific to environment variables that
- are not directly relevant to BitBake. However, the very first
- message regarding the ``BBPATH`` variable and the
- ``conf/bblayers.conf`` file is relevant.
When you run BitBake, it begins looking for metadata files. The
:term:`BBPATH` variable is what tells BitBake where
- to look for those files. ``BBPATH`` is not set and you need to set
- it. Without ``BBPATH``, BitBake cannot find any configuration files
+ to look for those files. :term:`BBPATH` is not set and you need to set
+ it. Without :term:`BBPATH`, BitBake cannot find any configuration files
(``.conf``) or recipe files (``.bb``) at all. BitBake also cannot
find the ``bitbake.conf`` file.
-#. **Setting BBPATH:** For this example, you can set ``BBPATH`` in
+#. **Setting BBPATH:** For this example, you can set :term:`BBPATH` in
the same manner that you set ``PATH`` earlier in the appendix. You
should realize, though, that it is much more flexible to set the
- ``BBPATH`` variable up in a configuration file for each project.
+ :term:`BBPATH` variable up in a configuration file for each project.
From your shell, enter the following commands to set and export the
- ``BBPATH`` variable::
+ :term:`BBPATH` variable::
$ BBPATH="projectdirectory"
$ export BBPATH
@@ -175,24 +164,18 @@ Following is the complete "Hello World" example.
("~") character as BitBake does not expand that character as the
shell would.
-#. **Run BitBake:** Now that you have ``BBPATH`` defined, run the
+#. **Run BitBake:** Now that you have :term:`BBPATH` defined, run the
``bitbake`` command again::
$ bitbake
- ERROR: Traceback (most recent call last):
- File "/home/scott-lenovo/bitbake/lib/bb/cookerdata.py", line 163, in wrapped
- return func(fn, *args)
- File "/home/scott-lenovo/bitbake/lib/bb/cookerdata.py", line 173, in parse_config_file
- return bb.parse.handle(fn, data, include)
- File "/home/scott-lenovo/bitbake/lib/bb/parse/__init__.py", line 99, in handle
- return h['handle'](fn, data, include)
- File "/home/scott-lenovo/bitbake/lib/bb/parse/parse_py/ConfHandler.py", line 120, in handle
- abs_fn = resolve_file(fn, data)
- File "/home/scott-lenovo/bitbake/lib/bb/parse/__init__.py", line 117, in resolve_file
- raise IOError("file %s not found in %s" % (fn, bbpath))
- IOError: file conf/bitbake.conf not found in /home/scott-lenovo/hello
-
- ERROR: Unable to parse conf/bitbake.conf: file conf/bitbake.conf not found in /home/scott-lenovo/hello
+ ERROR: Unable to parse /home/scott-lenovo/bitbake/lib/bb/parse/__init__.py
+ Traceback (most recent call last):
+ File "/home/scott-lenovo/bitbake/lib/bb/parse/__init__.py", line 127, in resolve_file(fn='conf/bitbake.conf', d=<bb.data_smart.DataSmart object at 0x7f22919a3df0>):
+ if not newfn:
+ > raise IOError(errno.ENOENT, "file %s not found in %s" % (fn, bbpath))
+ fn = newfn
+ FileNotFoundError: [Errno 2] file conf/bitbake.conf not found in <projectdirectory>
+
This sample output shows that BitBake could not find the
``conf/bitbake.conf`` file in the project directory. This file is
@@ -205,7 +188,7 @@ Following is the complete "Hello World" example.
recipe files. For this example, you need to create the file in your
project directory and define some key BitBake variables. For more
information on the ``bitbake.conf`` file, see
- http://git.openembedded.org/bitbake/tree/conf/bitbake.conf.
+ https://git.openembedded.org/bitbake/tree/conf/bitbake.conf.
Use the following commands to create the ``conf`` directory in the
project directory::
@@ -216,7 +199,7 @@ Following is the complete "Hello World" example.
use some editor to create the ``bitbake.conf`` so that it contains
the following::
- PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[0] or 'defaultpkgname'}"
+ PN = "${@bb.parse.vars_from_file(d.getVar('FILE', False),d)[0] or 'defaultpkgname'}"
TMPDIR = "${TOPDIR}/tmp"
CACHE = "${TMPDIR}/cache"
@@ -226,12 +209,12 @@ Following is the complete "Hello World" example.
.. note::
- Without a value for PN , the variables STAMP , T , and B , prevent more
- than one recipe from working. You can fix this by either setting PN to
+ Without a value for :term:`PN`, the variables :term:`STAMP`, :term:`T`, and :term:`B` prevent more
+ than one recipe from working. You can fix this by either setting :term:`PN` to
have a value similar to what OpenEmbedded and BitBake use in the default
- bitbake.conf file (see previous example). Or, by manually updating each
- recipe to set PN . You will also need to include PN as part of the STAMP
- , T , and B variable definitions in the local.conf file.
+ ``bitbake.conf`` file (see previous example), or by manually updating each
+ recipe to set :term:`PN`. You will also need to include :term:`PN` as part of the :term:`STAMP`,
+ :term:`T`, and :term:`B` variable definitions in the ``local.conf`` file.
The ``TMPDIR`` variable establishes a directory that BitBake uses
for build output and intermediate files other than the cached
@@ -254,18 +237,14 @@ Following is the complete "Hello World" example.
exists, you can run the ``bitbake`` command again::
$ bitbake
- ERROR: Traceback (most recent call last):
- File "/home/scott-lenovo/bitbake/lib/bb/cookerdata.py", line 163, in wrapped
- return func(fn, *args)
- File "/home/scott-lenovo/bitbake/lib/bb/cookerdata.py", line 177, in _inherit
- bb.parse.BBHandler.inherit(bbclass, "configuration INHERITs", 0, data)
- File "/home/scott-lenovo/bitbake/lib/bb/parse/parse_py/BBHandler.py", line 92, in inherit
- include(fn, file, lineno, d, "inherit")
- File "/home/scott-lenovo/bitbake/lib/bb/parse/parse_py/ConfHandler.py", line 100, in include
- raise ParseError("Could not %(error_out)s file %(fn)s" % vars(), oldfn, lineno)
- ParseError: ParseError in configuration INHERITs: Could not inherit file classes/base.bbclass
-
- ERROR: Unable to parse base: ParseError in configuration INHERITs: Could not inherit file classes/base.bbclass
+ ERROR: Unable to parse /home/scott-lenovo/bitbake/lib/bb/parse/parse_py/BBHandler.py
+ Traceback (most recent call last):
+ File "/home/scott-lenovo/bitbake/lib/bb/parse/parse_py/BBHandler.py", line 67, in inherit(files=['base'], fn='configuration INHERITs', lineno=0, d=<bb.data_smart.DataSmart object at 0x7fab6815edf0>):
+ if not os.path.exists(file):
+ > raise ParseError("Could not inherit file %s" % (file), fn, lineno)
+
+ bb.parse.ParseError: ParseError in configuration INHERITs: Could not inherit file classes/base.bbclass
+
In the sample output,
BitBake could not find the ``classes/base.bbclass`` file. You need
@@ -284,7 +263,10 @@ Following is the complete "Hello World" example.
$ mkdir classes
Move to the ``classes`` directory and then create the
- ``base.bbclass`` file by inserting this single line: addtask build
+ ``base.bbclass`` file by inserting this single line::
+
+ addtask build
+
The minimal task that BitBake runs is the ``do_build`` task. This is
all the example needs in order to build the project. Of course, the
``base.bbclass`` can have much more depending on which build
@@ -328,10 +310,19 @@ Following is the complete "Hello World" example.
BBFILES += "${LAYERDIR}/*.bb"
BBFILE_COLLECTIONS += "mylayer"
BBFILE_PATTERN_mylayer := "^${LAYERDIR_RE}/"
+ LAYERSERIES_CORENAMES = "hello_world_example"
+ LAYERSERIES_COMPAT_mylayer = "hello_world_example"
For information on these variables, click on :term:`BBFILES`,
- :term:`LAYERDIR`, :term:`BBFILE_COLLECTIONS` or :term:`BBFILE_PATTERN_mylayer <BBFILE_PATTERN>`
- to go to the definitions in the glossary.
+ :term:`LAYERDIR`, :term:`BBFILE_COLLECTIONS`, :term:`BBFILE_PATTERN_mylayer <BBFILE_PATTERN>`
+ or :term:`LAYERSERIES_COMPAT` to go to the definitions in the glossary.
+
+ .. note::
+
+ We are setting both ``LAYERSERIES_CORENAMES`` and :term:`LAYERSERIES_COMPAT` in this particular case, because we
+ are using BitBake without OpenEmbedded.
+ You should usually just use :term:`LAYERSERIES_COMPAT` to specify the OE-Core versions with which your layer
+ is compatible, and add the meta-openembedded layer to your project.
You need to create the recipe file next. Inside your layer at the
top-level, use an editor and create a recipe file named
@@ -389,12 +380,14 @@ Following is the complete "Hello World" example.
target::
$ bitbake printhello
+ Loading cache: 100% |
+ Loaded 0 entries from dependency cache.
Parsing recipes: 100% |##################################################################################|
- Time: 00:00:00
Parsing of 1 .bb files complete (0 cached, 1 parsed). 1 targets, 0 skipped, 0 masked, 0 errors.
NOTE: Resolving any missing task queue dependencies
- NOTE: Preparing RunQueue
- NOTE: Executing RunQueue Tasks
+ Initialising tasks: 100% |###############################################################################|
+ NOTE: No setscene tasks
+ NOTE: Executing Tasks
********************
* *
* Hello, World! *
diff --git a/doc/bitbake-user-manual/bitbake-user-manual-intro.rst b/doc/bitbake-user-manual/bitbake-user-manual-intro.rst
index b3cea61ff..35ffb88b0 100644
--- a/doc/bitbake-user-manual/bitbake-user-manual-intro.rst
+++ b/doc/bitbake-user-manual/bitbake-user-manual-intro.rst
@@ -27,7 +27,7 @@ Linux software stacks using a task-oriented approach.
Conceptually, BitBake is similar to GNU Make in some regards but has
significant differences:
-- BitBake executes tasks according to provided metadata that builds up
+- BitBake executes tasks according to the provided metadata that builds up
the tasks. Metadata is stored in recipe (``.bb``) and related recipe
"append" (``.bbappend``) files, configuration (``.conf``) and
underlying include (``.inc``) files, and in class (``.bbclass``)
@@ -60,11 +60,10 @@ member Chris Larson split the project into two distinct pieces:
- OpenEmbedded, a metadata set utilized by BitBake
Today, BitBake is the primary basis of the
-`OpenEmbedded <http://www.openembedded.org/>`__ project, which is being
-used to build and maintain Linux distributions such as the `Angstrom
-Distribution <http://www.angstrom-distribution.org/>`__, and which is
-also being used as the build tool for Linux projects such as the `Yocto
-Project <http://www.yoctoproject.org>`__.
+`OpenEmbedded <https://www.openembedded.org/>`__ project, which is being
+used to build and maintain Linux distributions such as the `Poky
+Reference Distribution <https://www.yoctoproject.org/software-item/poky/>`__,
+developed under the umbrella of the `Yocto Project <https://www.yoctoproject.org>`__.
Prior to BitBake, no other build tool adequately met the needs of an
aspiring embedded Linux distribution. All of the build systems used by
@@ -319,7 +318,7 @@ You can obtain BitBake several different ways:
The following example downloads a snapshot of BitBake version 1.17.0::
- $ wget http://git.openembedded.org/bitbake/snapshot/bitbake-1.17.0.tar.gz
+ $ wget https://git.openembedded.org/bitbake/snapshot/bitbake-1.17.0.tar.gz
$ tar zxpvf bitbake-1.17.0.tar.gz
After extraction of the tarball using
@@ -417,8 +416,8 @@ Following is the usage and syntax for BitBake::
-l DEBUG_DOMAINS, --log-domains=DEBUG_DOMAINS
Show debug logging for the specified logging domains
-P, --profile Profile the command and save reports.
- -u UI, --ui=UI The user interface to use (knotty, ncurses or taskexp
- - default knotty).
+ -u UI, --ui=UI The user interface to use (knotty, ncurses, taskexp or
+ teamcity - default knotty).
--token=XMLRPCTOKEN Specify the connection token to be used when
connecting to a remote server.
--revisions-changed Set the exit code depending on whether upstream
@@ -433,6 +432,9 @@ Following is the usage and syntax for BitBake::
Environment variable BB_SERVER_TIMEOUT.
--no-setscene Do not run any setscene tasks. sstate will be ignored
and everything needed, built.
+ --skip-setscene Skip setscene tasks if they would be executed. Tasks
+ previously restored from sstate will be kept, unlike
+ --no-setscene
--setscene-only Only run setscene tasks, don't run any real tasks.
--remote-server=REMOTE_SERVER
Connect to the specified server.
@@ -534,10 +536,10 @@ current working directory:
- ``pn-buildlist``: Shows a simple list of targets that are to be
built.
-To stop depending on common depends, use the "-I" depend option and
+To stop depending on common depends, use the ``-I`` depend option and
BitBake omits them from the graph. Leaving this information out can
produce more readable graphs. This way, you can remove from the graph
-``DEPENDS`` from inherited classes such as ``base.bbclass``.
+:term:`DEPENDS` from inherited classes such as ``base.bbclass``.
Here are two examples that create dependency graphs. The second example
omits depends common in OpenEmbedded from the graph::
@@ -564,7 +566,7 @@ for two separate targets:
.. image:: figures/bb_multiconfig_files.png
:align: center
-The reason for this required file hierarchy is because the ``BBPATH``
+The reason for this required file hierarchy is because the :term:`BBPATH`
variable is not constructed until the layers are parsed. Consequently,
using the configuration file as a pre-configuration file is not possible
unless it is located in the current working directory.
diff --git a/doc/bitbake-user-manual/bitbake-user-manual-metadata.rst b/doc/bitbake-user-manual/bitbake-user-manual-metadata.rst
index 615c9f9ce..58975f4c8 100644
--- a/doc/bitbake-user-manual/bitbake-user-manual-metadata.rst
+++ b/doc/bitbake-user-manual/bitbake-user-manual-metadata.rst
@@ -91,9 +91,10 @@ level changes:
Variables that are exported to the environment are preceded by the
string "export" in the command's output.
-- For recipe changes, use the following::
+- To find changes to a given variable in a specific recipe, use the
+ following::
- $ bitbake recipe -e \| grep VARIABLE="
+ $ bitbake recipename -e | grep VARIABLENAME=\"
This command checks to see if the variable actually makes
it into a specific recipe.
@@ -103,15 +104,15 @@ Line Joining
Outside of :ref:`functions <bitbake-user-manual/bitbake-user-manual-metadata:functions>`,
BitBake joins any line ending in
-a backslash character ("\") with the following line before parsing
-statements. The most common use for the "\" character is to split
+a backslash character ("\\") with the following line before parsing
+statements. The most common use for the "\\" character is to split
variable assignments over multiple lines, as in the following example::
FOO = "bar \
baz \
qaz"
-Both the "\" character and the newline
+Both the "\\" character and the newline
character that follow it are removed when joining lines. Thus, no
newline characters end up in the value of ``FOO``.
@@ -124,7 +125,7 @@ Consider this additional example where the two assignments both assign
.. note::
- BitBake does not interpret escape sequences like "\n" in variable
+ BitBake does not interpret escape sequences like "\\n" in variable
values. For these to have an effect, the value must be passed to some
utility that interprets escape sequences, such as
``printf`` or ``echo -n``.
@@ -158,7 +159,7 @@ behavior::
C = "qux"
*At this point, ${A} equals "qux bar baz"*
B = "norf"
- *At this point, ${A} equals "norf baz"\*
+ *At this point, ${A} equals "norf baz"*
Contrast this behavior with the
:ref:`bitbake-user-manual/bitbake-user-manual-metadata:immediate variable
@@ -194,22 +195,45 @@ value. However, if ``A`` is not set, the variable is set to "aval".
Setting a weak default value (??=)
----------------------------------
-It is possible to use a "weaker" assignment than in the previous section
-by using the "??=" operator. This assignment behaves identical to "?="
-except that the assignment is made at the end of the parsing process
-rather than immediately. Consequently, when multiple "??=" assignments
-exist, the last one is used. Also, any "=" or "?=" assignment will
-override the value set with "??=". Here is an example::
+The weak default value of a variable is the value which that variable
+will expand to if no value has been assigned to it via any of the other
+assignment operators. The "??=" operator takes effect immediately, replacing
+any previously defined weak default value. Here is an example::
- A ??= "somevalue"
- A ??= "someothervalue"
+ W ??= "x"
+ A := "${W}" # Immediate variable expansion
+ W ??= "y"
+ B := "${W}" # Immediate variable expansion
+ W ??= "z"
+ C = "${W}"
+ W ?= "i"
-If ``A`` is set before the above statements are
-parsed, the variable retains its value. If ``A`` is not set, the
-variable is set to "someothervalue".
+After parsing we will have::
-Again, this assignment is a "lazy" or "weak" assignment because it does
-not occur until the end of the parsing process.
+ A = "x"
+ B = "y"
+ C = "i"
+ W = "i"
+
+Appending and prepending with the non-override style operators will not
+substitute the weak default value, which means that after parsing::
+
+ W ??= "x"
+ W += "y"
+
+we will have::
+
+ W = " y"
+
+On the other hand, override-style appends/prepends/removes are applied after
+any active weak default value has been substituted::
+
+ W ??= "x"
+ W:append = "y"
+
+After parsing we will have::
+
+ W = "xy"
Immediate variable expansion (:=)
---------------------------------
@@ -225,7 +249,7 @@ immediately, rather than when the variable is actually used::
C := "${C}append"
In this example, ``A`` contains "test 123", even though the final value
-of ``T`` is "456". The variable ``B`` will end up containing "456
+of :term:`T` is "456". The variable :term:`B` will end up containing "456
cvalappend". This is because references to undefined variables are
preserved as is during (immediate) expansion. This is in contrast to GNU
Make, where undefined variables expand to nothing. The variable ``C``
@@ -248,7 +272,7 @@ examples::
C = "cval"
C =+ "test"
-The variable ``B`` contains "bval additionaldata" and ``C`` contains "test
+The variable :term:`B` contains "bval additionaldata" and ``C`` contains "test
cval".
.. _appending-and-prepending-without-spaces:
@@ -267,7 +291,7 @@ examples::
C = "cval"
C =. "test"
-The variable ``B`` contains "bvaladditionaldata" and ``C`` contains
+The variable :term:`B` contains "bvaladditionaldata" and ``C`` contains
"testcval".
Appending and Prepending (Override Style Syntax)
@@ -281,13 +305,13 @@ operators in that their effects are applied at variable expansion time
rather than being immediately applied. Here are some examples::
B = "bval"
- B_append = " additional data"
+ B:append = " additional data"
C = "cval"
- C_prepend = "additional data "
+ C:prepend = "additional data "
D = "dval"
- D_append = "additional data"
+ D:append = "additional data"
-The variable ``B``
+The variable :term:`B`
becomes "bval additional data" and ``C`` becomes "additional data cval".
The variable ``D`` becomes "dvaladditional data".
@@ -295,6 +319,10 @@ The variable ``D`` becomes "dvaladditional data".
You must control all spacing when you use the override syntax.
+.. note::
+
+ The overrides are applied in this order, ":append", ":prepend", ":remove".
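+
+ For example, after parsing (values illustrative)::
+
+   FOO = "x"
+   FOO:remove = "y"
+   FOO:append = " y"
+
+ ``FOO`` expands to "x ", because ":append" is applied before ":remove"
+ regardless of the order in which the assignments appear.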
+
It is also possible to append and prepend to shell functions and
BitBake-style Python functions. See the ":ref:`bitbake-user-manual/bitbake-user-manual-metadata:shell functions`" and ":ref:`bitbake-user-manual/bitbake-user-manual-metadata:bitbake-style python functions`"
sections for examples.
@@ -306,16 +334,17 @@ Removal (Override Style Syntax)
You can remove values from lists using the removal override style
syntax. Specifying a value for removal causes all occurrences of that
-value to be removed from the variable.
+value to be removed from the variable. Unlike ":append" and ":prepend",
+there is no need to add a leading or trailing space to the value.
When you use this syntax, BitBake expects one or more strings.
Surrounding spaces and spacing are preserved. Here is an example::
FOO = "123 456 789 123456 123 456 123 456"
- FOO_remove = "123"
- FOO_remove = "456"
+ FOO:remove = "123"
+ FOO:remove = "456"
FOO2 = " abc def ghi abcdef abc def abc def def"
- FOO2_remove = "\
+ FOO2:remove = "\
def \
abc \
ghi \
@@ -324,14 +353,36 @@ Surrounding spaces and spacing are preserved. Here is an example::
The variable ``FOO`` becomes
" 789 123456 " and ``FOO2`` becomes " abcdef ".
-Like "_append" and "_prepend", "_remove" is applied at variable
+Like ":append" and ":prepend", ":remove" is applied at variable
expansion time.
+.. note::
+
+ The overrides are applied in this order, ":append", ":prepend", ":remove".
+ This implies it is not possible to re-append previously removed strings.
+ However, one can undo a ":remove" by using an intermediate variable whose
+ content is passed to the ":remove", so that modifying the intermediate
+ variable is equivalent to keeping the string in::
+
+ FOOREMOVE = "123 456 789"
+ FOO:remove = "${FOOREMOVE}"
+ ...
+ FOOREMOVE = "123 789"
+
+ This expands to ``FOO:remove = "123 789"``.
+
+.. note::
+
+ Override application order may not match variable parse history, i.e.
+ the output of ``bitbake -e`` may show ":remove" before ":append",
+ but the string will still be removed, because ":remove" is handled
+ last.
+
Override Style Operation Advantages
-----------------------------------
-An advantage of the override style operations "_append", "_prepend", and
-"_remove" as compared to the "+=" and "=+" operators is that the
+An advantage of the override style operations ":append", ":prepend", and
+":remove" as compared to the "+=" and "=+" operators is that the
override style operators provide guaranteed operations. For example,
consider a class ``foo.bbclass`` that needs to add the value "val" to
the variable ``FOO``, and a recipe that uses ``foo.bbclass`` as follows::
@@ -346,18 +397,18 @@ not what is desired::
FOO += "val"
If, on the other hand, ``foo.bbclass``
-uses the "_append" operator, then the final value of ``FOO`` will be
+uses the ":append" operator, then the final value of ``FOO`` will be
"initial val", as intended::
- FOO_append = " val"
+ FOO:append = " val"
.. note::
- It is never necessary to use "+=" together with "_append". The following
+ It is never necessary to use "+=" together with ":append". The following
sequence of assignments appends "barbaz" to FOO::
- FOO_append = "bar"
- FOO_append = "baz"
+ FOO:append = "bar"
+ FOO:append = "baz"
The only effect of changing the second assignment in the previous
@@ -378,8 +429,8 @@ You can find more out about variable flags in general in the
You can define, append, and prepend values to variable flags. All the
standard syntax operations previously mentioned work for variable flags
-except for override style syntax (i.e. "_prepend", "_append", and
-"_remove").
+except for override style syntax (i.e. ":prepend", ":append", and
+":remove").
Here are some examples showing how to set variable flags::
@@ -397,6 +448,12 @@ documentation to a BitBake variable as follows::
CACHE[doc] = "The directory holding the cache of the metadata."
+.. note::
+
+ Variable flag names starting with an underscore (``_``) character
+ are allowed but are ignored by ``d.getVarFlags("VAR")``
+ in Python code. Such flag names are used internally by BitBake.
+
Inline Python Variable Expansion
--------------------------------
@@ -412,8 +469,8 @@ variables from BitBake's internal data dictionary, ``d``. The following
lines select the values of a package name and its version number,
respectively::
- PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[0] or 'defaultpkgname'}"
- PV = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[1] or '1.0'}"
+ PN = "${@bb.parse.vars_from_file(d.getVar('FILE', False),d)[0] or 'defaultpkgname'}"
+ PV = "${@bb.parse.vars_from_file(d.getVar('FILE', False),d)[1] or '1.0'}"
.. note::
@@ -496,40 +553,40 @@ Conditional Syntax (Overrides)
BitBake uses :term:`OVERRIDES` to control what
variables are overridden after BitBake parses recipes and configuration
-files. This section describes how you can use ``OVERRIDES`` as
+files. This section describes how you can use :term:`OVERRIDES` as
conditional metadata, talks about key expansion in relationship to
-``OVERRIDES``, and provides some examples to help with understanding.
+:term:`OVERRIDES`, and provides some examples to help with understanding.
Conditional Metadata
--------------------
-You can use ``OVERRIDES`` to conditionally select a specific version of
+You can use :term:`OVERRIDES` to conditionally select a specific version of
a variable and to conditionally append or prepend the value of a
variable.
.. note::
- Overrides can only use lower-case characters. Additionally,
- underscores are not permitted in override names as they are used to
+ Overrides can only use lower-case characters, digits and dashes.
+ In particular, colons are not permitted in override names as they are used to
separate overrides from each other and from the variable name.
-- *Selecting a Variable:* The ``OVERRIDES`` variable is a
+- *Selecting a Variable:* The :term:`OVERRIDES` variable is a
colon-character-separated list that contains items for which you want
to satisfy conditions. Thus, if you have a variable that is
- conditional on "arm", and "arm" is in ``OVERRIDES``, then the
+ conditional on "arm", and "arm" is in :term:`OVERRIDES`, then the
"arm"-specific version of the variable is used rather than the
non-conditional version. Here is an example::
OVERRIDES = "architecture:os:machine"
TEST = "default"
- TEST_os = "osspecific"
- TEST_nooverride = "othercondvalue"
+ TEST:os = "osspecific"
+ TEST:nooverride = "othercondvalue"
- In this example, the ``OVERRIDES``
+ In this example, the :term:`OVERRIDES`
variable lists three overrides: "architecture", "os", and "machine".
The variable ``TEST`` by itself has a default value of "default". You
select the os-specific version of the ``TEST`` variable by appending
- the "os" override to the variable (i.e. ``TEST_os``).
+ the "os" override to the variable (i.e. ``TEST:os``).
To better understand this, consider a practical example that assumes
an OpenEmbedded metadata-based Linux kernel recipe file. The
@@ -538,36 +595,36 @@ variable.
that value based on the architecture of the build::
KBRANCH = "standard/base"
- KBRANCH_qemuarm = "standard/arm-versatile-926ejs"
- KBRANCH_qemumips = "standard/mti-malta32"
- KBRANCH_qemuppc = "standard/qemuppc"
- KBRANCH_qemux86 = "standard/common-pc/base"
- KBRANCH_qemux86-64 = "standard/common-pc-64/base"
- KBRANCH_qemumips64 = "standard/mti-malta64"
+ KBRANCH:qemuarm = "standard/arm-versatile-926ejs"
+ KBRANCH:qemumips = "standard/mti-malta32"
+ KBRANCH:qemuppc = "standard/qemuppc"
+ KBRANCH:qemux86 = "standard/common-pc/base"
+ KBRANCH:qemux86-64 = "standard/common-pc-64/base"
+ KBRANCH:qemumips64 = "standard/mti-malta64"
- *Appending and Prepending:* BitBake also supports append and prepend
operations to variable values based on whether a specific item is
- listed in ``OVERRIDES``. Here is an example::
+ listed in :term:`OVERRIDES`. Here is an example::
DEPENDS = "glibc ncurses"
OVERRIDES = "machine:local"
- DEPENDS_append_machine = "libmad"
+ DEPENDS:append:machine = "libmad"
- In this example, ``DEPENDS`` becomes "glibc ncurses libmad".
+ In this example, :term:`DEPENDS` becomes "glibc ncurses libmad".
Again, using an OpenEmbedded metadata-based kernel recipe file as an
example, the following lines will conditionally append to the
``KERNEL_FEATURES`` variable based on the architecture::
- KERNEL_FEATURES_append = " ${KERNEL_EXTRA_FEATURES}"
- KERNEL_FEATURES_append_qemux86=" cfg/sound.scc cfg/paravirt_kvm.scc"
- KERNEL_FEATURES_append_qemux86-64=" cfg/sound.scc cfg/paravirt_kvm.scc"
+ KERNEL_FEATURES:append = " ${KERNEL_EXTRA_FEATURES}"
+ KERNEL_FEATURES:append:qemux86=" cfg/sound.scc cfg/paravirt_kvm.scc"
+ KERNEL_FEATURES:append:qemux86-64=" cfg/sound.scc cfg/paravirt_kvm.scc"
- *Setting a Variable for a Single Task:* BitBake supports setting a
variable just for the duration of a single task. Here is an example::
- FOO_task-configure = "val 1"
- FOO_task-compile = "val 2"
+ FOO:task-configure = "val 1"
+ FOO:task-compile = "val 2"
In the
previous example, ``FOO`` has the value "val 1" while the
@@ -580,9 +637,19 @@ variable.
``do_compile`` task.
You can also use this syntax with other combinations (e.g.
- "``_prepend``") as shown in the following example::
+ "``:prepend``") as shown in the following example::
+
+ EXTRA_OEMAKE:prepend:task-compile = "${PARALLEL_MAKE} "
+
+.. note::
+
+ Before BitBake 1.52 (Honister 3.4), the syntax for :term:`OVERRIDES`
+ used ``_`` instead of ``:``, so you will still find a lot of documentation
+ using ``_append``, ``_prepend``, and ``_remove``, for example.
- EXTRA_OEMAKE_prepend_task-compile = "${PARALLEL_MAKE} "
+ For details, see the
+ :yocto_docs:`Overrides Syntax Changes </migration-guides/migration-3.4.html#override-syntax-changes>`
+ section in the Yocto Project manual migration notes.
Key Expansion
-------------
@@ -612,33 +679,33 @@ users.
There is often confusion concerning the order in which overrides and
various "append" operators take effect. Recall that an append or prepend
-operation using "_append" and "_prepend" does not result in an immediate
+operation using ":append" and ":prepend" does not result in an immediate
assignment as would "+=", ".=", "=+", or "=.". Consider the following
example::
OVERRIDES = "foo"
A = "Z"
- A_foo_append = "X"
+ A:foo:append = "X"
For this case,
``A`` is unconditionally set to "Z" and "X" is unconditionally and
-immediately appended to the variable ``A_foo``. Because overrides have
-not been applied yet, ``A_foo`` is set to "X" due to the append and
+immediately appended to the variable ``A:foo``. Because overrides have
+not been applied yet, ``A:foo`` is set to "X" due to the append and
``A`` simply equals "Z".
Applying overrides, however, changes things. Since "foo" is listed in
-``OVERRIDES``, the conditional variable ``A`` is replaced with the "foo"
-version, which is equal to "X". So effectively, ``A_foo`` replaces
+:term:`OVERRIDES`, the conditional variable ``A`` is replaced with the "foo"
+version, which is equal to "X". So effectively, ``A:foo`` replaces
``A``.
This next example changes the order of the override and the append::
OVERRIDES = "foo"
A = "Z"
- A_append_foo = "X"
+ A:append:foo = "X"
For this case, before
-overrides are handled, ``A`` is set to "Z" and ``A_append_foo`` is set
+overrides are handled, ``A`` is set to "Z" and ``A:append:foo`` is set
to "X". Once the override for "foo" is applied, however, ``A`` gets
appended with "X". Consequently, ``A`` becomes "ZX". Notice that spaces
are not appended.
@@ -648,21 +715,21 @@ back as in the first example::
OVERRIDES = "foo"
A = "Y"
- A_foo_append = "Z"
- A_foo_append = "X"
+ A:foo:append = "Z"
+ A:foo:append = "X"
For this case, before any overrides are resolved,
``A`` is set to "Y" using an immediate assignment. After this immediate
-assignment, ``A_foo`` is set to "Z", and then further appended with "X"
+assignment, ``A:foo`` is set to "Z", and then further appended with "X"
leaving the variable set to "ZX". Finally, applying the override for
"foo" results in the conditional variable ``A`` becoming "ZX" (i.e.
-``A`` is replaced with ``A_foo``).
+``A`` is replaced with ``A:foo``).
This final example mixes in some varying operators::
A = "1"
- A_append = "2"
- A_append = "3"
+ A:append = "2"
+ A:append = "3"
A += "4"
A .= "5"
@@ -670,7 +737,7 @@ For this case, the type of append
operators are affecting the order of assignments as BitBake passes
through the code multiple times. Initially, ``A`` is set to "1 45"
because of the three statements that use immediate operators. After
-these assignments are made, BitBake applies the "_append" operations.
+these assignments are made, BitBake applies the ":append" operations.
Those operations result in ``A`` becoming "1 4523".
Sharing Functionality
@@ -686,7 +753,7 @@ share the task.
This section presents the mechanisms BitBake provides to allow you to
share functionality between recipes. Specifically, the mechanisms
-include ``include``, ``inherit``, ``INHERIT``, and ``require``
+include ``include``, ``inherit``, :term:`INHERIT`, and ``require``
directives.
Locating Include and Class Files
@@ -702,7 +769,7 @@ current directory for ``include`` and ``require`` directives.
In order for include and class files to be found by BitBake, they need
to be located in a "classes" subdirectory that can be found in
-``BBPATH``.
+:term:`BBPATH`.
``inherit`` Directive
---------------------
@@ -725,7 +792,7 @@ functionality for using Autotools that could be shared across recipes::
inherit autotools
In this case, BitBake would search for the directory
-``classes/autotools.bbclass`` in ``BBPATH``.
+``classes/autotools.bbclass`` in :term:`BBPATH`.
.. note::
@@ -752,7 +819,7 @@ parsed. One way to achieve a conditional inherit in this case is to use
overrides::
VARIABLE = ""
- VARIABLE_someoverride = "myclass"
+ VARIABLE:someoverride = "myclass"
Another method is by using anonymous Python. Here is an example::
@@ -780,7 +847,7 @@ BitBake understands the ``include`` directive. This directive causes
BitBake to parse whatever file you specify, and to insert that file at
that location. The directive is much like its equivalent in Make except
that if the path specified on the include line is a relative path,
-BitBake locates the first file it can find within ``BBPATH``.
+BitBake locates the first file it can find within :term:`BBPATH`.
The include directive is a more generic method of including
functionality as compared to the :ref:`inherit <bitbake-user-manual/bitbake-user-manual-metadata:\`\`inherit\`\` directive>`
@@ -822,7 +889,7 @@ does not suit a ``.bbclass`` file.
Similar to how BitBake handles :ref:`include <bitbake-user-manual/bitbake-user-manual-metadata:\`\`include\`\` directive>`, if
the path specified on the require line is a relative path, BitBake
-locates the first file it can find within ``BBPATH``.
+locates the first file it can find within :term:`BBPATH`.
As an example, suppose you have two versions of a recipe (e.g.
``foo_1.2.2.bb`` and ``foo_2.0.0.bb``) where each version contains some
@@ -851,7 +918,7 @@ As an example, suppose you needed to inherit a class file called
This configuration directive causes the named class to be inherited at
the point of the directive during parsing. As with the ``inherit``
directive, the ``.bbclass`` file must be located in a "classes"
-subdirectory in one of the directories specified in ``BBPATH``.
+subdirectory in one of the directories specified in :term:`BBPATH`.
.. note::
@@ -893,7 +960,7 @@ Regardless of the type of function, you can only define them in class
Shell Functions
---------------
-Functions written in shell script and executed either directly as
+Functions written in shell script are executed either directly as
functions, tasks, or both. They can also be called by other shell
functions. Here is an example shell function definition::
@@ -907,7 +974,7 @@ rules. The scripts are executed by ``/bin/sh``, which may not be a bash
shell but might be something such as ``dash``. You should not use
Bash-specific script (bashisms).
-Overrides and override-style operators like ``_append`` and ``_prepend``
+Overrides and override-style operators like ``:append`` and ``:prepend``
can also be applied to shell functions. Most commonly, this application
would be used in a ``.bbappend`` file to modify functions in the main
recipe. It can also be used to modify functions inherited from classes.
@@ -919,7 +986,7 @@ As an example, consider the following::
fn
}
- fn_prepend() {
+ fn:prepend() {
bbplain second
}
@@ -927,7 +994,7 @@ As an example, consider the following::
bbplain third
}
- do_foo_append() {
+ do_foo:append() {
bbplain fourth
}
@@ -943,7 +1010,7 @@ Running ``do_foo`` prints the following::
Overrides and override-style operators can be applied to any shell
function, not just :ref:`tasks <bitbake-user-manual/bitbake-user-manual-metadata:tasks>`.
-You can use the ``bitbake -e`` recipename command to view the final
+You can use the ``bitbake -e recipename`` command to view the final
assembled function after all overrides have been applied.
BitBake-Style Python Functions
@@ -977,7 +1044,7 @@ override-style operators to BitBake-style Python functions.
As an example, consider the following::
- python do_foo_prepend() {
+ python do_foo:prepend() {
bb.plain("first")
}
@@ -985,7 +1052,7 @@ As an example, consider the following::
bb.plain("second")
}
- python do_foo_append() {
+ python do_foo:append() {
bb.plain("third")
}
@@ -995,7 +1062,7 @@ Running ``do_foo`` prints the following::
recipename do_foo: second
recipename do_foo: third
-You can use the ``bitbake -e`` recipename command to view
+You can use the ``bitbake -e recipename`` command to view
the final assembled function after all overrides have been applied.
Python Functions
@@ -1015,7 +1082,7 @@ is an example::
SOMECONDITION = "1"
DEPENDS = "${@get_depends(d)}"
-This would result in ``DEPENDS`` containing ``dependencywithcond``.
+This would result in :term:`DEPENDS` containing ``dependencywithcond``.
Here are some things to know about Python functions:
@@ -1134,12 +1201,12 @@ equivalent to the following snippet::
values set for the variables within the anonymous functions become
available to tasks, which always run after parsing.
-Overrides and override-style operators such as "``_append``" are applied
+Overrides and override-style operators such as "``:append``" are applied
before anonymous functions run. In the following example, ``FOO`` ends
up with the value "foo from anonymous"::
FOO = "foo"
- FOO_append = " from outside"
+ FOO:append = " from outside"
python () {
d.setVar("FOO", "foo from anonymous")
@@ -1164,7 +1231,7 @@ To understand the benefits of this feature, consider the basic scenario
where a class defines a task function and your recipe inherits the
class. In this basic scenario, your recipe inherits the task function as
defined in the class. If desired, your recipe can add to the start and
-end of the function by using the "_prepend" or "_append" operations
+end of the function by using the ":prepend" or ":append" operations
respectively, or it can redefine the function completely. However, if it
redefines the function, there is no means for it to call the class
version of the function. ``EXPORT_FUNCTIONS`` provides a mechanism that
@@ -1342,8 +1409,8 @@ the build machine cannot influence the build.
.. note::
By default, BitBake cleans the environment to include only those
- things exported or listed in its whitelist to ensure that the build
- environment is reproducible and consistent. You can prevent this
+ things exported or listed in its passthrough list to ensure that the
+ build environment is reproducible and consistent. You can prevent this
"cleaning" by setting the :term:`BB_PRESERVE_ENV` variable.
Consequently, if you do want something to get passed into the build task
@@ -1351,14 +1418,14 @@ environment, you must take these two steps:
#. Tell BitBake to load what you want from the environment into the
datastore. You can do so through the
- :term:`BB_ENV_WHITELIST` and
- :term:`BB_ENV_EXTRAWHITE` variables. For
+ :term:`BB_ENV_PASSTHROUGH` and
+ :term:`BB_ENV_PASSTHROUGH_ADDITIONS` variables. For
example, assume you want to prevent the build system from accessing
- your ``$HOME/.ccache`` directory. The following command "whitelists"
- the environment variable ``CCACHE_DIR`` causing BitBake to allow that
- variable into the datastore::
+ your ``$HOME/.ccache`` directory. The following command adds the
+ environment variable ``CCACHE_DIR`` to BitBake's passthrough
+ list to allow that variable into the datastore::
- export BB_ENV_EXTRAWHITE="$BB_ENV_EXTRAWHITE CCACHE_DIR"
+ export BB_ENV_PASSTHROUGH_ADDITIONS="$BB_ENV_PASSTHROUGH_ADDITIONS CCACHE_DIR"
#. Tell BitBake to export what you have loaded into the datastore to the
task environment of every running task. Loading something from the
@@ -1375,14 +1442,14 @@ environment, you must take these two steps:
A side effect of the previous steps is that BitBake records the
variable as a dependency of the build process in things like the
setscene checksums. If doing so results in unnecessary rebuilds of
- tasks, you can whitelist the variable so that the setscene code
+ tasks, you can also flag the variable so that the setscene code
ignores the dependency when it creates checksums.
Sometimes, it is useful to be able to obtain information from the
original execution environment. BitBake saves a copy of the original
environment into a special variable named :term:`BB_ORIGENV`.
-The ``BB_ORIGENV`` variable returns a datastore object that can be
+The :term:`BB_ORIGENV` variable returns a datastore object that can be
queried using the standard datastore operators such as
``getVar(, False)``. The datastore object is useful, for example, to
find the original ``DISPLAY`` variable. Here is an example::
@@ -1429,12 +1496,35 @@ functionality of the task:
directory listed is used as the current working directory for the
task.
+- ``[file-checksums]``: Controls the file dependencies for a task. The
+ baseline file list is the set of files associated with
+ :term:`SRC_URI`. May be used to set additional dependencies on
+ files not associated with :term:`SRC_URI`.
+
+ The value set to the list is a file-boolean pair where the first
+ value is the file name and the second is whether or not it
+ physically exists on the filesystem. ::
+
+ do_configure[file-checksums] += "${MY_DIRPATH}/my-file.txt:True"
+
+ It is important to record any paths that the task looked at but which
+ did not exist. This means that if these do exist at a later
+ time, the task can be rerun with the new additional files. The
+ "exists" True or False value after the path allows this to be
+ handled.
+
- ``[lockfiles]``: Specifies one or more lockfiles to lock while the
task executes. Only one task may hold a lockfile, and any task that
attempts to lock an already locked file will block until the lock is
released. You can use this variable flag to accomplish mutual
exclusion.
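+
+ For example, a deploy task might serialize access to a shared output
+ directory (the task name and path here are purely illustrative)::
+
+   do_deploy[lockfiles] = "${DEPLOY_DIR}/deploy.lock"
+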
+- ``[network]``: When set to "1", allows a task to access the network. By
+ default, only the ``do_fetch`` task is granted network access. Recipes
+ shouldn't access the network outside of ``do_fetch`` as it usually
+ undermines fetcher source mirroring, image and licence manifests, software
+ auditing and supply chain security.
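+
+ For example, a hypothetical task that must query a remote service could be
+ granted access explicitly::
+
+   do_check_license[network] = "1"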
+
- ``[noexec]``: When set to "1", marks the task as being empty, with
no execution required. You can use the ``[noexec]`` flag to set up
tasks as dependency placeholders, or to disable tasks defined
@@ -1467,7 +1557,7 @@ functionality of the task:
can result in unpredictable behavior.
- Setting the varflag to a value greater than the value used in
- the ``BB_NUMBER_THREADS`` variable causes ``number_threads`` to
+ the :term:`BB_NUMBER_THREADS` variable causes ``number_threads`` to
have no effect.
- ``[postfuncs]``: List of functions to call after the completion of
@@ -1537,7 +1627,7 @@ intent is to make it easy to do things like email notification on build
failures.
Following is an example event handler that prints the name of the event
-and the content of the ``FILE`` variable::
+and the content of the :term:`FILE` variable::
addhandler myclass_eventhandler
python myclass_eventhandler() {
@@ -1576,7 +1666,7 @@ might have an interest in viewing:
- ``bb.event.ConfigParsed()``: Fired when the base configuration; which
consists of ``bitbake.conf``, ``base.bbclass`` and any global
- ``INHERIT`` statements; has been parsed. You can see multiple such
+ :term:`INHERIT` statements; has been parsed. You can see multiple such
events when each of the workers parse the base configuration or if
the server changes configuration and reparses. Any given datastore
only has one such event executed against it, however. If
@@ -1647,13 +1737,18 @@ user interfaces:
.. _variants-class-extension-mechanism:
-Variants - Class Extension Mechanism
-====================================
+Variants --- Class Extension Mechanism
+======================================
+
+BitBake supports multiple incarnations of a recipe file via the
+:term:`BBCLASSEXTEND` variable.
-BitBake supports two features that facilitate creating from a single
-recipe file multiple incarnations of that recipe file where all
-incarnations are buildable. These features are enabled through the
-:term:`BBCLASSEXTEND` and :term:`BBVERSIONS` variables.
+The :term:`BBCLASSEXTEND` variable is a space separated list of classes used
+to "extend" the recipe for each variant. Here is an example that results in a
+second incarnation of the current recipe being available. This second
+incarnation will have the "native" class inherited. ::
+
+ BBCLASSEXTEND = "native"
.. note::
@@ -1663,34 +1758,6 @@ incarnations are buildable. These features are enabled through the
class. For specific examples, see the OE-Core native , nativesdk , and
multilib classes.
-- ``BBCLASSEXTEND``: This variable is a space separated list of
- classes used to "extend" the recipe for each variant. Here is an
- example that results in a second incarnation of the current recipe
- being available. This second incarnation will have the "native" class
- inherited. ::
-
- BBCLASSEXTEND = "native"
-
-- ``BBVERSIONS``: This variable allows a single recipe to build
- multiple versions of a project from a single recipe file. You can
- also specify conditional metadata (using the
- :term:`OVERRIDES` mechanism) for a single
- version, or an optionally named range of versions. Here is an
- example::
-
- BBVERSIONS = "1.0 2.0 git"
- SRC_URI_git = "git://someurl/somepath.git"
-
- BBVERSIONS = "1.0.[0-6]:1.0.0+ 1.0.[7-9]:1.0.7+"
- SRC_URI_append_1.0.7+ = "file://some_patch_which_the_new_versions_need.patch;patch=1"
-
- The name of the range defaults to the original version of the recipe. For
- example, in OpenEmbedded, the recipe file ``foo_1.0.0+.bb`` creates a default
- name range of ``1.0.0+``. This is useful because the range name is not only
- placed into overrides, but it is also made available for the metadata to use
- in the variable that defines the base recipe versions for use in ``file://``
- search paths (:term:`FILESPATH`).
-
Dependencies
============
@@ -1756,13 +1823,13 @@ Build Dependencies
BitBake uses the :term:`DEPENDS` variable to manage
build time dependencies. The ``[deptask]`` varflag for tasks signifies
-the task of each item listed in ``DEPENDS`` that must complete before
+the task of each item listed in :term:`DEPENDS` that must complete before
that task can be executed. Here is an example::
do_configure[deptask] = "do_populate_sysroot"
In this example, the ``do_populate_sysroot`` task
-of each item in ``DEPENDS`` must complete before ``do_configure`` can
+of each item in :term:`DEPENDS` must complete before ``do_configure`` can
execute.
Runtime Dependencies
@@ -1771,8 +1838,8 @@ Runtime Dependencies
BitBake uses the :term:`PACKAGES`, :term:`RDEPENDS`, and :term:`RRECOMMENDS`
variables to manage runtime dependencies.
-The ``PACKAGES`` variable lists runtime packages. Each of those packages
-can have ``RDEPENDS`` and ``RRECOMMENDS`` runtime dependencies. The
+The :term:`PACKAGES` variable lists runtime packages. Each of those packages
+can have :term:`RDEPENDS` and :term:`RRECOMMENDS` runtime dependencies. The
``[rdeptask]`` flag for tasks is used to signify the task of each item
runtime dependency which must have completed before that task can be
executed. ::
@@ -1780,9 +1847,9 @@ executed. ::
do_package_qa[rdeptask] = "do_packagedata"
In the previous
-example, the ``do_packagedata`` task of each item in ``RDEPENDS`` must
+example, the ``do_packagedata`` task of each item in :term:`RDEPENDS` must
have completed before ``do_package_qa`` can execute.
-Although ``RDEPENDS`` contains entries from the
+Although :term:`RDEPENDS` contains entries from the
runtime dependency namespace, BitBake knows how to map them back
to the build-time dependency namespace, in which the tasks are defined.
@@ -1825,7 +1892,7 @@ Inter-Task Dependencies
BitBake uses the ``[depends]`` flag in a more generic form to manage
inter-task dependencies. This more generic form allows for
inter-dependency checks for specific tasks rather than checks for the
-data in ``DEPENDS``. Here is an example::
+data in :term:`DEPENDS`. Here is an example::
do_patch[depends] = "quilt-native:do_populate_sysroot"
@@ -1911,6 +1978,33 @@ looking at the source code of the ``bb`` module, which is in
the commonly used functions ``bb.utils.contains()`` and
``bb.utils.mkdirhier()``, which come with docstrings.
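+
+For example, ``bb.utils.contains()`` can be used from inline Python expansion
+(a minimal, hypothetical illustration)::
+
+   FOO = "val1 val2"
+   BAR = "${@bb.utils.contains('FOO', 'val1', 'yes', 'no', d)}"
+
+Here ``BAR`` expands to "yes", because "val1" is among the items of ``FOO``.
+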
+Extending Python Library Code
+-----------------------------
+
+If you wish to add your own Python library code (e.g. to provide
+functions/classes you can use from Python functions in the metadata)
+you can do so from any layer using the ``addpylib`` directive.
+This directive is typically added to your layer configuration
+(``conf/layer.conf``), although it will be handled in any ``.conf`` file.
+
+Usage is of the form::
+
+ addpylib <directory> <namespace>
+
+Where <directory> specifies the directory to add to the library path.
+The specified <namespace> is imported automatically, and if the imported
+module specifies an attribute named ``BBIMPORTS``, that list of
+sub-modules is iterated and imported too.
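+
+For example, a layer could ship its Python modules in a ``lib`` directory (the
+layout and names here are hypothetical)::
+
+   addpylib ${LAYERDIR}/lib mylayermodule
+
+with ``lib/mylayermodule/__init__.py`` optionally listing sub-modules to
+import::
+
+   BBIMPORTS = ["utils", "checks"]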
+
+Testing and Debugging BitBake Python code
+-----------------------------------------
+
+The OpenEmbedded build system implements a convenient ``pydevshell`` target which
+you can use to access the BitBake datastore and experiment with your own Python
+code. See :yocto_docs:`Using a Python Development Shell
+</dev-manual/python-development-shell.html#using-a-python-development-shell>` in the Yocto
+Project manual for details.
+
Task Checksums and Setscene
===========================
@@ -1943,16 +2037,6 @@ The following list describes related variables:
Specifies a function BitBake calls that determines whether BitBake
requires a setscene dependency to be met.
-- :term:`BB_SETSCENE_VERIFY_FUNCTION2`:
- Specifies a function to call that verifies the list of planned task
- execution before the main task execution happens.
-
-- :term:`BB_STAMP_POLICY`: Defines the mode
- for comparing timestamps of stamp files.
-
-- :term:`BB_STAMP_WHITELIST`: Lists stamp
- files that are looked at when the stamp policy is "whitelist".
-
- :term:`BB_TASKHASH`: Within an executing task,
this variable holds the hash of the task as returned by the currently
enabled signature generator.
@@ -1967,7 +2051,7 @@ Wildcard Support in Variables
=============================
Support for wildcard use in variables varies depending on the context in
-which it is used. For example, some variables and file names allow
+which it is used. For example, some variables and filenames allow
limited use of wildcards through the "``%``" and "``*``" characters.
Other variables or names support Python's
`glob <https://docs.python.org/3/library/glob.html>`_ syntax,
diff --git a/doc/bitbake-user-manual/bitbake-user-manual-ref-variables-context.rst b/doc/bitbake-user-manual/bitbake-user-manual-ref-variables-context.rst
new file mode 100644
index 000000000..e9c454ba1
--- /dev/null
+++ b/doc/bitbake-user-manual/bitbake-user-manual-ref-variables-context.rst
@@ -0,0 +1,91 @@
+.. SPDX-License-Identifier: CC-BY-2.5
+
+================
+Variable Context
+================
+
+|
+
+Some variables only take effect, or are only meant to be used, in certain
+contexts. Some should only be set in global files like ``.conf``, while others
+are intended only for local files like ``.bb``. This chapter describes some
+important variable contexts.
+
+.. _ref-varcontext-configuration:
+
+BitBake's own configuration
+===========================
+
+Variables starting with ``BB_`` usually configure the behaviour of BitBake itself.
+For example, one could configure:
+
+- System resources, like disk space to be used (:term:`BB_DISKMON_DIRS`),
+ or the number of tasks to be run in parallel by BitBake (:term:`BB_NUMBER_THREADS`).
+
+- How the fetchers shall behave, e.g., :term:`BB_FETCH_PREMIRRORONLY` is used
+ by BitBake to determine if BitBake's fetcher shall search only
+ :term:`PREMIRRORS` for files.
+
+Those variables are usually configured globally.
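+
+For example, in a global configuration file such as ``local.conf`` (the values
+below are purely illustrative)::
+
+   BB_NUMBER_THREADS = "8"
+   BB_DISKMON_DIRS = "STOPTASKS,${TMPDIR},1G,100K"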
+
+BitBake configuration
+=====================
+
+There are several groups of variables:
+
+- Like :term:`B` or :term:`T`, used to specify directories used by
+ BitBake during the build of a particular recipe. Those variables are
+ specified in ``bitbake.conf``. Some, like :term:`B`, are quite often
+ overridden in recipes.
+
+- Starting with ``FAKEROOT``, to configure how the ``fakeroot`` command is
+ handled. Those are usually set by ``bitbake.conf`` and might get adapted in a
+ ``bbclass``.
+
+- Detailing where BitBake stores and fetches information for data reuse
+ between build runs, like :term:`CACHE`, :term:`DL_DIR` or
+ :term:`PERSISTENT_DIR`. Those are usually global.
+
+
+Layers and files
+================
+
+Variables starting with ``LAYER`` configure how BitBake handles layers.
+Additionally, variables starting with ``BB`` configure how layers and files are
+handled. For example:
+
+- :term:`LAYERDEPENDS` is used to configure on which layers a given layer
+ depends.
+
+- The configured layers are contained in :term:`BBLAYERS` and files in
+ :term:`BBFILES`.
+
+Those variables are often used in the files ``layer.conf`` and ``bblayers.conf``.
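+
+As a sketch, a layer's ``conf/layer.conf`` might declare a dependency on
+another layer's collection (the ``mylayer`` collection name is hypothetical)::
+
+ LAYERDEPENDS_mylayer = "core"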
+
+Recipes and packages
+====================
+
+Variables handling recipes and packages can be split into:
+
+- :term:`PN`, :term:`PV` or :term:`PF`, for example, contain information about
+ the name or version of a recipe or package. Usually, the defaults set in
+ ``bitbake.conf`` are used, but these are sometimes overridden in
+ recipes.
+
+- :term:`SUMMARY`, :term:`DESCRIPTION`, :term:`LICENSE` or :term:`HOMEPAGE`
+ contain the expected information and should be set specifically for every
+ recipe.
+
+- In recipes, variables are also used to control the build-time and runtime
+ dependencies of a recipe or package on other recipes or packages. The
+ most common are :term:`PROVIDES`, :term:`RPROVIDES`, :term:`DEPENDS`,
+ and :term:`RDEPENDS`.
+
+- There are further variables starting with ``SRC`` that specify the sources in
+ a recipe like :term:`SRC_URI` or :term:`SRCDATE`. Those are also usually set
+ in recipes.
+
+- Which version or provider of a recipe is given preference when multiple
+ recipes would provide the same item is controlled by variables starting
+ with ``PREFERRED_``. Those are normally set in the configuration files of
+ a ``MACHINE`` or ``DISTRO``.
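+
+For example, a distro configuration could pin the provider of
+``virtual/kernel`` (the recipe name is illustrative only)::
+
+ PREFERRED_PROVIDER_virtual/kernel = "linux-yocto"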
diff --git a/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.rst b/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.rst
index e1b640e2f..899e584f9 100644
--- a/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.rst
+++ b/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.rst
@@ -23,18 +23,15 @@ overview of their function and contents.
systems extend the functionality of the variable as it is
described here in this glossary.
- - Finally, there are variables mentioned in this glossary that do
- not appear in the BitBake glossary. These other variables are
- variables used in systems that use BitBake.
-
.. glossary::
+ :sorted:
:term:`ASSUME_PROVIDED`
Lists recipe names (:term:`PN` values) BitBake does not
attempt to build. Instead, BitBake assumes these recipes have already
been built.
- In OpenEmbedded-Core, ``ASSUME_PROVIDED`` mostly specifies native
+ In OpenEmbedded-Core, :term:`ASSUME_PROVIDED` mostly specifies native
tools that should not be built. An example is ``git-native``, which
when specified allows for the Git binary from the host to be used
rather than building ``git-native``.
@@ -43,8 +40,7 @@ overview of their function and contents.
Azure Storage Shared Access Signature, when using the
:ref:`Azure Storage fetcher <bitbake-user-manual/bitbake-user-manual-fetching:fetchers>`
This variable can be defined to be used by the fetcher to authenticate
- and gain access to non-public artifacts.
- ::
+ and gain access to non-public artifacts::
AZ_SAS = ""se=2021-01-01&sp=r&sv=2018-11-09&sr=c&skoid=<skoid>&sig=<signature>""
@@ -87,14 +83,41 @@ overview of their function and contents.
- Attempts to access networks not in the host list cause a failure.
- Using ``BB_ALLOWED_NETWORKS`` in conjunction with
+ Using :term:`BB_ALLOWED_NETWORKS` in conjunction with
:term:`PREMIRRORS` is very useful. Adding the
- host you want to use to ``PREMIRRORS`` results in the source code
+ host you want to use to :term:`PREMIRRORS` results in the source code
being fetched from an allowed location and avoids raising an error
when a host that is not allowed is in a
:term:`SRC_URI` statement. This is because the
- fetcher does not attempt to use the host listed in ``SRC_URI`` after
- a successful fetch from the ``PREMIRRORS`` occurs.
+ fetcher does not attempt to use the host listed in :term:`SRC_URI` after
+ a successful fetch from the :term:`PREMIRRORS` occurs.
+
+ :term:`BB_BASEHASH_IGNORE_VARS`
+ Lists variables that are excluded from checksum and dependency data.
+ Variables that are excluded can therefore change without affecting
+ the checksum mechanism. A common example would be the variable for
+ the path of the build. BitBake's output should not (and usually does
+ not) depend on the directory in which it was built.
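+
+ For instance, one might extend the list with a custom variable holding a
+ build-host-specific path (the variable name is hypothetical)::
+
+ BB_BASEHASH_IGNORE_VARS += "MY_BUILD_LOGDIR"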
+
+ :term:`BB_CACHEDIR`
+ Specifies the code parser cache directory (distinct from :term:`CACHE`
+ and :term:`PERSISTENT_DIR` although they can be set to the same value
+ if desired). The default value is "${TOPDIR}/cache".
+
+ :term:`BB_CHECK_SSL_CERTS`
+ Specifies if SSL certificates should be checked when fetching. The default
+ value is ``1`` and certificates are not checked if the value is set to ``0``.
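+
+ For example, certificate checking can be disabled (use with care)::
+
+ BB_CHECK_SSL_CERTS = "0"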
+
+ :term:`BB_HASH_CODEPARSER_VALS`
+ Specifies values for variables to use when populating the codeparser cache.
+ This can be used selectively to set dummy values for variables to avoid
+ the codeparser cache growing on every parse. Variables that would typically
+ be included are those where the value is not significant for where the
+ codeparser cache is used (i.e. when calculating variable dependencies for
+ code fragments.) The value is space-separated without quoting values, for
+ example::
+
+ BB_HASH_CODEPARSER_VALS = "T=/ WORKDIR=/ DATE=1234 TIME=1234"
:term:`BB_CONSOLELOG`
Specifies the path to a log file into which BitBake's user interface
@@ -137,7 +160,7 @@ overview of their function and contents.
where:
<action> is:
- ABORT: Immediately abort the build when
+ HALT: Immediately halt the build when
a threshold is broken.
STOPTASKS: Stop the build after the currently
executing tasks have finished when
@@ -168,41 +191,41 @@ overview of their function and contents.
Here are some examples::
- BB_DISKMON_DIRS = "ABORT,${TMPDIR},1G,100K WARN,${SSTATE_DIR},1G,100K"
+ BB_DISKMON_DIRS = "HALT,${TMPDIR},1G,100K WARN,${SSTATE_DIR},1G,100K"
BB_DISKMON_DIRS = "STOPTASKS,${TMPDIR},1G"
- BB_DISKMON_DIRS = "ABORT,${TMPDIR},,100K"
+ BB_DISKMON_DIRS = "HALT,${TMPDIR},,100K"
The first example works only if you also set the
:term:`BB_DISKMON_WARNINTERVAL`
- variable. This example causes the build system to immediately abort
+ variable. This example causes the build system to immediately halt
when either the disk space in ``${TMPDIR}`` drops below 1 Gbyte or
the available free inodes drops below 100 Kbytes. Because two
directories are provided with the variable, the build system also
issues a warning when the disk space in the ``${SSTATE_DIR}``
directory drops below 1 Gbyte or the number of free inodes drops
below 100 Kbytes. Subsequent warnings are issued during intervals as
- defined by the ``BB_DISKMON_WARNINTERVAL`` variable.
+ defined by the :term:`BB_DISKMON_WARNINTERVAL` variable.
The second example stops the build after all currently executing
tasks complete when the minimum disk space in the ``${TMPDIR}``
directory drops below 1 Gbyte. No disk monitoring occurs for the free
inodes in this case.
- The final example immediately aborts the build when the number of
+ The final example immediately halts the build when the number of
free inodes in the ``${TMPDIR}`` directory drops below 100 Kbytes. No
disk space monitoring for the directory itself occurs in this case.
:term:`BB_DISKMON_WARNINTERVAL`
Defines the disk space and free inode warning intervals.
- If you are going to use the ``BB_DISKMON_WARNINTERVAL`` variable, you
+ If you are going to use the :term:`BB_DISKMON_WARNINTERVAL` variable, you
must also use the :term:`BB_DISKMON_DIRS`
variable and define its action as "WARN". During the build,
subsequent warnings are issued each time disk space or number of free
inodes further reduces by the respective interval.
- If you do not provide a ``BB_DISKMON_WARNINTERVAL`` variable and you
- do use ``BB_DISKMON_DIRS`` with the "WARN" action, the disk
+ If you do not provide a :term:`BB_DISKMON_WARNINTERVAL` variable and you
+ do use :term:`BB_DISKMON_DIRS` with the "WARN" action, the disk
monitoring interval defaults to the following:
BB_DISKMON_WARNINTERVAL = "50M,5K"
@@ -235,23 +258,23 @@ overview of their function and contents.
based on the interval occur each time a respective interval is
reached beyond the initial warning (i.e. 1 Gbytes and 100 Kbytes).
- :term:`BB_ENV_WHITELIST`
- Specifies the internal whitelist of variables to allow through from
+ :term:`BB_ENV_PASSTHROUGH`
+ Specifies the internal list of variables to allow through from
the external environment into BitBake's datastore. If the value of
this variable is not specified (which is the default), the following
list is used: :term:`BBPATH`, :term:`BB_PRESERVE_ENV`,
- :term:`BB_ENV_WHITELIST`, and :term:`BB_ENV_EXTRAWHITE`.
+ :term:`BB_ENV_PASSTHROUGH`, and :term:`BB_ENV_PASSTHROUGH_ADDITIONS`.
.. note::
You must set this variable in the external environment in order
for it to work.
- :term:`BB_ENV_EXTRAWHITE`
- Specifies an additional set of variables to allow through (whitelist)
- from the external environment into BitBake's datastore. This list of
- variables are on top of the internal list set in
- :term:`BB_ENV_WHITELIST`.
+ :term:`BB_ENV_PASSTHROUGH_ADDITIONS`
+ Specifies an additional set of variables to allow through from the
+ external environment into BitBake's datastore. This list of variables
+ are on top of the internal list set in
+ :term:`BB_ENV_PASSTHROUGH`.
.. note::
@@ -267,7 +290,7 @@ overview of their function and contents.
:term:`BB_FILENAME`
Contains the filename of the recipe that owns the currently running
task. For example, if the ``do_fetch`` task that resides in the
- ``my-recipe.bb`` is executing, the ``BB_FILENAME`` variable contains
+ ``my-recipe.bb`` is executing, the :term:`BB_FILENAME` variable contains
"/foo/path/my-recipe.bb".
:term:`BB_GENERATE_MIRROR_TARBALLS`
@@ -280,24 +303,69 @@ overview of their function and contents.
BB_GENERATE_MIRROR_TARBALLS = "1"
- :term:`BB_HASHCONFIG_WHITELIST`
- Lists variables that are excluded from base configuration checksum,
- which is used to determine if the cache can be reused.
+ :term:`BB_GENERATE_SHALLOW_TARBALLS`
+ Setting this variable to "1" when :term:`BB_GIT_SHALLOW` is also set to
+ "1" causes bitbake to generate shallow mirror tarballs when fetching git
+ repositories. The number of commits included in the shallow mirror
+ tarballs is controlled by :term:`BB_GIT_SHALLOW_DEPTH`.
- One of the ways BitBake determines whether to re-parse the main
- metadata is through checksums of the variables in the datastore of
- the base configuration data. There are variables that you typically
- want to exclude when checking whether or not to re-parse and thus
- rebuild the cache. As an example, you would usually exclude ``TIME``
- and ``DATE`` because these variables are always changing. If you did
- not exclude them, BitBake would never reuse the cache.
+ If both :term:`BB_GIT_SHALLOW` and :term:`BB_GENERATE_MIRROR_TARBALLS` are
+ enabled, bitbake will generate shallow mirror tarballs by default for git
+ repositories. This separate variable exists so that shallow tarball
+ generation can be enabled without needing to also enable normal mirror
+ generation if it is not desired.
- :term:`BB_HASHBASE_WHITELIST`
- Lists variables that are excluded from checksum and dependency data.
- Variables that are excluded can therefore change without affecting
- the checksum mechanism. A common example would be the variable for
- the path of the build. BitBake's output should not (and usually does
- not) depend on the directory in which it was built.
+ For example usage, see :term:`BB_GIT_SHALLOW`.
+
+ :term:`BB_GIT_SHALLOW`
+      Setting this variable to "1" enables support for fetching, using and
+ generating mirror tarballs of `shallow git repositories <https://riptutorial.com/git/example/4584/shallow-clone>`_.
+ The external `git-make-shallow <https://git.openembedded.org/bitbake/tree/bin/git-make-shallow>`_
+ script is used for shallow mirror tarball creation.
+
+ When :term:`BB_GIT_SHALLOW` is enabled, bitbake will attempt to fetch a shallow
+ mirror tarball. If the shallow mirror tarball cannot be fetched, it will
+ try to fetch the full mirror tarball and use that.
+
+ When a mirror tarball is not available, a full git clone will be performed
+ regardless of whether this variable is set or not. Support for shallow
+ clones is not currently implemented as git does not directly support
+ shallow cloning a particular git commit hash (it only supports cloning
+ from a tag or branch reference).
+
+ See also :term:`BB_GIT_SHALLOW_DEPTH` and
+ :term:`BB_GENERATE_SHALLOW_TARBALLS`.
+
+ Example usage::
+
+ BB_GIT_SHALLOW ?= "1"
+
+ # Keep only the top commit
+ BB_GIT_SHALLOW_DEPTH ?= "1"
+
+ # This defaults to enabled if both BB_GIT_SHALLOW and
+ # BB_GENERATE_MIRROR_TARBALLS are enabled
+ BB_GENERATE_SHALLOW_TARBALLS ?= "1"
+
+ :term:`BB_GIT_SHALLOW_DEPTH`
+ When used with :term:`BB_GENERATE_SHALLOW_TARBALLS`, this variable sets
+ the number of commits to include in generated shallow mirror tarballs.
+ With a depth of 1, only the commit referenced in :term:`SRCREV` is
+ included in the shallow mirror tarball. Increasing the depth includes
+ additional parent commits, working back through the commit history.
+
+ If this variable is unset, bitbake will default to a depth of 1 when
+ generating shallow mirror tarballs.
+
+ For example usage, see :term:`BB_GIT_SHALLOW`.
+
+ :term:`BB_GLOBAL_PYMODULES`
+ Specifies the list of Python modules to place in the global namespace.
+ It is intended that only the core layer should set this and it is meant
+ to be a very small list, typically just ``os`` and ``sys``.
+ :term:`BB_GLOBAL_PYMODULES` is expected to be set before the first
+ ``addpylib`` directive.
+ See also ":ref:`bitbake-user-manual/bitbake-user-manual-metadata:extending python library code`".
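+
+ For example, matching the typical list mentioned above::
+
+ BB_GLOBAL_PYMODULES = "os sys"
+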
:term:`BB_HASHCHECK_FUNCTION`
Specifies the name of the function to call during the "setscene" part
@@ -313,12 +381,66 @@ overview of their function and contents.
However, the more accurate the data returned, the more efficient the
build will be.
+ :term:`BB_HASHCONFIG_IGNORE_VARS`
+ Lists variables that are excluded from base configuration checksum,
+ which is used to determine if the cache can be reused.
+
+ One of the ways BitBake determines whether to re-parse the main
+ metadata is through checksums of the variables in the datastore of
+ the base configuration data. There are variables that you typically
+ want to exclude when checking whether or not to re-parse and thus
+ rebuild the cache. As an example, you would usually exclude ``TIME``
+ and ``DATE`` because these variables are always changing. If you did
+ not exclude them, BitBake would never reuse the cache.
+
+ :term:`BB_HASHSERVE`
+ Specifies the Hash Equivalence server to use.
+
+ If set to ``auto``, BitBake automatically starts its own server
+ over a UNIX domain socket. Optionally, this server can be connected
+ to an upstream one by setting :term:`BB_HASHSERVE_UPSTREAM`.
+
+ If set to ``unix://path``, BitBake will connect to an existing
+ hash server available over a UNIX domain socket.
+
+ If set to ``host:port``, BitBake will connect to a remote server on the
+ specified host. This allows multiple clients to share the same
+ hash equivalence data.
+
+ The remote server can be started manually through
+ the ``bin/bitbake-hashserv`` script provided by BitBake,
+ which supports UNIX domain sockets too. This script also allows you
+ to start the server in read-only mode, to avoid accepting
+ equivalences that correspond to Shared State caches that are
+ only available on specific clients.
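+
+ For example, to let BitBake start and manage its own local server::
+
+ BB_HASHSERVE = "auto"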
+
+ :term:`BB_HASHSERVE_UPSTREAM`
+ Specifies an upstream Hash Equivalence server.
+
+ This optional setting is only useful when a local Hash Equivalence
+ server is started (setting :term:`BB_HASHSERVE` to ``auto``),
+ and you wish the local server to query an upstream server for
+ Hash Equivalence data.
+
+ Example usage::
+
+ BB_HASHSERVE_UPSTREAM = "hashserv.yocto.io:8687"
+
:term:`BB_INVALIDCONF`
Used in combination with the ``ConfigParsed`` event to trigger
re-parsing the base metadata (i.e. all the recipes). The
``ConfigParsed`` event can set the variable to trigger the re-parse.
You must be careful to avoid recursive loops with this functionality.
+ :term:`BB_LOADFACTOR_MAX`
+ Setting this to a value causes BitBake to check the system load
+ average before executing new tasks. If the load average is above the
+ number of CPUs multiplied by this factor, no new task is started
+ unless no task is currently executing. A value of "1.5" has been found
+ to work reasonably. This is helpful for systems that don't have
+ pressure regulation enabled; pressure values are more granular and
+ take precedence over the load factor.
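+
+ For example, using the value mentioned above::
+
+ BB_LOADFACTOR_MAX = "1.5"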
+
:term:`BB_LOGCONFIG`
Specifies the name of a config file that contains the user logging
configuration. See
@@ -327,8 +449,8 @@ overview of their function and contents.
:term:`BB_LOGFMT`
Specifies the name of the log files saved into
- ``${``\ :term:`T`\ ``}``. By default, the ``BB_LOGFMT``
- variable is undefined and the log file names get created using the
+ ``${``\ :term:`T`\ ``}``. By default, the :term:`BB_LOGFMT`
+ variable is undefined and the log filenames get created using the
following form::
log.{task}.{pid}
@@ -336,6 +458,19 @@ overview of their function and contents.
If you want to force log files to take a specific name, you can set this
variable in a configuration file.
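+
+ For instance, to drop the PID from the log filenames, a sketch based
+ on the fields shown above could be::
+
+ BB_LOGFMT = "log.{task}"
+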
+ :term:`BB_MULTI_PROVIDER_ALLOWED`
+ Allows you to suppress BitBake warnings caused when building two
+ separate recipes that provide the same output.
+
+ BitBake normally issues a warning when building two different recipes
+ where each provides the same output. This scenario is usually
+ something the user does not want. However, cases do exist where it
+ makes sense, particularly in the ``virtual/*`` namespace. You can use
+ this variable to suppress BitBake's warnings.
+
+ To use the variable, list provider names (e.g. recipe names,
+ ``virtual/kernel``, and so forth).
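+
+ For example::
+
+ BB_MULTI_PROVIDER_ALLOWED += "virtual/kernel"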
+
:term:`BB_NICE_LEVEL`
Allows BitBake to run at a specific priority (i.e. nice level).
System permissions usually mean that BitBake can reduce its priority
@@ -351,19 +486,20 @@ overview of their function and contents.
running builds when not connected to the Internet, and when operating
in certain kinds of firewall environments.
+ :term:`BB_NUMBER_PARSE_THREADS`
+ Sets the number of threads BitBake uses when parsing. By default, the
+ number of threads is equal to the number of cores on the system.
+
:term:`BB_NUMBER_THREADS`
The maximum number of tasks BitBake should run in parallel at any one
time. If your host development system supports multiple cores, a good
rule of thumb is to set this variable to twice the number of cores.
- :term:`BB_NUMBER_PARSE_THREADS`
- Sets the number of threads BitBake uses when parsing. By default, the
- number of threads is equal to the number of cores on the system.
-
:term:`BB_ORIGENV`
Contains a copy of the original external environment in which BitBake
- was run. The copy is taken before any whitelisted variable values are
- filtered into BitBake's datastore.
+ was run. The copy is taken before any variable values configured to
+ pass through from the external environment are filtered into BitBake's
+ datastore.
.. note::
@@ -371,21 +507,72 @@ overview of their function and contents.
queried using the normal datastore operations.
:term:`BB_PRESERVE_ENV`
- Disables whitelisting and instead allows all variables through from
- the external environment into BitBake's datastore.
+ Disables environment filtering and instead allows all variables through
+ from the external environment into BitBake's datastore.
.. note::
You must set this variable in the external environment in order
for it to work.
+ :term:`BB_PRESSURE_MAX_CPU`
+ Specifies a maximum CPU pressure threshold, above which BitBake's
+ scheduler will not start new tasks (providing there is at least
+ one active task). If no value is set, CPU pressure is not
+ monitored when starting tasks.
+
+ The pressure data is calculated based upon what Linux kernels since
+ version 4.20 expose under ``/proc/pressure``. The threshold represents
+ the difference in "total" pressure from the previous second. The
+ minimum value is 1.0 (extremely slow builds) and the maximum is
+ 1000000 (a pressure value unlikely to ever be reached).
+
+ This threshold can be set in ``conf/local.conf`` as::
+
+ BB_PRESSURE_MAX_CPU = "500"
+
+ :term:`BB_PRESSURE_MAX_IO`
+ Specifies a maximum I/O pressure threshold, above which BitBake's
+ scheduler will not start new tasks (providing there is at least
+ one active task). If no value is set, I/O pressure is not
+ monitored when starting tasks.
+
+ The pressure data is calculated based upon what Linux kernels since
+ version 4.20 expose under ``/proc/pressure``. The threshold represents
+ the difference in "total" pressure from the previous second. The
+ minimum value is 1.0 (extremely slow builds) and the maximum is
+ 1000000 (a pressure value unlikely to ever be reached).
+
+ At this point in time, experiments show that I/O pressure tends to
+ be short-lived, and regulating just the CPU with
+ :term:`BB_PRESSURE_MAX_CPU` can help to reduce it.
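+
+ Like :term:`BB_PRESSURE_MAX_CPU`, this threshold can be set in
+ ``conf/local.conf``, for example::
+
+ BB_PRESSURE_MAX_IO = "500"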
+
+ :term:`BB_PRESSURE_MAX_MEMORY`
+ Specifies a maximum memory pressure threshold, above which BitBake's
+ scheduler will not start new tasks (providing there is at least
+ one active task). If no value is set, memory pressure is not
+ monitored when starting tasks.
+
+ The pressure data is calculated based upon what Linux kernels since
+ version 4.20 expose under ``/proc/pressure``. The threshold represents
+ the difference in "total" pressure from the previous second. The
+ minimum value is 1.0 (extremely slow builds) and the maximum is
+ 1000000 (a pressure value unlikely to ever be reached).
+
+ Memory pressure is experienced when time is spent swapping,
+ refaulting pages from the page cache or performing direct reclaim.
+ This is why memory pressure is rarely seen, but setting this variable
+ might be useful as a last resort to prevent OOM errors if they are
+ occurring during builds.
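+
+ As with the other pressure thresholds, a value can be set in
+ ``conf/local.conf``, for example::
+
+ BB_PRESSURE_MAX_MEMORY = "500"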
+
:term:`BB_RUNFMT`
Specifies the name of the executable script files (i.e. run files)
saved into ``${``\ :term:`T`\ ``}``. By default, the
- ``BB_RUNFMT`` variable is undefined and the run file names get
+ :term:`BB_RUNFMT` variable is undefined and the run filenames get
created using the following form::
- run.{task}.{pid}
+ run.{func}.{pid}
If you want to force run files to take a specific name, you can set this
variable in a configuration file.
@@ -399,14 +586,14 @@ overview of their function and contents.
Selects the name of the scheduler to use for the scheduling of
BitBake tasks. Three options exist:
- - *basic* - The basic framework from which everything derives. Using
+ - *basic* --- the basic framework from which everything derives. Using
this option causes tasks to be ordered numerically as they are
parsed.
- - *speed* - Executes tasks first that have more tasks depending on
+ - *speed* --- executes tasks first that have more tasks depending on
them. The "speed" option is the default.
- - *completion* - Causes the scheduler to try to complete a given
+ - *completion* --- causes the scheduler to try to complete a given
recipe once its build has started.
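+
+ For example, to switch to the completion scheduler::
+
+ BB_SCHEDULER = "completion"
+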
:term:`BB_SCHEDULERS`
@@ -426,17 +613,6 @@ overview of their function and contents.
The function specified by this variable returns a "True" or "False"
depending on whether the dependency needs to be met.
- :term:`BB_SETSCENE_VERIFY_FUNCTION2`
- Specifies a function to call that verifies the list of planned task
- execution before the main task execution happens. The function is
- called once BitBake has a list of setscene tasks that have run and
- either succeeded or failed.
-
- The function allows for a task list check to see if they make sense.
- Even if BitBake was planning to skip a task, the returned value of
- the function can force BitBake to run the task, which is necessary
- under certain metadata defined circumstances.
-
:term:`BB_SIGNATURE_EXCLUDE_FLAGS`
Lists variable flags (varflags) that can be safely excluded from
checksum and dependency data for keys in the datastore. When
@@ -459,40 +635,17 @@ overview of their function and contents.
:term:`BB_SRCREV_POLICY`
Defines the behavior of the fetcher when it interacts with source
control systems and dynamic source revisions. The
- ``BB_SRCREV_POLICY`` variable is useful when working without a
+ :term:`BB_SRCREV_POLICY` variable is useful when working without a
network.
The variable can be set using one of two policies:
- - *cache* - Retains the value the system obtained previously rather
+ - *cache* --- retains the value the system obtained previously rather
than querying the source control system each time.
- - *clear* - Queries the source controls system every time. With this
+ - *clear* --- queries the source control system every time. With this
policy, there is no cache. The "clear" policy is the default.
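+
+ For example, to retain previously obtained revisions when working
+ offline::
+
+ BB_SRCREV_POLICY = "cache"
+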
- :term:`BB_STAMP_POLICY`
- Defines the mode used for how timestamps of stamp files are compared.
- You can set the variable to one of the following modes:
-
- - *perfile* - Timestamp comparisons are only made between timestamps
- of a specific recipe. This is the default mode.
-
- - *full* - Timestamp comparisons are made for all dependencies.
-
- - *whitelist* - Identical to "full" mode except timestamp
- comparisons are made for recipes listed in the
- :term:`BB_STAMP_WHITELIST` variable.
-
- .. note::
-
- Stamp policies are largely obsolete with the introduction of
- setscene tasks.
-
- :term:`BB_STAMP_WHITELIST`
- Lists files whose stamp file timestamps are compared when the stamp
- policy mode is set to "whitelist". For information on stamp policies,
- see the :term:`BB_STAMP_POLICY` variable.
-
:term:`BB_STRICT_CHECKSUM`
Sets a more strict checksum mechanism for non-local URLs. Setting
this variable to a value causes BitBake to report an error if it
@@ -503,7 +656,7 @@ overview of their function and contents.
Allows adjustment of a task's Input/Output priority. During
Autobuilder testing, random failures can occur for tasks due to I/O
starvation. These failures occur during various QEMU runtime
- timeouts. You can use the ``BB_TASK_IONICE_LEVEL`` variable to adjust
+ timeouts. You can use the :term:`BB_TASK_IONICE_LEVEL` variable to adjust
the I/O priority of these tasks.
.. note::
@@ -538,7 +691,7 @@ overview of their function and contents.
You can use this variable in combination with task overrides to raise
or lower priorities of specific tasks. For example, on the `Yocto
- Project <http://www.yoctoproject.org>`__ autobuilder, QEMU emulation
+ Project <https://www.yoctoproject.org>`__ autobuilder, QEMU emulation
in images is given a higher priority as compared to build tasks to
ensure that images do not suffer timeouts on loaded systems.
@@ -577,13 +730,13 @@ overview of their function and contents.
.. note::
- Internally, the ``BBCLASSEXTEND`` mechanism generates recipe
+ Internally, the :term:`BBCLASSEXTEND` mechanism generates recipe
variants by rewriting variable values and applying overrides such
as ``_class-native``. For example, to generate a native version of
a recipe, a :term:`DEPENDS` on "foo" is
- rewritten to a ``DEPENDS`` on "foo-native".
+ rewritten to a :term:`DEPENDS` on "foo-native".
- Even when using ``BBCLASSEXTEND``, the recipe is only parsed once.
+ Even when using :term:`BBCLASSEXTEND`, the recipe is only parsed once.
Parsing once adds some limitations. For example, it is not
possible to include a different file depending on the variant,
since ``include`` statements are processed when the recipe is
@@ -616,17 +769,17 @@ overview of their function and contents.
This variable is useful in situations where the same recipe appears
in more than one layer. Setting this variable allows you to
prioritize a layer against other layers that contain the same recipe
- - effectively letting you control the precedence for the multiple
+ --- effectively letting you control the precedence for the multiple
layers. The precedence established through this variable stands
regardless of a recipe's version (:term:`PV` variable).
- For example, a layer that has a recipe with a higher ``PV`` value but
- for which the ``BBFILE_PRIORITY`` is set to have a lower precedence
+ For example, a layer that has a recipe with a higher :term:`PV` value but
+ for which the :term:`BBFILE_PRIORITY` is set to have a lower precedence
still has a lower precedence.
- A larger value for the ``BBFILE_PRIORITY`` variable results in a
+ A larger value for the :term:`BBFILE_PRIORITY` variable results in a
higher precedence. For example, the value 6 has a higher precedence
- than the value 5. If not specified, the ``BBFILE_PRIORITY`` variable
- is set based on layer dependencies (see the ``LAYERDEPENDS`` variable
+ than the value 5. If not specified, the :term:`BBFILE_PRIORITY` variable
+ is set based on layer dependencies (see the :term:`LAYERDEPENDS` variable
for more information). The default priority, if unspecified for a
layer with no dependencies, is the lowest defined priority + 1 (or 1
if no priorities are defined).
@@ -649,7 +802,7 @@ overview of their function and contents.
Activates content depending on presence of identified layers. You
identify the layers by the collections that the layers define.
- Use the ``BBFILES_DYNAMIC`` variable to avoid ``.bbappend`` files whose
+ Use the :term:`BBFILES_DYNAMIC` variable to avoid ``.bbappend`` files whose
corresponding ``.bb`` file is in a layer that attempts to modify other
layers through ``.bbappend`` but does not want to introduce a hard
dependency on those other layers.
@@ -658,7 +811,7 @@ overview of their function and contents.
``.bb`` files in case a layer is not present. Use this to avoid a hard
dependency on those other layers.
- Use the following form for ``BBFILES_DYNAMIC``::
+ Use the following form for :term:`BBFILES_DYNAMIC`::
collection_name:filename_pattern
@@ -678,7 +831,7 @@ overview of their function and contents.
"
This next example shows an error message that occurs because invalid
- entries are found, which cause parsing to abort::
+ entries are found, which cause parsing to fail::
ERROR: BBFILES_DYNAMIC entries must be of the form {!}<collection name>:<filename pattern>, not:
/work/my-layer/bbappends/meta-security-isafw/*/*/*.bbappend
@@ -695,7 +848,7 @@ overview of their function and contents.
:term:`BBINCLUDELOGS_LINES`
If :term:`BBINCLUDELOGS` is set, specifies
the maximum number of lines from the task log file to print when
- reporting a failed task. If you do not set ``BBINCLUDELOGS_LINES``,
+ reporting a failed task. If you do not set :term:`BBINCLUDELOGS_LINES`,
the entire log is printed.
:term:`BBLAYERS`
@@ -721,7 +874,7 @@ overview of their function and contents.
:term:`BBMASK`
Prevents BitBake from processing recipes and recipe append files.
- You can use the ``BBMASK`` variable to "hide" these ``.bb`` and
+ You can use the :term:`BBMASK` variable to "hide" these ``.bb`` and
``.bbappend`` files. BitBake ignores any recipe or recipe append
files that match any of the expressions. It is as if BitBake does not
see them at all. Consequently, matching files are not parsed or
@@ -758,7 +911,7 @@ overview of their function and contents.
Enables BitBake to perform multiple configuration builds and lists
each separate configuration (multiconfig). You can use this variable
to cause BitBake to build multiple targets where each target has a
- separate configuration. Define ``BBMULTICONFIG`` in your
+ separate configuration. Define :term:`BBMULTICONFIG` in your
``conf/local.conf`` configuration file.
As an example, the following line specifies three multiconfigs, each
@@ -770,18 +923,18 @@ overview of their function and contents.
build directory within a directory named ``conf/multiconfig`` (e.g.
build_directory\ ``/conf/multiconfig/configA.conf``).
- For information on how to use ``BBMULTICONFIG`` in an environment
+ For information on how to use :term:`BBMULTICONFIG` in an environment
that supports building targets with multiple configurations, see the
":ref:`bitbake-user-manual/bitbake-user-manual-intro:executing a multiple configuration build`"
section.
:term:`BBPATH`
- Used by BitBake to locate class (``.bbclass``) and configuration
- (``.conf``) files. This variable is analogous to the ``PATH``
- variable.
+ A colon-separated list used by BitBake to locate class (``.bbclass``)
+ and configuration (``.conf``) files. This variable is analogous to the
+ ``PATH`` variable.
If you run BitBake from a directory outside of the build directory,
- you must be sure to set ``BBPATH`` to point to the build directory.
+ you must be sure to set :term:`BBPATH` to point to the build directory.
Set the variable as you would any environment variable and then run
BitBake::
@@ -797,16 +950,6 @@ overview of their function and contents.
Allows you to use a configuration file to add to the list of
command-line target recipes you want to build.
- :term:`BBVERSIONS`
- Allows a single recipe to build multiple versions of a project from a
- single recipe file. You also able to specify conditional metadata
- using the :term:`OVERRIDES` mechanism for a
- single version or for an optionally named range of versions.
-
- For more information on ``BBVERSIONS``, see the
- ":ref:`bitbake-user-manual/bitbake-user-manual-metadata:variants - class extension mechanism`"
- section.
-
:term:`BITBAKE_UI`
Used to specify the UI module to use when running BitBake. Using this
variable is equivalent to using the ``-u`` command-line option.
@@ -838,7 +981,7 @@ overview of their function and contents.
The most common usage of this variable is to set it to "-1" within
a recipe for a development version of a piece of software. Using the
variable in this way causes the stable version of the recipe to build
- by default in the absence of ``PREFERRED_VERSION`` being used to
+ by default in the absence of :term:`PREFERRED_VERSION` being used to
build the development version.
.. note::
@@ -851,7 +994,7 @@ overview of their function and contents.
Lists a recipe's build-time dependencies (i.e. other recipe files).
Consider this simple example for two recipes named "a" and "b" that
- produce similarly named packages. In this example, the ``DEPENDS``
+ produce similarly named packages. In this example, the :term:`DEPENDS`
statement appears in the "a" recipe::
DEPENDS = "b"
@@ -869,7 +1012,7 @@ overview of their function and contents.
:term:`DL_DIR`
The central download directory used by the build process to store
- downloads. By default, ``DL_DIR`` gets files suitable for mirroring for
+ downloads. By default, :term:`DL_DIR` gets files suitable for mirroring for
everything except Git repositories. If you want tarballs of Git
repositories, use the :term:`BB_GENERATE_MIRROR_TARBALLS` variable.
@@ -880,18 +1023,18 @@ overview of their function and contents.
``bblayers.conf`` configuration file.
To exclude a recipe from a world build using this variable, set the
- variable to "1" in the recipe.
+ variable to "1" in the recipe. Set it to "0" to add it back to the world build.
.. note::
- Recipes added to ``EXCLUDE_FROM_WORLD`` may still be built during a world
+ Recipes added to :term:`EXCLUDE_FROM_WORLD` may still be built during a world
build in order to satisfy dependencies of other recipes. Adding a
- recipe to ``EXCLUDE_FROM_WORLD`` only ensures that the recipe is not
+ recipe to :term:`EXCLUDE_FROM_WORLD` only ensures that the recipe is not
explicitly added to the list of build targets in a world build.
:term:`FAKEROOT`
Contains the command to use when running a shell script in a fakeroot
- environment. The ``FAKEROOT`` variable is obsolete and has been
+ environment. The :term:`FAKEROOT` variable is obsolete and has been
replaced by the other ``FAKEROOT*`` variables. See these entries in
the glossary for more information.
@@ -938,6 +1081,11 @@ overview of their function and contents.
environment variable. The value is a colon-separated list of
directories that are searched left-to-right in order.
+ :term:`FILE_LAYERNAME`
+ During parsing and task execution, this is set to the name of the
+ layer containing the recipe file. Code can use this to identify which
+ layer a recipe is from.
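+
+ As a sketch, an anonymous Python function could report the layer a
+ recipe comes from (the message text is illustrative only)::
+
+ python () {
+     bb.note("Recipe provided by layer: %s" % d.getVar("FILE_LAYERNAME"))
+ }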
+
:term:`GITDIR`
The directory in which a local copy of a Git repository is stored
when it is cloned.
@@ -954,9 +1102,9 @@ overview of their function and contents.
Causes the named class or classes to be inherited globally. Anonymous
functions in the class or classes are not executed for the base
configuration and in each individual recipe. The OpenEmbedded build
- system ignores changes to ``INHERIT`` in individual recipes.
+ system ignores changes to :term:`INHERIT` in individual recipes.
- For more information on ``INHERIT``, see the
+ For more information on :term:`INHERIT`, see the
":ref:`bitbake-user-manual/bitbake-user-manual-metadata:\`\`inherit\`\` configuration directive`"
section.
@@ -986,6 +1134,29 @@ overview of their function and contents.
variable is not available outside of ``layer.conf`` and references
are expanded immediately when parsing of the file completes.
+ :term:`LAYERSERIES_COMPAT`
+ Lists the versions of OpenEmbedded-Core (OE-Core) with which
+ a layer is compatible. Using the :term:`LAYERSERIES_COMPAT` variable
+ allows the layer maintainer to indicate which combinations of the
+ layer and OE-Core can be expected to work. The variable gives the
+ system a way to detect when a layer has not been tested with new
+ releases of OE-Core (e.g. the layer is not maintained).
+
+ To specify the OE-Core versions for which a layer is compatible, use
+ this variable in your layer's ``conf/layer.conf`` configuration file.
+ For the list, use the Yocto Project release name (e.g. "kirkstone",
+ "mickledore"). To specify multiple OE-Core versions for the layer, use
+ a space-separated list::
+
+ LAYERSERIES_COMPAT_layer_root_name = "kirkstone mickledore"
+
+ .. note::
+
+ Setting :term:`LAYERSERIES_COMPAT` is required by the Yocto Project
+ Compatible version 2 standard.
+ The OpenEmbedded build system produces a warning if the variable
+ is not set for any given layer.
+
:term:`LAYERVERSION`
Optionally specifies the version of a layer as a single number. You
can use this variable within
@@ -1004,29 +1175,16 @@ overview of their function and contents.
the build system searches for source code, it first tries the local
download directory. If that location fails, the build system tries
locations defined by :term:`PREMIRRORS`, the
- upstream source, and then locations specified by ``MIRRORS`` in that
+ upstream source, and then locations specified by :term:`MIRRORS` in that
order.
- :term:`MULTI_PROVIDER_WHITELIST`
- Allows you to suppress BitBake warnings caused when building two
- separate recipes that provide the same output.
-
- BitBake normally issues a warning when building two different recipes
- where each provides the same output. This scenario is usually
- something the user does not want. However, cases do exist where it
- makes sense, particularly in the ``virtual/*`` namespace. You can use
- this variable to suppress BitBake's warnings.
-
- To use the variable, list provider names (e.g. recipe names,
- ``virtual/kernel``, and so forth).
-
:term:`OVERRIDES`
- BitBake uses ``OVERRIDES`` to control what variables are overridden
- after BitBake parses recipes and configuration files.
+ A colon-separated list that BitBake uses to control what variables are
+ overridden after BitBake parses recipes and configuration files.
Following is a simple example that uses an overrides list based on
machine architectures: ``OVERRIDES = "arm:x86:mips:powerpc"``. You can
- find information on how to use ``OVERRIDES`` in the
+ find information on how to use :term:`OVERRIDES` in the
":ref:`bitbake-user-manual/bitbake-user-manual-metadata:conditional syntax
(overrides)`" section.
@@ -1040,11 +1198,11 @@ overview of their function and contents.
:term:`PACKAGES_DYNAMIC`
A promise that your recipe satisfies runtime dependencies for
optional modules that are found in other recipes.
- ``PACKAGES_DYNAMIC`` does not actually satisfy the dependencies, it
+ :term:`PACKAGES_DYNAMIC` does not actually satisfy the dependencies, it
only states that they should be satisfied. For example, if a hard,
runtime dependency (:term:`RDEPENDS`) of another
package is satisfied during the build through the
- ``PACKAGES_DYNAMIC`` variable, but a package with the module name is
+ :term:`PACKAGES_DYNAMIC` variable, but a package with the module name is
never actually produced, then the other package will be broken.
:term:`PE`
@@ -1083,8 +1241,8 @@ overview of their function and contents.
:term:`PREFERRED_PROVIDERS`
Determines which recipe should be given preference for cases where
multiple recipes provide the same item. Functionally,
- ``PREFERRED_PROVIDERS`` is identical to
- :term:`PREFERRED_PROVIDER`. However, the ``PREFERRED_PROVIDERS`` variable
+ :term:`PREFERRED_PROVIDERS` is identical to
+ :term:`PREFERRED_PROVIDER`. However, the :term:`PREFERRED_PROVIDERS` variable
lets you define preferences for multiple situations using the following
form::
@@ -1102,7 +1260,7 @@ overview of their function and contents.
select, and you should set :term:`PV` accordingly for
precedence.
- The ``PREFERRED_VERSION`` variable supports limited wildcard use
+ The :term:`PREFERRED_VERSION` variable supports limited wildcard use
through the "``%``" character. You can use the character to match any
number of characters, which can be useful when specifying versions
that contain long revision numbers that potentially change. Here are
@@ -1125,18 +1283,18 @@ overview of their function and contents.
Specifies additional paths from which BitBake gets source code. When
the build system searches for source code, it first tries the local
download directory. If that location fails, the build system tries
- locations defined by ``PREMIRRORS``, the upstream source, and then
+ locations defined by :term:`PREMIRRORS`, the upstream source, and then
locations specified by :term:`MIRRORS` in that order.
Typically, you would add a specific server for the build system to
attempt before any others by adding something like the following to
your configuration::
- PREMIRRORS_prepend = "\
- git://.*/.* http://www.yoctoproject.org/sources/ \n \
- ftp://.*/.* http://www.yoctoproject.org/sources/ \n \
- http://.*/.* http://www.yoctoproject.org/sources/ \n \
- https://.*/.* http://www.yoctoproject.org/sources/ \n"
+ PREMIRRORS:prepend = "\
+ git://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \
+ ftp://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \
+ http://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \
+ https://.*/.* http://downloads.yoctoproject.org/mirror/sources/"
These changes cause the build system to intercept Git, FTP, HTTP, and
HTTPS requests and direct them to the ``http://`` sources mirror. You can
@@ -1145,25 +1303,25 @@ overview of their function and contents.
:term:`PROVIDES`
A list of aliases by which a particular recipe can be known. By
- default, a recipe's own ``PN`` is implicitly already in its
- ``PROVIDES`` list. If a recipe uses ``PROVIDES``, the additional
+ default, a recipe's own :term:`PN` is implicitly already in its
+ :term:`PROVIDES` list. If a recipe uses :term:`PROVIDES`, the additional
aliases are synonyms for the recipe and can be useful for satisfying
dependencies of other recipes during the build as specified by
- ``DEPENDS``.
+ :term:`DEPENDS`.
- Consider the following example ``PROVIDES`` statement from a recipe
+ Consider the following example :term:`PROVIDES` statement from a recipe
file ``libav_0.8.11.bb``::
PROVIDES += "libpostproc"
- The ``PROVIDES`` statement results in the "libav" recipe also being known
+ The :term:`PROVIDES` statement results in the "libav" recipe also being known
as "libpostproc".
In addition to providing recipes under alternate names, the
- ``PROVIDES`` mechanism is also used to implement virtual targets. A
+ :term:`PROVIDES` mechanism is also used to implement virtual targets. A
virtual target is a name that corresponds to some particular
functionality (e.g. a Linux kernel). Recipes that provide the
- functionality in question list the virtual target in ``PROVIDES``.
+ functionality in question list the virtual target in :term:`PROVIDES`.
Recipes that depend on the functionality in question can include the
virtual target in :term:`DEPENDS` to leave the
choice of provider open.
@@ -1175,12 +1333,12 @@ overview of their function and contents.
:term:`PRSERV_HOST`
The network based :term:`PR` service host and port.
- Following is an example of how the ``PRSERV_HOST`` variable is set::
+ Following is an example of how the :term:`PRSERV_HOST` variable is set::
PRSERV_HOST = "localhost:0"
You must set the variable if you want to automatically start a local PR
- service. You can set ``PRSERV_HOST`` to other values to use a remote PR
+ service. You can set :term:`PRSERV_HOST` to other values to use a remote PR
service.
:term:`PV`
@@ -1192,24 +1350,24 @@ overview of their function and contents.
a package in this list cannot be found during the build, you will get
a build error.
- Because the ``RDEPENDS`` variable applies to packages being built,
+ Because the :term:`RDEPENDS` variable applies to packages being built,
you should always use the variable in a form with an attached package
name. For example, suppose you are building a development package
that depends on the ``perl`` package. In this case, you would use the
- following ``RDEPENDS`` statement::
+ following :term:`RDEPENDS` statement::
- RDEPENDS_${PN}-dev += "perl"
+ RDEPENDS:${PN}-dev += "perl"
In the example, the development package depends on the ``perl`` package.
- Thus, the ``RDEPENDS`` variable has the ``${PN}-dev`` package name as part
+ Thus, the :term:`RDEPENDS` variable has the ``${PN}-dev`` package name as part
of the variable.
BitBake supports specifying versioned dependencies. Although the
syntax varies depending on the packaging format, BitBake hides these
differences from you. Here is the general syntax to specify versions
- with the ``RDEPENDS`` variable::
+ with the :term:`RDEPENDS` variable::
- RDEPENDS_${PN} = "package (operator version)"
+ RDEPENDS:${PN} = "package (operator version)"
For ``operator``, you can specify the following::
@@ -1222,7 +1380,7 @@ overview of their function and contents.
For example, the following sets up a dependency on version 1.2 or
greater of the package ``foo``::
- RDEPENDS_${PN} = "foo (>= 1.2)"
+ RDEPENDS:${PN} = "foo (>= 1.2)"
For information on build-time dependencies, see the :term:`DEPENDS`
variable.
@@ -1233,39 +1391,39 @@ overview of their function and contents.
:term:`REQUIRED_VERSION`
If there are multiple versions of a recipe available, this variable
- determines which version should be given preference. ``REQUIRED_VERSION``
+ determines which version should be given preference. :term:`REQUIRED_VERSION`
works in exactly the same manner as :term:`PREFERRED_VERSION`, except
that if the specified version is not available then an error message
is shown and the build fails immediately.
- If both ``REQUIRED_VERSION`` and ``PREFERRED_VERSION`` are set for
- the same recipe, the ``REQUIRED_VERSION`` value applies.
+ If both :term:`REQUIRED_VERSION` and :term:`PREFERRED_VERSION` are set for
+ the same recipe, the :term:`REQUIRED_VERSION` value applies.
:term:`RPROVIDES`
A list of package name aliases that a package also provides. These
aliases are useful for satisfying runtime dependencies of other
packages both during the build and on the target (as specified by
- ``RDEPENDS``).
+ :term:`RDEPENDS`).
As with all package-controlling variables, you must always use the
variable in conjunction with a package name override. Here is an
example::
- RPROVIDES_${PN} = "widget-abi-2"
+ RPROVIDES:${PN} = "widget-abi-2"
:term:`RRECOMMENDS`
A list of packages that extends the usability of a package being
built. The package being built does not depend on this list of
packages in order to successfully build, but needs them for the
extended usability. To specify runtime dependencies for packages, see
- the ``RDEPENDS`` variable.
+ the :term:`RDEPENDS` variable.
BitBake supports specifying versioned recommends. Although the syntax
varies depending on the packaging format, BitBake hides these
differences from you. Here is the general syntax to specify versions
- with the ``RRECOMMENDS`` variable::
+ with the :term:`RRECOMMENDS` variable::
- RRECOMMENDS_${PN} = "package (operator version)"
+ RRECOMMENDS:${PN} = "package (operator version)"
For ``operator``, you can specify the following::
@@ -1278,76 +1436,112 @@ overview of their function and contents.
For example, the following sets up a recommend on version
1.2 or greater of the package ``foo``::
- RRECOMMENDS_${PN} = "foo (>= 1.2)"
+ RRECOMMENDS:${PN} = "foo (>= 1.2)"
:term:`SECTION`
The section in which packages should be categorized.
:term:`SRC_URI`
- The list of source files - local or remote. This variable tells
+ The list of source files --- local or remote. This variable tells
BitBake which bits to pull for the build and how to pull them. For
example, if the recipe or append file needs to fetch a single tarball
- from the Internet, the recipe or append file uses a ``SRC_URI`` entry
- that specifies that tarball. On the other hand, if the recipe or
- append file needs to fetch a tarball and include a custom file, the
- recipe or append file needs an ``SRC_URI`` variable that specifies
- all those sources.
-
- The following list explains the available URI protocols:
+ from the Internet, the recipe or append file uses a :term:`SRC_URI`
+ entry that specifies that tarball. On the other hand, if the recipe or
+ append file needs to fetch a tarball, apply two patches, and include
+ a custom file, the recipe or append file needs an :term:`SRC_URI`
+ variable that specifies all those sources.
+
+ The following list explains the available URI protocols. URI
+ protocols are highly dependent on particular BitBake Fetcher
+ submodules. Depending on the fetcher BitBake uses, various URL
+ parameters are employed. For specifics on the supported Fetchers, see
+ the :ref:`bitbake-user-manual/bitbake-user-manual-fetching:fetchers`
+ section.
- - ``file://`` : Fetches files, which are usually files shipped
- with the metadata, from the local machine. The path is relative to
- the :term:`FILESPATH` variable.
+ - ``az://``: Fetches files from an Azure Storage account using HTTPS.
- - ``bzr://`` : Fetches files from a Bazaar revision control
+ - ``bzr://``: Fetches files from a Bazaar revision control
repository.
- - ``git://`` : Fetches files from a Git revision control
+ - ``ccrc://``: Fetches files from a ClearCase repository.
+
+ - ``cvs://``: Fetches files from a CVS revision control
repository.
- - ``osc://`` : Fetches files from an OSC (OpenSUSE Build service)
- revision control repository.
+ - ``file://``: Fetches files, which are usually files shipped
+ with the Metadata, from the local machine.
+ The path is relative to the :term:`FILESPATH`
+ variable. Thus, the build system searches, in order, from the
+ following directories, which are assumed to be subdirectories of
+ the directory in which the recipe file (``.bb``) or append file
+ (``.bbappend``) resides:
- - ``repo://`` : Fetches files from a repo (Git) repository.
+ - ``${BPN}``: the base recipe name without any special suffix
+ or version numbers.
- - ``http://`` : Fetches files from the Internet using HTTP.
+ - ``${BP}`` --- ``${BPN}-${PV}``: the base recipe name and
+ version but without any special package name suffix.
- - ``https://`` : Fetches files from the Internet using HTTPS.
+ - ``files``: files within a directory, which is named ``files``
+ and is also alongside the recipe or append file.
- - ``ftp://`` : Fetches files from the Internet using FTP.
+ - ``ftp://``: Fetches files from the Internet using FTP.
- - ``cvs://`` : Fetches files from a CVS revision control
+ - ``git://``: Fetches files from a Git revision control
repository.
- - ``hg://`` : Fetches files from a Mercurial (``hg``) revision
- control repository.
+ - ``gitsm://``: Fetches submodules from a Git revision control
+ repository.
- - ``p4://`` : Fetches files from a Perforce (``p4``) revision
+ - ``hg://``: Fetches files from a Mercurial (``hg``) revision
control repository.
- - ``ssh://`` : Fetches files from a secure shell.
+ - ``http://``: Fetches files from the Internet using HTTP.
+
+ - ``https://``: Fetches files from the Internet using HTTPS.
+
+ - ``npm://``: Fetches JavaScript modules from a registry.
- - ``svn://`` : Fetches files from a Subversion (``svn``) revision
+ - ``osc://``: Fetches files from an OSC (OpenSUSE Build service)
+ revision control repository.
+
+ - ``p4://``: Fetches files from a Perforce (``p4``) revision
control repository.
- - ``az://`` : Fetches files from an Azure Storage account using HTTPS.
+ - ``repo://``: Fetches files from a repo (Git) repository.
+
+ - ``ssh://``: Fetches files from a secure shell.
+
+ - ``svn://``: Fetches files from a Subversion (``svn``) revision
+ control repository.
Here are some additional options worth mentioning:
- - ``unpack`` : Controls whether or not to unpack the file if it is
- an archive. The default action is to unpack the file.
+ - ``downloadfilename``: Specifies the filename used when storing
+ the downloaded file.
+
+ - ``name``: Specifies a name to be used for association with
+ :term:`SRC_URI` checksums or :term:`SRCREV` when you have more than one
+ file or git repository specified in :term:`SRC_URI`. For example::
- - ``subdir`` : Places the file (or extracts its contents) into the
+ SRC_URI = "git://example.com/foo.git;branch=main;name=first \
+ git://example.com/bar.git;branch=main;name=second \
+ http://example.com/file.tar.gz;name=third"
+
+ SRCREV_first = "f1d2d2f924e986ac86fdf7b36c94bcdf32beec15"
+ SRCREV_second = "e242ed3bffccdf271b7fbaf34ed72d089537b42f"
+ SRC_URI[third.sha256sum] = "13550350a8681c84c861aac2e5b440161c2b33a3e4f302ac680ca5b686de48de"
+
+ - ``subdir``: Places the file (or extracts its contents) into the
specified subdirectory. This option is useful for unusual tarballs
or other archives that do not have their files already in a
subdirectory within the archive.
- - ``name`` : Specifies a name to be used for association with
- ``SRC_URI`` checksums when you have more than one file specified
- in ``SRC_URI``.
+ - ``subpath``: Limits the checkout to a specific subpath of the
+ tree when the Git fetcher is used.
- - ``downloadfilename`` : Specifies the filename used when storing
- the downloaded file.
+ - ``unpack``: Controls whether or not to unpack the file if it is
+ an archive. The default action is to unpack the file.
:term:`SRCDATE`
The date of the source code used to build the package. This variable
@@ -1359,7 +1553,7 @@ overview of their function and contents.
variable applies only when using Subversion, Git, Mercurial and
Bazaar. If you want to build a fixed revision and you want to avoid
performing a query on the remote repository every time BitBake parses
- your recipe, you should specify a ``SRCREV`` that is a full revision
+ your recipe, you should specify a :term:`SRCREV` that is a full revision
identifier and not just a tag.
:term:`SRCREV_FORMAT`
@@ -1368,10 +1562,10 @@ overview of their function and contents.
:term:`SRC_URI`.
The system needs help constructing these values under these
- circumstances. Each component in the ``SRC_URI`` is assigned a name
- and these are referenced in the ``SRCREV_FORMAT`` variable. Consider
+ circumstances. Each component in the :term:`SRC_URI` is assigned a name
+ and these are referenced in the :term:`SRCREV_FORMAT` variable. Consider
an example with URLs named "machine" and "meta". In this case,
- ``SRCREV_FORMAT`` could look like "machine_meta" and those names
+ :term:`SRCREV_FORMAT` could look like "machine_meta" and those names
would have the SCM versions substituted into each position. Only one
``AUTOINC`` placeholder is added, if needed, and it is placed at the
start of the returned string.
@@ -1383,7 +1577,7 @@ overview of their function and contents.
:term:`STAMPCLEAN`
Specifies the base path used to create recipe stamp files. Unlike the
- :term:`STAMP` variable, ``STAMPCLEAN`` can contain
+ :term:`STAMP` variable, :term:`STAMPCLEAN` can contain
wildcards to match the range of files a clean operation should
remove. BitBake uses a clean operation to remove any other stamps it
should be removing when creating a new stamp.
diff --git a/doc/index.rst b/doc/index.rst
index 3ff8b1580..ee1660ac1 100644
--- a/doc/index.rst
+++ b/doc/index.rst
@@ -13,6 +13,7 @@ BitBake User Manual
bitbake-user-manual/bitbake-user-manual-intro
bitbake-user-manual/bitbake-user-manual-execution
bitbake-user-manual/bitbake-user-manual-metadata
+ bitbake-user-manual/bitbake-user-manual-ref-variables-context
bitbake-user-manual/bitbake-user-manual-fetching
bitbake-user-manual/bitbake-user-manual-ref-variables
bitbake-user-manual/bitbake-user-manual-hello
diff --git a/doc/releases.rst b/doc/releases.rst
index d68d71599..b38b1c065 100644
--- a/doc/releases.rst
+++ b/doc/releases.rst
@@ -1,32 +1,76 @@
.. SPDX-License-Identifier: CC-BY-2.5
-=========================
- Current Release Manuals
-=========================
+=================================
+BitBake Supported Release Manuals
+=================================
+
+*******************************
+Release Series 4.2 (mickledore)
+*******************************
+
+- :yocto_docs:`BitBake 2.4 User Manual </bitbake/2.4/>`
+
+******************************
+Release Series 4.0 (kirkstone)
+******************************
+
+- :yocto_docs:`BitBake 2.0 User Manual </bitbake/2.0/>`
****************************
-3.1 'dunfell' Release Series
+Release Series 3.1 (dunfell)
****************************
+- :yocto_docs:`BitBake 1.46 User Manual </bitbake/1.46/>`
+
+================================
+BitBake Outdated Release Manuals
+================================
+
+*****************************
+Release Series 4.1 (langdale)
+*****************************
+
+- :yocto_docs:`BitBake 2.2 User Manual </bitbake/2.2/>`
+
+******************************
+Release Series 3.4 (honister)
+******************************
+
+- :yocto_docs:`BitBake 1.52 User Manual </bitbake/1.52/>`
+
+******************************
+Release Series 3.3 (hardknott)
+******************************
+
+- :yocto_docs:`BitBake 1.50 User Manual </bitbake/1.50/>`
+
+*******************************
+Release Series 3.2 (gatesgarth)
+*******************************
+
+- :yocto_docs:`BitBake 1.48 User Manual </bitbake/1.48/>`
+
+*******************************************
+Release Series 3.1 (dunfell first versions)
+*******************************************
+
- :yocto_docs:`3.1 BitBake User Manual </3.1/bitbake-user-manual/bitbake-user-manual.html>`
- :yocto_docs:`3.1.1 BitBake User Manual </3.1.1/bitbake-user-manual/bitbake-user-manual.html>`
- :yocto_docs:`3.1.2 BitBake User Manual </3.1.2/bitbake-user-manual/bitbake-user-manual.html>`
-
-==========================
- Previous Release Manuals
-==========================
+- :yocto_docs:`3.1.3 BitBake User Manual </3.1.3/bitbake-user-manual/bitbake-user-manual.html>`
*************************
-3.0 'zeus' Release Series
+Release Series 3.0 (zeus)
*************************
- :yocto_docs:`3.0 BitBake User Manual </3.0/bitbake-user-manual/bitbake-user-manual.html>`
- :yocto_docs:`3.0.1 BitBake User Manual </3.0.1/bitbake-user-manual/bitbake-user-manual.html>`
- :yocto_docs:`3.0.2 BitBake User Manual </3.0.2/bitbake-user-manual/bitbake-user-manual.html>`
- :yocto_docs:`3.0.3 BitBake User Manual </3.0.3/bitbake-user-manual/bitbake-user-manual.html>`
+- :yocto_docs:`3.0.4 BitBake User Manual </3.0.4/bitbake-user-manual/bitbake-user-manual.html>`
****************************
-2.7 'warrior' Release Series
+Release Series 2.7 (warrior)
****************************
- :yocto_docs:`2.7 BitBake User Manual </2.7/bitbake-user-manual/bitbake-user-manual.html>`
@@ -36,7 +80,7 @@
- :yocto_docs:`2.7.4 BitBake User Manual </2.7.4/bitbake-user-manual/bitbake-user-manual.html>`
*************************
-2.6 'thud' Release Series
+Release Series 2.6 (thud)
*************************
- :yocto_docs:`2.6 BitBake User Manual </2.6/bitbake-user-manual/bitbake-user-manual.html>`
@@ -46,16 +90,16 @@
- :yocto_docs:`2.6.4 BitBake User Manual </2.6.4/bitbake-user-manual/bitbake-user-manual.html>`
*************************
-2.5 'sumo' Release Series
+Release Series 2.5 (sumo)
*************************
-- :yocto_docs:`2.5 BitBake User Manual </2.5/bitbake-user-manual/bitbake-user-manual.html>`
-- :yocto_docs:`2.5.1 BitBake User Manual </2.5.1/bitbake-user-manual/bitbake-user-manual.html>`
-- :yocto_docs:`2.5.2 BitBake User Manual </2.5.2/bitbake-user-manual/bitbake-user-manual.html>`
-- :yocto_docs:`2.5.3 BitBake User Manual </2.5.3/bitbake-user-manual/bitbake-user-manual.html>`
+- :yocto_docs:`2.5 Documentation </2.5>`
+- :yocto_docs:`2.5.1 Documentation </2.5.1>`
+- :yocto_docs:`2.5.2 Documentation </2.5.2>`
+- :yocto_docs:`2.5.3 Documentation </2.5.3>`
**************************
-2.4 'rocko' Release Series
+Release Series 2.4 (rocko)
**************************
- :yocto_docs:`2.4 BitBake User Manual </2.4/bitbake-user-manual/bitbake-user-manual.html>`
@@ -65,7 +109,7 @@
- :yocto_docs:`2.4.4 BitBake User Manual </2.4.4/bitbake-user-manual/bitbake-user-manual.html>`
*************************
-2.3 'pyro' Release Series
+Release Series 2.3 (pyro)
*************************
- :yocto_docs:`2.3 BitBake User Manual </2.3/bitbake-user-manual/bitbake-user-manual.html>`
@@ -75,7 +119,7 @@
- :yocto_docs:`2.3.4 BitBake User Manual </2.3.4/bitbake-user-manual/bitbake-user-manual.html>`
**************************
-2.2 'morty' Release Series
+Release Series 2.2 (morty)
**************************
- :yocto_docs:`2.2 BitBake User Manual </2.2/bitbake-user-manual/bitbake-user-manual.html>`
@@ -84,7 +128,7 @@
- :yocto_docs:`2.2.3 BitBake User Manual </2.2.3/bitbake-user-manual/bitbake-user-manual.html>`
****************************
-2.1 'krogoth' Release Series
+Release Series 2.1 (krogoth)
****************************
- :yocto_docs:`2.1 BitBake User Manual </2.1/bitbake-user-manual/bitbake-user-manual.html>`
@@ -93,7 +137,7 @@
- :yocto_docs:`2.1.3 BitBake User Manual </2.1.3/bitbake-user-manual/bitbake-user-manual.html>`
***************************
-2.0 'jethro' Release Series
+Release Series 2.0 (jethro)
***************************
- :yocto_docs:`1.9 BitBake User Manual </1.9/bitbake-user-manual/bitbake-user-manual.html>`
@@ -103,7 +147,7 @@
- :yocto_docs:`2.0.3 BitBake User Manual </2.0.3/bitbake-user-manual/bitbake-user-manual.html>`
*************************
-1.8 'fido' Release Series
+Release Series 1.8 (fido)
*************************
- :yocto_docs:`1.8 BitBake User Manual </1.8/bitbake-user-manual/bitbake-user-manual.html>`
@@ -111,7 +155,7 @@
- :yocto_docs:`1.8.2 BitBake User Manual </1.8.2/bitbake-user-manual/bitbake-user-manual.html>`
**************************
-1.7 'dizzy' Release Series
+Release Series 1.7 (dizzy)
**************************
- :yocto_docs:`1.7 BitBake User Manual </1.7/bitbake-user-manual/bitbake-user-manual.html>`
@@ -120,7 +164,7 @@
- :yocto_docs:`1.7.3 BitBake User Manual </1.7.3/bitbake-user-manual/bitbake-user-manual.html>`
**************************
-1.6 'daisy' Release Series
+Release Series 1.6 (daisy)
**************************
- :yocto_docs:`1.6 BitBake User Manual </1.6/bitbake-user-manual/bitbake-user-manual.html>`
diff --git a/lib/bb/COW.py b/lib/bb/COW.py
index 23c22b65e..76bc08a3e 100644
--- a/lib/bb/COW.py
+++ b/lib/bb/COW.py
@@ -3,6 +3,8 @@
#
# Copyright (C) 2006 Tim Ansell
#
+# SPDX-License-Identifier: GPL-2.0-only
+#
# Please Note:
# Be careful when using mutable types (ie Dict and Lists) - operations involving these are SLOW.
# Assign a file to __warn__ to get warnings about slow operations.
diff --git a/lib/bb/__init__.py b/lib/bb/__init__.py
index a144bd609..15013540c 100644
--- a/lib/bb/__init__.py
+++ b/lib/bb/__init__.py
@@ -9,12 +9,19 @@
# SPDX-License-Identifier: GPL-2.0-only
#
-__version__ = "1.51.0"
+__version__ = "2.9.0"
import sys
-if sys.version_info < (3, 5, 0):
- raise RuntimeError("Sorry, python 3.5.0 or later is required for this version of bitbake")
+if sys.version_info < (3, 8, 0):
+ raise RuntimeError("Sorry, python 3.8.0 or later is required for this version of bitbake")
+if sys.version_info < (3, 10, 0):
+ # With python 3.8 and 3.9, we see errors of "libgcc_s.so.1 must be installed for pthread_cancel to work"
+ # https://stackoverflow.com/questions/64797838/libgcc-s-so-1-must-be-installed-for-pthread-cancel-to-work
+ # https://bugs.ams1.psf.io/issue42888
+ # so ensure libgcc_s is loaded early on
+ import ctypes
+ libgcc_s = ctypes.CDLL('libgcc_s.so.1')
class BBHandledException(Exception):
"""
@@ -60,6 +67,10 @@ class BBLoggerMixin(object):
return
if loglevel < bb.msg.loggerDefaultLogLevel:
return
+
+ if not isinstance(level, int) or not isinstance(msg, str):
+ mainlogger.warning("Invalid arguments in bbdebug: %s" % repr((level, msg,) + args))
+
return self.log(loglevel, msg, *args, **kwargs)
def plain(self, msg, *args, **kwargs):
@@ -71,6 +82,13 @@ class BBLoggerMixin(object):
def verbnote(self, msg, *args, **kwargs):
return self.log(logging.INFO + 2, msg, *args, **kwargs)
+ def warnonce(self, msg, *args, **kwargs):
+ return self.log(logging.WARNING - 1, msg, *args, **kwargs)
+
+ def erroronce(self, msg, *args, **kwargs):
+ return self.log(logging.ERROR - 1, msg, *args, **kwargs)
+
+
Logger = logging.getLoggerClass()
class BBLogger(Logger, BBLoggerMixin):
def __init__(self, name, *args, **kwargs):
@@ -157,9 +175,15 @@ def verbnote(*args):
def warn(*args):
mainlogger.warning(''.join(args))
+def warnonce(*args):
+ mainlogger.warnonce(''.join(args))
+
def error(*args, **kwargs):
mainlogger.error(''.join(args), extra=kwargs)
+def erroronce(*args):
+ mainlogger.erroronce(''.join(args))
+
def fatal(*args, **kwargs):
mainlogger.critical(''.join(args), extra=kwargs)
raise BBHandledException()
diff --git a/lib/bb/acl.py b/lib/bb/acl.py
new file mode 100755
index 000000000..0f41b275c
--- /dev/null
+++ b/lib/bb/acl.py
@@ -0,0 +1,215 @@
+#! /usr/bin/env python3
+#
+# Copyright 2023 by Garmin Ltd. or its subsidiaries
+#
+# SPDX-License-Identifier: MIT
+
+
+import sys
+import ctypes
+import os
+import errno
+import pwd
+import grp
+
+libacl = ctypes.CDLL("libacl.so.1", use_errno=True)
+
+
+ACL_TYPE_ACCESS = 0x8000
+ACL_TYPE_DEFAULT = 0x4000
+
+ACL_FIRST_ENTRY = 0
+ACL_NEXT_ENTRY = 1
+
+ACL_UNDEFINED_TAG = 0x00
+ACL_USER_OBJ = 0x01
+ACL_USER = 0x02
+ACL_GROUP_OBJ = 0x04
+ACL_GROUP = 0x08
+ACL_MASK = 0x10
+ACL_OTHER = 0x20
+
+ACL_READ = 0x04
+ACL_WRITE = 0x02
+ACL_EXECUTE = 0x01
+
+acl_t = ctypes.c_void_p
+acl_entry_t = ctypes.c_void_p
+acl_permset_t = ctypes.c_void_p
+acl_perm_t = ctypes.c_uint
+
+acl_tag_t = ctypes.c_int
+
+libacl.acl_free.argtypes = [acl_t]
+
+
+def acl_free(acl):
+ libacl.acl_free(acl)
+
+
+libacl.acl_get_file.restype = acl_t
+libacl.acl_get_file.argtypes = [ctypes.c_char_p, ctypes.c_uint]
+
+
+def acl_get_file(path, typ):
+ acl = libacl.acl_get_file(os.fsencode(path), typ)
+ if acl is None:
+ err = ctypes.get_errno()
+ raise OSError(err, os.strerror(err), str(path))
+
+ return acl
+
+
+libacl.acl_get_entry.argtypes = [acl_t, ctypes.c_int, ctypes.c_void_p]
+
+
+def acl_get_entry(acl, entry_id):
+ entry = acl_entry_t()
+ ret = libacl.acl_get_entry(acl, entry_id, ctypes.byref(entry))
+ if ret < 0:
+ err = ctypes.get_errno()
+ raise OSError(err, os.strerror(err))
+
+ if ret == 0:
+ return None
+
+ return entry
+
+
+libacl.acl_get_tag_type.argtypes = [acl_entry_t, ctypes.c_void_p]
+
+
+def acl_get_tag_type(entry_d):
+ tag = acl_tag_t()
+ ret = libacl.acl_get_tag_type(entry_d, ctypes.byref(tag))
+ if ret < 0:
+ err = ctypes.get_errno()
+ raise OSError(err, os.strerror(err))
+ return tag.value
+
+
+libacl.acl_get_qualifier.restype = ctypes.c_void_p
+libacl.acl_get_qualifier.argtypes = [acl_entry_t]
+
+
+def acl_get_qualifier(entry_d):
+ ret = libacl.acl_get_qualifier(entry_d)
+ if ret is None:
+ err = ctypes.get_errno()
+ raise OSError(err, os.strerror(err))
+ return ctypes.c_void_p(ret)
+
+
+libacl.acl_get_permset.argtypes = [acl_entry_t, ctypes.c_void_p]
+
+
+def acl_get_permset(entry_d):
+ permset = acl_permset_t()
+ ret = libacl.acl_get_permset(entry_d, ctypes.byref(permset))
+ if ret < 0:
+ err = ctypes.get_errno()
+ raise OSError(err, os.strerror(err))
+
+ return permset
+
+
+libacl.acl_get_perm.argtypes = [acl_permset_t, acl_perm_t]
+
+
+def acl_get_perm(permset_d, perm):
+ ret = libacl.acl_get_perm(permset_d, perm)
+ if ret < 0:
+ err = ctypes.get_errno()
+ raise OSError(err, os.strerror(err))
+ return bool(ret)
+
+
+class Entry(object):
+ def __init__(self, tag, qualifier, mode):
+ self.tag = tag
+ self.qualifier = qualifier
+ self.mode = mode
+
+ def __str__(self):
+ typ = ""
+ qual = ""
+ if self.tag == ACL_USER:
+ typ = "user"
+ qual = pwd.getpwuid(self.qualifier).pw_name
+ elif self.tag == ACL_GROUP:
+ typ = "group"
+ qual = grp.getgrgid(self.qualifier).gr_name
+ elif self.tag == ACL_USER_OBJ:
+ typ = "user"
+ elif self.tag == ACL_GROUP_OBJ:
+ typ = "group"
+ elif self.tag == ACL_MASK:
+ typ = "mask"
+ elif self.tag == ACL_OTHER:
+ typ = "other"
+
+ r = "r" if self.mode & ACL_READ else "-"
+ w = "w" if self.mode & ACL_WRITE else "-"
+ x = "x" if self.mode & ACL_EXECUTE else "-"
+
+ return f"{typ}:{qual}:{r}{w}{x}"
+
+
+class ACL(object):
+ def __init__(self, acl):
+ self.acl = acl
+
+ def __del__(self):
+ acl_free(self.acl)
+
+ def entries(self):
+ entry_id = ACL_FIRST_ENTRY
+ while True:
+ entry = acl_get_entry(self.acl, entry_id)
+ if entry is None:
+ break
+
+ permset = acl_get_permset(entry)
+
+ mode = 0
+ for m in (ACL_READ, ACL_WRITE, ACL_EXECUTE):
+ if acl_get_perm(permset, m):
+ mode |= m
+
+ qualifier = None
+ tag = acl_get_tag_type(entry)
+
+ if tag == ACL_USER or tag == ACL_GROUP:
+ qual = acl_get_qualifier(entry)
+ qualifier = ctypes.cast(qual, ctypes.POINTER(ctypes.c_int))[0]
+
+ yield Entry(tag, qualifier, mode)
+
+ entry_id = ACL_NEXT_ENTRY
+
+ @classmethod
+ def from_path(cls, path, typ):
+ acl = acl_get_file(path, typ)
+ return cls(acl)
+
+
+def main():
+ import argparse
+ import pwd
+ import grp
+ from pathlib import Path
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument("path", help="File Path", type=Path)
+
+ args = parser.parse_args()
+
+ acl = ACL.from_path(args.path, ACL_TYPE_ACCESS)
+ for entry in acl.entries():
+ print(str(entry))
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
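A minimal sketch of using this module from Python rather than via the command
line (path hypothetical; output depends on the filesystem, and assumes ``bb``
is importable):

    from bb.acl import ACL, ACL_TYPE_ACCESS

    acl = ACL.from_path("/tmp/build", ACL_TYPE_ACCESS)
    for entry in acl.entries():
        print(entry)  # getfacl-style lines, e.g. "user::rwx", "group::r-x"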
diff --git a/lib/bb/asyncrpc/__init__.py b/lib/bb/asyncrpc/__init__.py
index b2bec31ab..639e1607f 100644
--- a/lib/bb/asyncrpc/__init__.py
+++ b/lib/bb/asyncrpc/__init__.py
@@ -1,31 +1,16 @@
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
-import itertools
-import json
-
-# The Python async server defaults to a 64K receive buffer, so we hardcode our
-# maximum chunk size. It would be better if the client and server reported to
-# each other what the maximum chunk sizes were, but that will slow down the
-# connection setup with a round trip delay so I'd rather not do that unless it
-# is necessary
-DEFAULT_MAX_CHUNK = 32 * 1024
-
-
-def chunkify(msg, max_chunk):
- if len(msg) < max_chunk - 1:
- yield ''.join((msg, "\n"))
- else:
- yield ''.join((json.dumps({
- 'chunk-stream': None
- }), "\n"))
-
- args = [iter(msg)] * (max_chunk - 1)
- for m in map(''.join, itertools.zip_longest(*args, fillvalue='')):
- yield ''.join(itertools.chain(m, "\n"))
- yield "\n"
-
-from .client import AsyncClient, Client
+from .client import AsyncClient, Client, ClientPool
from .serv import AsyncServer, AsyncServerConnection
+from .connection import DEFAULT_MAX_CHUNK
+from .exceptions import (
+ ClientError,
+ ServerError,
+ ConnectionClosedError,
+ InvokeError,
+)
diff --git a/lib/bb/asyncrpc/client.py b/lib/bb/asyncrpc/client.py
index 4cdad9ac3..a350b4fb1 100644
--- a/lib/bb/asyncrpc/client.py
+++ b/lib/bb/asyncrpc/client.py
@@ -1,4 +1,6 @@
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -7,46 +9,127 @@ import asyncio
import json
import os
import socket
-from . import chunkify, DEFAULT_MAX_CHUNK
-
+import sys
+import re
+import contextlib
+from threading import Thread
+from .connection import StreamConnection, WebsocketConnection, DEFAULT_MAX_CHUNK
+from .exceptions import ConnectionClosedError, InvokeError
+
+UNIX_PREFIX = "unix://"
+WS_PREFIX = "ws://"
+WSS_PREFIX = "wss://"
+
+ADDR_TYPE_UNIX = 0
+ADDR_TYPE_TCP = 1
+ADDR_TYPE_WS = 2
+
+def parse_address(addr):
+ if addr.startswith(UNIX_PREFIX):
+ return (ADDR_TYPE_UNIX, (addr[len(UNIX_PREFIX) :],))
+ elif addr.startswith(WS_PREFIX) or addr.startswith(WSS_PREFIX):
+ return (ADDR_TYPE_WS, (addr,))
+ else:
+ m = re.match(r"\[(?P<host>[^\]]*)\]:(?P<port>\d+)$", addr)
+ if m is not None:
+ host = m.group("host")
+ port = m.group("port")
+ else:
+ host, port = addr.split(":")
+
+ return (ADDR_TYPE_TCP, (host, int(port)))
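For reference, some illustrative inputs and the tuples parse_address()
produces for them:

    parse_address("unix:///run/hashserv.sock")  # (ADDR_TYPE_UNIX, ("/run/hashserv.sock",))
    parse_address("ws://server:8688")           # (ADDR_TYPE_WS, ("ws://server:8688",))
    parse_address("[::1]:8686")                 # (ADDR_TYPE_TCP, ("::1", 8686))
    parse_address("server:8686")                # (ADDR_TYPE_TCP, ("server", 8686))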
class AsyncClient(object):
- def __init__(self, proto_name, proto_version, logger):
- self.reader = None
- self.writer = None
+ def __init__(
+ self,
+ proto_name,
+ proto_version,
+ logger,
+ timeout=30,
+ server_headers=False,
+ headers={},
+ ):
+ self.socket = None
self.max_chunk = DEFAULT_MAX_CHUNK
self.proto_name = proto_name
self.proto_version = proto_version
self.logger = logger
+ self.timeout = timeout
+ self.needs_server_headers = server_headers
+ self.server_headers = {}
+ self.headers = headers
async def connect_tcp(self, address, port):
async def connect_sock():
- return await asyncio.open_connection(address, port)
+ reader, writer = await asyncio.open_connection(address, port)
+ return StreamConnection(reader, writer, self.timeout, self.max_chunk)
self._connect_sock = connect_sock
async def connect_unix(self, path):
async def connect_sock():
- return await asyncio.open_unix_connection(path)
+ # AF_UNIX has path length issues so chdir here to workaround
+ cwd = os.getcwd()
+ try:
+ os.chdir(os.path.dirname(path))
+ # The socket must be opened synchronously so that CWD doesn't get
+ # changed out from underneath us so we pass as a sock into asyncio
+ sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM, 0)
+ sock.connect(os.path.basename(path))
+ finally:
+ os.chdir(cwd)
+ reader, writer = await asyncio.open_unix_connection(sock=sock)
+ return StreamConnection(reader, writer, self.timeout, self.max_chunk)
+
+ self._connect_sock = connect_sock
+
+ async def connect_websocket(self, uri):
+ import websockets
+
+ async def connect_sock():
+ websocket = await websockets.connect(uri, ping_interval=None)
+ return WebsocketConnection(websocket, self.timeout)
self._connect_sock = connect_sock
async def setup_connection(self):
- s = '%s %s\n\n' % (self.proto_name, self.proto_version)
- self.writer.write(s.encode("utf-8"))
- await self.writer.drain()
+ # Send headers
+ await self.socket.send("%s %s" % (self.proto_name, self.proto_version))
+ await self.socket.send(
+ "needs-headers: %s" % ("true" if self.needs_server_headers else "false")
+ )
+ for k, v in self.headers.items():
+ await self.socket.send("%s: %s" % (k, v))
+
+ # End of headers
+ await self.socket.send("")
+
+ self.server_headers = {}
+ if self.needs_server_headers:
+ while True:
+ line = await self.socket.recv()
+ if not line:
+ # End headers
+ break
+ tag, value = line.split(":", 1)
+ self.server_headers[tag.lower()] = value.strip()
+
+ async def get_header(self, tag, default):
+ await self.connect()
+ return self.server_headers.get(tag, default)
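As a sketch, the header exchange implemented above looks like this on the
wire (protocol name and header values are illustrative):

    client -> server:  exampleproto 1.0
    client -> server:  needs-headers: true
    client -> server:  username: alice
    client -> server:  (empty line ends the client headers)
    server -> client:  user-permissions: read
    server -> client:  (empty line ends the server headers)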
async def connect(self):
- if self.reader is None or self.writer is None:
- (self.reader, self.writer) = await self._connect_sock()
+ if self.socket is None:
+ self.socket = await self._connect_sock()
await self.setup_connection()
- async def close(self):
- self.reader = None
+ async def disconnect(self):
+ if self.socket is not None:
+ await self.socket.close()
+ self.socket = None
- if self.writer is not None:
- self.writer.close()
- self.writer = None
+ async def close(self):
+ await self.disconnect()
async def _send_wrapper(self, proc):
count = 0
@@ -57,6 +140,7 @@ class AsyncClient(object):
except (
OSError,
ConnectionError,
+ ConnectionClosedError,
json.JSONDecodeError,
UnicodeDecodeError,
) as e:
@@ -68,40 +152,27 @@ class AsyncClient(object):
await self.close()
count += 1
- async def send_message(self, msg):
- async def get_line():
- line = await self.reader.readline()
- if not line:
- raise ConnectionError("Connection closed")
-
- line = line.decode("utf-8")
-
- if not line.endswith("\n"):
- raise ConnectionError("Bad message %r" % msg)
-
- return line
+ def check_invoke_error(self, msg):
+ if isinstance(msg, dict) and "invoke-error" in msg:
+ raise InvokeError(msg["invoke-error"]["message"])
+ async def invoke(self, msg):
async def proc():
- for c in chunkify(json.dumps(msg), self.max_chunk):
- self.writer.write(c.encode("utf-8"))
- await self.writer.drain()
-
- l = await get_line()
+ await self.socket.send_message(msg)
+ return await self.socket.recv_message()
- m = json.loads(l)
- if m and "chunk-stream" in m:
- lines = []
- while True:
- l = (await get_line()).rstrip("\n")
- if not l:
- break
- lines.append(l)
+ result = await self._send_wrapper(proc)
+ self.check_invoke_error(result)
+ return result
- m = json.loads("".join(lines))
+ async def ping(self):
+ return await self.invoke({"ping": {}})
- return m
+ async def __aenter__(self):
+ return self
- return await self._send_wrapper(proc)
+ async def __aexit__(self, exc_type, exc_value, traceback):
+ await self.close()
class Client(object):
@@ -109,7 +180,17 @@ class Client(object):
self.client = self._get_async_client()
self.loop = asyncio.new_event_loop()
- self._add_methods('connect_tcp', 'close')
+ # Override any pre-existing loop.
+ # Without this, the PR server export selftest triggers a hang
+ # when running with Python 3.7. The drawback is that there is
+ # potential for issues if the PR and hash equiv (or some new)
+ # clients need to both be instantiated in the same process.
+ # This should be revisited if/when Python 3.9 becomes the
+ # minimum required version for BitBake, as it seems not
+ # required (but harmless) with it.
+ asyncio.set_event_loop(self.loop)
+
+ self._add_methods("connect_tcp", "ping")
@abc.abstractmethod
def _get_async_client(self):
@@ -127,14 +208,8 @@ class Client(object):
setattr(self, m, self._get_downcall_wrapper(downcall))
def connect_unix(self, path):
- # AF_UNIX has path length issues so chdir here to workaround
- cwd = os.getcwd()
- try:
- os.chdir(os.path.dirname(path))
- self.loop.run_until_complete(self.client.connect_unix(os.path.basename(path)))
- self.loop.run_until_complete(self.client.connect())
- finally:
- os.chdir(cwd)
+ self.loop.run_until_complete(self.client.connect_unix(path))
+ self.loop.run_until_complete(self.client.connect())
@property
def max_chunk(self):
@@ -143,3 +218,96 @@ class Client(object):
@max_chunk.setter
def max_chunk(self, value):
self.client.max_chunk = value
+
+ def disconnect(self):
+ self.loop.run_until_complete(self.client.close())
+
+ def close(self):
+ if self.loop:
+ self.loop.run_until_complete(self.client.close())
+ if sys.version_info >= (3, 6):
+ self.loop.run_until_complete(self.loop.shutdown_asyncgens())
+ self.loop.close()
+ self.loop = None
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ self.close()
+ return False
+
+
+class ClientPool(object):
+ def __init__(self, max_clients):
+ self.avail_clients = []
+ self.num_clients = 0
+ self.max_clients = max_clients
+ self.loop = None
+ self.client_condition = None
+
+ @abc.abstractmethod
+ async def _new_client(self):
+ raise NotImplementedError("Must be implemented in derived class")
+
+ def close(self):
+ if self.client_condition:
+ self.client_condition = None
+
+ if self.loop:
+ self.loop.run_until_complete(self.__close_clients())
+ self.loop.run_until_complete(self.loop.shutdown_asyncgens())
+ self.loop.close()
+ self.loop = None
+
+ def run_tasks(self, tasks):
+ if not self.loop:
+ self.loop = asyncio.new_event_loop()
+
+ thread = Thread(target=self.__thread_main, args=(tasks,))
+ thread.start()
+ thread.join()
+
+ @contextlib.asynccontextmanager
+ async def get_client(self):
+ async with self.client_condition:
+ if self.avail_clients:
+ client = self.avail_clients.pop()
+ elif self.num_clients < self.max_clients:
+ self.num_clients += 1
+ client = await self._new_client()
+ else:
+ while not self.avail_clients:
+ await self.client_condition.wait()
+ client = self.avail_clients.pop()
+
+ try:
+ yield client
+ finally:
+ async with self.client_condition:
+ self.avail_clients.append(client)
+ self.client_condition.notify()
+
+ def __thread_main(self, tasks):
+ async def process_task(task):
+ async with self.get_client() as client:
+ await task(client)
+
+ asyncio.set_event_loop(self.loop)
+ if not self.client_condition:
+ self.client_condition = asyncio.Condition()
+ tasks = [process_task(t) for t in tasks]
+ self.loop.run_until_complete(asyncio.gather(*tasks))
+
+ async def __close_clients(self):
+ for c in self.avail_clients:
+ await c.close()
+ self.avail_clients = []
+ self.num_clients = 0
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ self.close()
+ return False
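A minimal sketch of a ClientPool subclass, assuming make_client() is a
hypothetical coroutine returning a client object with async ping() and
close() methods:

    class ExamplePool(ClientPool):
        async def _new_client(self):
            # hypothetical factory for an AsyncClient-style object
            return await make_client()

    async def do_work(client):
        await client.ping()

    with ExamplePool(max_clients=4) as pool:
        # each task is a coroutine function that receives a pooled client
        pool.run_tasks([do_work, do_work, do_work])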
diff --git a/lib/bb/asyncrpc/connection.py b/lib/bb/asyncrpc/connection.py
new file mode 100644
index 000000000..7f0cf6ba9
--- /dev/null
+++ b/lib/bb/asyncrpc/connection.py
@@ -0,0 +1,146 @@
+#
+# Copyright BitBake Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import asyncio
+import itertools
+import json
+from datetime import datetime
+from .exceptions import ClientError, ConnectionClosedError
+
+
+# The Python async server defaults to a 64K receive buffer, so we hardcode our
+# maximum chunk size. It would be better if the client and server reported to
+# each other what the maximum chunk sizes were, but that will slow down the
+# connection setup with a round trip delay so I'd rather not do that unless it
+# is necessary
+DEFAULT_MAX_CHUNK = 32 * 1024
+
+
+def chunkify(msg, max_chunk):
+ if len(msg) < max_chunk - 1:
+ yield "".join((msg, "\n"))
+ else:
+ yield "".join((json.dumps({"chunk-stream": None}), "\n"))
+
+ args = [iter(msg)] * (max_chunk - 1)
+ for m in map("".join, itertools.zip_longest(*args, fillvalue="")):
+ yield "".join(itertools.chain(m, "\n"))
+ yield "\n"
+
+
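For illustration, a message longer than max_chunk is streamed as a marker
line, fixed-size slices, and a blank-line terminator (send() below is a
hypothetical stand-in for the writer):

    msg = json.dumps({"get": {"taskhash": "abc123"}})
    for line in chunkify(msg, max_chunk=16):
        # yields '{"chunk-stream": null}\n' first, then 15-character
        # slices of msg (the last one shorter) each ending in '\n',
        # and finally a bare '\n' terminator
        send(line)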
+def json_serialize(obj):
+ if isinstance(obj, datetime):
+ return obj.isoformat()
+ raise TypeError("Type %s not serializable" % type(obj))
+
+
+class StreamConnection(object):
+ def __init__(self, reader, writer, timeout, max_chunk=DEFAULT_MAX_CHUNK):
+ self.reader = reader
+ self.writer = writer
+ self.timeout = timeout
+ self.max_chunk = max_chunk
+
+ @property
+ def address(self):
+ return self.writer.get_extra_info("peername")
+
+ async def send_message(self, msg):
+ for c in chunkify(json.dumps(msg, default=json_serialize), self.max_chunk):
+ self.writer.write(c.encode("utf-8"))
+ await self.writer.drain()
+
+ async def recv_message(self):
+ l = await self.recv()
+
+ m = json.loads(l)
+ if not m:
+ return m
+
+ if "chunk-stream" in m:
+ lines = []
+ while True:
+ l = await self.recv()
+ if not l:
+ break
+ lines.append(l)
+
+ m = json.loads("".join(lines))
+
+ return m
+
+ async def send(self, msg):
+ self.writer.write(("%s\n" % msg).encode("utf-8"))
+ await self.writer.drain()
+
+ async def recv(self):
+ if self.timeout < 0:
+ line = await self.reader.readline()
+ else:
+ try:
+ line = await asyncio.wait_for(self.reader.readline(), self.timeout)
+ except asyncio.TimeoutError:
+ raise ConnectionError("Timed out waiting for data")
+
+ if not line:
+ raise ConnectionClosedError("Connection closed")
+
+ line = line.decode("utf-8")
+
+ if not line.endswith("\n"):
+ raise ConnectionError("Bad message %r" % (line))
+
+ return line.rstrip()
+
+ async def close(self):
+ self.reader = None
+ if self.writer is not None:
+ self.writer.close()
+ self.writer = None
+
+
+class WebsocketConnection(object):
+ def __init__(self, socket, timeout):
+ self.socket = socket
+ self.timeout = timeout
+
+ @property
+ def address(self):
+ return ":".join(str(s) for s in self.socket.remote_address)
+
+ async def send_message(self, msg):
+ await self.send(json.dumps(msg, default=json_serialize))
+
+ async def recv_message(self):
+ m = await self.recv()
+ return json.loads(m)
+
+ async def send(self, msg):
+ import websockets.exceptions
+
+ try:
+ await self.socket.send(msg)
+ except websockets.exceptions.ConnectionClosed:
+ raise ConnectionClosedError("Connection closed")
+
+ async def recv(self):
+ import websockets.exceptions
+
+ try:
+ if self.timeout < 0:
+ return await self.socket.recv()
+
+ try:
+ return await asyncio.wait_for(self.socket.recv(), self.timeout)
+ except asyncio.TimeoutError:
+ raise ConnectionError("Timed out waiting for data")
+ except websockets.exceptions.ConnectionClosed:
+ raise ConnectionClosedError("Connection closed")
+
+ async def close(self):
+ if self.socket is not None:
+ await self.socket.close()
+ self.socket = None
diff --git a/lib/bb/asyncrpc/exceptions.py b/lib/bb/asyncrpc/exceptions.py
new file mode 100644
index 000000000..ae1043a38
--- /dev/null
+++ b/lib/bb/asyncrpc/exceptions.py
@@ -0,0 +1,21 @@
+#
+# Copyright BitBake Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+
+class ClientError(Exception):
+ pass
+
+
+class InvokeError(Exception):
+ pass
+
+
+class ServerError(Exception):
+ pass
+
+
+class ConnectionClosedError(Exception):
+ pass
diff --git a/lib/bb/asyncrpc/serv.py b/lib/bb/asyncrpc/serv.py
index cb3384639..a66117aca 100644
--- a/lib/bb/asyncrpc/serv.py
+++ b/lib/bb/asyncrpc/serv.py
@@ -1,4 +1,6 @@
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -9,210 +11,381 @@ import os
import signal
import socket
import sys
-from . import chunkify, DEFAULT_MAX_CHUNK
-
-
-class ClientError(Exception):
- pass
+import multiprocessing
+import logging
+from .connection import StreamConnection, WebsocketConnection
+from .exceptions import ClientError, ServerError, ConnectionClosedError, InvokeError
-class ServerError(Exception):
- pass
+class ClientLoggerAdapter(logging.LoggerAdapter):
+ def process(self, msg, kwargs):
+ return f"[Client {self.extra['address']}] {msg}", kwargs
class AsyncServerConnection(object):
- def __init__(self, reader, writer, proto_name, logger):
- self.reader = reader
- self.writer = writer
+ # If a handler returns this object (e.g. `return self.NO_RESPONSE`), no
+ # return message will automatically be sent back to the client
+ NO_RESPONSE = object()
+
+ def __init__(self, socket, proto_name, logger):
+ self.socket = socket
self.proto_name = proto_name
- self.max_chunk = DEFAULT_MAX_CHUNK
self.handlers = {
- 'chunk-stream': self.handle_chunk,
+ "ping": self.handle_ping,
}
- self.logger = logger
+ self.logger = ClientLoggerAdapter(
+ logger,
+ {
+ "address": socket.address,
+ },
+ )
+ self.client_headers = {}
+
+ async def close(self):
+ await self.socket.close()
+
+ async def handle_headers(self, headers):
+ return {}
async def process_requests(self):
try:
- self.addr = self.writer.get_extra_info('peername')
- self.logger.debug('Client %r connected' % (self.addr,))
+ self.logger.info("Client %r connected" % (self.socket.address,))
# Read protocol and version
- client_protocol = await self.reader.readline()
- if client_protocol is None:
+ client_protocol = await self.socket.recv()
+ if not client_protocol:
return
- (client_proto_name, client_proto_version) = client_protocol.decode('utf-8').rstrip().split()
+ (client_proto_name, client_proto_version) = client_protocol.split()
if client_proto_name != self.proto_name:
- self.logger.debug('Rejecting invalid protocol %s' % (self.proto_name))
+ self.logger.debug("Rejecting invalid protocol %s" % (self.proto_name))
return
- self.proto_version = tuple(int(v) for v in client_proto_version.split('.'))
+ self.proto_version = tuple(int(v) for v in client_proto_version.split("."))
if not self.validate_proto_version():
- self.logger.debug('Rejecting invalid protocol version %s' % (client_proto_version))
+ self.logger.debug(
+ "Rejecting invalid protocol version %s" % (client_proto_version)
+ )
return
- # Read headers. Currently, no headers are implemented, so look for
- # an empty line to signal the end of the headers
+ # Read headers
+ self.client_headers = {}
while True:
- line = await self.reader.readline()
- if line is None:
- return
-
- line = line.decode('utf-8').rstrip()
- if not line:
+ header = await self.socket.recv()
+ if not header:
+ # Empty line. End of headers
break
+ tag, value = header.split(":", 1)
+ self.client_headers[tag.lower()] = value.strip()
+
+ if self.client_headers.get("needs-headers", "false") == "true":
+ for k, v in (await self.handle_headers(self.client_headers)).items():
+ await self.socket.send("%s: %s" % (k, v))
+ await self.socket.send("")
# Handle messages
while True:
- d = await self.read_message()
+ d = await self.socket.recv_message()
if d is None:
break
- await self.dispatch_message(d)
- await self.writer.drain()
- except ClientError as e:
+ try:
+ response = await self.dispatch_message(d)
+ except InvokeError as e:
+ await self.socket.send_message(
+ {"invoke-error": {"message": str(e)}}
+ )
+ break
+
+ if response is not self.NO_RESPONSE:
+ await self.socket.send_message(response)
+
+ except ConnectionClosedError as e:
+ self.logger.info(str(e))
+ except (ClientError, ConnectionError) as e:
self.logger.error(str(e))
finally:
- self.writer.close()
+ await self.close()
async def dispatch_message(self, msg):
for k in self.handlers.keys():
if k in msg:
- self.logger.debug('Handling %s' % k)
- await self.handlers[k](msg[k])
- return
+ self.logger.debug("Handling %s" % k)
+ return await self.handlers[k](msg[k])
raise ClientError("Unrecognized command %r" % msg)
- def write_message(self, msg):
- for c in chunkify(json.dumps(msg), self.max_chunk):
- self.writer.write(c.encode('utf-8'))
-
- async def read_message(self):
- l = await self.reader.readline()
- if not l:
- return None
-
- try:
- message = l.decode('utf-8')
-
- if not message.endswith('\n'):
- return None
+ async def handle_ping(self, request):
+ return {"alive": True}
- return json.loads(message)
- except (json.JSONDecodeError, UnicodeDecodeError) as e:
- self.logger.error('Bad message from client: %r' % message)
- raise e
- async def handle_chunk(self, request):
- lines = []
- try:
- while True:
- l = await self.reader.readline()
- l = l.rstrip(b"\n").decode("utf-8")
- if not l:
- break
- lines.append(l)
-
- msg = json.loads(''.join(lines))
- except (json.JSONDecodeError, UnicodeDecodeError) as e:
- self.logger.error('Bad message from client: %r' % lines)
- raise e
+class StreamServer(object):
+ def __init__(self, handler, logger):
+ self.handler = handler
+ self.logger = logger
+ self.closed = False
- if 'chunk-stream' in msg:
- raise ClientError("Nested chunks are not allowed")
+ async def handle_stream_client(self, reader, writer):
+ # writer.transport.set_write_buffer_limits(0)
+ socket = StreamConnection(reader, writer, -1)
+ if self.closed:
+ await socket.close()
+ return
- await self.dispatch_message(msg)
+ await self.handler(socket)
+ async def stop(self):
+ self.closed = True
-class AsyncServer(object):
- def __init__(self, logger, loop=None):
- if loop is None:
- self.loop = asyncio.new_event_loop()
- self.close_loop = True
- else:
- self.loop = loop
- self.close_loop = False
- self._cleanup_socket = None
- self.logger = logger
+class TCPStreamServer(StreamServer):
+ def __init__(self, host, port, handler, logger):
+ super().__init__(handler, logger)
+ self.host = host
+ self.port = port
- def start_tcp_server(self, host, port):
- self.server = self.loop.run_until_complete(
- asyncio.start_server(self.handle_client, host, port, loop=self.loop)
+ def start(self, loop):
+ self.server = loop.run_until_complete(
+ asyncio.start_server(self.handle_stream_client, self.host, self.port)
)
for s in self.server.sockets:
- self.logger.info('Listening on %r' % (s.getsockname(),))
+ self.logger.debug("Listening on %r" % (s.getsockname(),))
# Newer python does this automatically. Do it manually here for
# maximum compatibility
s.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, 1)
s.setsockopt(socket.SOL_TCP, socket.TCP_QUICKACK, 1)
+ # Enable keep alives. This prevents broken client connections
+ # from persisting on the server for long periods of time.
+ s.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
+ s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 30)
+ s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 15)
+ s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 4)
+
name = self.server.sockets[0].getsockname()
if self.server.sockets[0].family == socket.AF_INET6:
self.address = "[%s]:%d" % (name[0], name[1])
else:
self.address = "%s:%d" % (name[0], name[1])
- def start_unix_server(self, path):
- def cleanup():
- os.unlink(path)
+ return [self.server.wait_closed()]
+
+ async def stop(self):
+ await super().stop()
+ self.server.close()
+ def cleanup(self):
+ pass
+
+
+class UnixStreamServer(StreamServer):
+ def __init__(self, path, handler, logger):
+ super().__init__(handler, logger)
+ self.path = path
+
+ def start(self, loop):
cwd = os.getcwd()
try:
# Work around path length limits in AF_UNIX
- os.chdir(os.path.dirname(path))
- self.server = self.loop.run_until_complete(
- asyncio.start_unix_server(self.handle_client, os.path.basename(path), loop=self.loop)
+ os.chdir(os.path.dirname(self.path))
+ self.server = loop.run_until_complete(
+ asyncio.start_unix_server(
+ self.handle_stream_client, os.path.basename(self.path)
+ )
)
finally:
os.chdir(cwd)
- self.logger.info('Listening on %r' % path)
+ self.logger.debug("Listening on %r" % self.path)
+ self.address = "unix://%s" % os.path.abspath(self.path)
+ return [self.server.wait_closed()]
- self._cleanup_socket = cleanup
- self.address = "unix://%s" % os.path.abspath(path)
+ async def stop(self):
+ await super().stop()
+ self.server.close()
- @abc.abstractmethod
- def accept_client(self, reader, writer):
+ def cleanup(self):
+ os.unlink(self.path)
+
+
+class WebsocketsServer(object):
+ def __init__(self, host, port, handler, logger):
+ self.host = host
+ self.port = port
+ self.handler = handler
+ self.logger = logger
+
+ def start(self, loop):
+ import websockets.server
+
+ self.server = loop.run_until_complete(
+ websockets.server.serve(
+ self.client_handler,
+ self.host,
+ self.port,
+ ping_interval=None,
+ )
+ )
+
+ for s in self.server.sockets:
+ self.logger.debug("Listening on %r" % (s.getsockname(),))
+
+ # Enable keep alives. This prevents broken client connections
+ # from persisting on the server for long periods of time.
+ s.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
+ s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 30)
+ s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 15)
+ s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 4)
+
+ name = self.server.sockets[0].getsockname()
+ if self.server.sockets[0].family == socket.AF_INET6:
+ self.address = "ws://[%s]:%d" % (name[0], name[1])
+ else:
+ self.address = "ws://%s:%d" % (name[0], name[1])
+
+ return [self.server.wait_closed()]
+
+ async def stop(self):
+ self.server.close()
+
+ def cleanup(self):
pass
- async def handle_client(self, reader, writer):
- # writer.transport.set_write_buffer_limits(0)
+ async def client_handler(self, websocket):
+ socket = WebsocketConnection(websocket, -1)
+ await self.handler(socket)
+
+
+class AsyncServer(object):
+ def __init__(self, logger):
+ self.logger = logger
+ self.loop = None
+ self.run_tasks = []
+
+ def start_tcp_server(self, host, port):
+ self.server = TCPStreamServer(host, port, self._client_handler, self.logger)
+
+ def start_unix_server(self, path):
+ self.server = UnixStreamServer(path, self._client_handler, self.logger)
+
+ def start_websocket_server(self, host, port):
+ self.server = WebsocketsServer(host, port, self._client_handler, self.logger)
+
+ async def _client_handler(self, socket):
+ address = socket.address
try:
- client = self.accept_client(reader, writer)
+ client = self.accept_client(socket)
await client.process_requests()
except Exception as e:
import traceback
- self.logger.error('Error from client: %s' % str(e), exc_info=True)
+
+ self.logger.error(
+ "Error from client %s: %s" % (address, str(e)), exc_info=True
+ )
traceback.print_exc()
- writer.close()
- self.logger.info('Client disconnected')
+ finally:
+ self.logger.debug("Client %s disconnected", address)
+ await socket.close()
- def run_loop_forever(self):
- try:
- self.loop.run_forever()
- except KeyboardInterrupt:
- pass
+ @abc.abstractmethod
+ def accept_client(self, socket):
+ pass
+
+ async def stop(self):
+ self.logger.debug("Stopping server")
+ await self.server.stop()
+
+ def start(self):
+ tasks = self.server.start(self.loop)
+ self.address = self.server.address
+ return tasks
def signal_handler(self):
- self.loop.stop()
+ self.logger.debug("Got exit signal")
+ self.loop.create_task(self.stop())
- def serve_forever(self):
- asyncio.set_event_loop(self.loop)
+ def _serve_forever(self, tasks):
try:
self.loop.add_signal_handler(signal.SIGTERM, self.signal_handler)
+ self.loop.add_signal_handler(signal.SIGINT, self.signal_handler)
+ self.loop.add_signal_handler(signal.SIGQUIT, self.signal_handler)
+ signal.pthread_sigmask(signal.SIG_UNBLOCK, [signal.SIGTERM])
- self.run_loop_forever()
- self.server.close()
+ self.loop.run_until_complete(asyncio.gather(*tasks))
- self.loop.run_until_complete(self.server.wait_closed())
- self.logger.info('Server shutting down')
+ self.logger.debug("Server shutting down")
finally:
- if self.close_loop:
- if sys.version_info >= (3, 6):
- self.loop.run_until_complete(self.loop.shutdown_asyncgens())
- self.loop.close()
+ self.server.cleanup()
+
+ def serve_forever(self):
+ """
+ Serve requests in the current process
+ """
+ self._create_loop()
+ tasks = self.start()
+ self._serve_forever(tasks)
+ self.loop.close()
+
+ def _create_loop(self):
+ # Create loop and override any loop that may have existed in
+ # a parent process. It is possible that the usecases of
+ # serve_forever might be constrained enough to allow using
+ # get_event_loop here, but better safe than sorry for now.
+ self.loop = asyncio.new_event_loop()
+ asyncio.set_event_loop(self.loop)
+
+ def serve_as_process(self, *, prefunc=None, args=(), log_level=None):
+ """
+ Serve requests in a child process
+ """
+
+ def run(queue):
+ # Create loop and override any loop that may have existed
+ # in a parent process. Without doing this and instead
+ # using get_event_loop, at the very minimum the hashserv
+ # unit tests will hang when running the second test.
+ # This happens since get_event_loop in the spawned server
+ # process for the second testcase ends up with the loop
+ # from the hashserv client created in the unit test process
+ # when running the first testcase. The problem is somewhat
+ # more general, though, as any potential use of asyncio in
+ # Cooker could create a loop that needs to replaced in this
+ # new process.
+ self._create_loop()
+ try:
+ self.address = None
+ tasks = self.start()
+ finally:
+ # Always put the server address to wake up the parent task
+ queue.put(self.address)
+ queue.close()
+
+ if prefunc is not None:
+ prefunc(self, *args)
+
+ if log_level is not None:
+ self.logger.setLevel(log_level)
+
+ self._serve_forever(tasks)
+
+ if sys.version_info >= (3, 6):
+ self.loop.run_until_complete(self.loop.shutdown_asyncgens())
+ self.loop.close()
+
+ queue = multiprocessing.Queue()
+
+ # Temporarily block SIGTERM. The server process will inherit this
+ # block which will ensure it doesn't receive the SIGTERM until the
+ # handler is ready for it
+ mask = signal.pthread_sigmask(signal.SIG_BLOCK, [signal.SIGTERM])
+ try:
+ self.process = multiprocessing.Process(target=run, args=(queue,))
+ self.process.start()
- if self._cleanup_socket is not None:
- self._cleanup_socket()
+ self.address = queue.get()
+ queue.close()
+ queue.join_thread()
+
+ return self.process
+ finally:
+ signal.pthread_sigmask(signal.SIG_SETMASK, mask)
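A minimal sketch of the server lifecycle built on this API, assuming
EchoConnection is a hypothetical AsyncServerConnection subclass:

    import logging

    class EchoServer(AsyncServer):
        def accept_client(self, socket):
            # EchoConnection: hypothetical AsyncServerConnection subclass
            return EchoConnection(socket, "echo", self.logger)

    server = EchoServer(logging.getLogger("Echo"))
    server.start_unix_server("/tmp/echo.sock")
    process = server.serve_as_process()  # forks; parent can read server.address
    try:
        pass  # interact with the server here
    finally:
        process.terminate()
        process.join()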
diff --git a/lib/bb/build.py b/lib/bb/build.py
index b2715fc53..44d08f5c5 100644
--- a/lib/bb/build.py
+++ b/lib/bb/build.py
@@ -20,10 +20,12 @@ import itertools
import time
import re
import stat
+import datetime
import bb
import bb.msg
import bb.process
import bb.progress
+from io import StringIO
from bb import data, event, utils
bblogger = logging.getLogger('BitBake')
@@ -176,7 +178,9 @@ class StdoutNoopContextManager:
@property
def name(self):
- return sys.stdout.name
+ if "name" in dir(sys.stdout):
+ return sys.stdout.name
+ return "<mem>"
def exec_func(func, d, dirs = None):
@@ -295,9 +299,25 @@ def exec_func_python(func, d, runfile, cwd=None):
lineno = int(d.getVarFlag(func, "lineno", False))
bb.methodpool.insert_method(func, text, fn, lineno - 1)
- comp = utils.better_compile(code, func, "exec_python_func() autogenerated")
- utils.better_exec(comp, {"d": d}, code, "exec_python_func() autogenerated")
+ if verboseStdoutLogging:
+ sys.stdout.flush()
+ sys.stderr.flush()
+ currout = sys.stdout
+ currerr = sys.stderr
+ sys.stderr = sys.stdout = execio = StringIO()
+ comp = utils.better_compile(code, func, "exec_func_python() autogenerated")
+ utils.better_exec(comp, {"d": d}, code, "exec_func_python() autogenerated")
finally:
+ if verboseStdoutLogging:
+ execio.flush()
+ logger.plain("%s" % execio.getvalue())
+ sys.stdout = currout
+ sys.stderr = currerr
+ execio.close()
+ # We want any stdout/stderr to be printed before any other log messages to make debugging
+ # more accurate. In some cases we seem to lose stdout/stderr entirely in logging tests without this.
+ sys.stdout.flush()
+ sys.stderr.flush()
bb.debug(2, "Python function %s finished" % func)
if cwd and olddir:
@@ -436,7 +456,11 @@ exit $ret
if fakerootcmd:
cmd = [fakerootcmd, runfile]
- if verboseStdoutLogging:
+ # We only want to output to logger via LogTee if stdout is sys.__stdout__ (which will either
+ # be real stdout or subprocess PIPE or similar). In other cases we are being run "recursively",
+ # ie. inside another function, in which case stdout is already being captured so we don't
+ # want to Tee here as output would be printed twice, and out of order.
+ if verboseStdoutLogging and sys.stdout == sys.__stdout__:
logfile = LogTee(logger, StdoutNoopContextManager())
else:
logfile = StdoutNoopContextManager()
@@ -565,10 +589,8 @@ exit $ret
def _task_data(fn, task, d):
localdata = bb.data.createCopy(d)
localdata.setVar('BB_FILENAME', fn)
- localdata.setVar('BB_CURRENTTASK', task[3:])
localdata.setVar('OVERRIDES', 'task-%s:%s' %
(task[3:].replace('_', '-'), d.getVar('OVERRIDES', False)))
- localdata.finalize()
bb.data.expandKeys(localdata)
return localdata
@@ -579,7 +601,7 @@ def _exec_task(fn, task, d, quieterr):
running it with its own local metadata, and with some useful variables set.
"""
if not d.getVarFlag(task, 'task', False):
- event.fire(TaskInvalid(task, d), d)
+ event.fire(TaskInvalid(task, fn, d), d)
logger.error("No such task: %s" % task)
return 1
@@ -615,7 +637,8 @@ def _exec_task(fn, task, d, quieterr):
logorder = os.path.join(tempdir, 'log.task_order')
try:
with open(logorder, 'a') as logorderfile:
- logorderfile.write('{0} ({1}): {2}\n'.format(task, os.getpid(), logbase))
+ timestamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S.%f")
+ logorderfile.write('{0} {1} ({2}): {3}\n'.format(timestamp, task, os.getpid(), logbase))
except OSError:
logger.exception("Opening log file '%s'", logorder)
pass
@@ -682,47 +705,55 @@ def _exec_task(fn, task, d, quieterr):
try:
try:
event.fire(TaskStarted(task, fn, logfn, flags, localdata), localdata)
- except (bb.BBHandledException, SystemExit):
- return 1
- try:
for func in (prefuncs or '').split():
exec_func(func, localdata)
exec_func(task, localdata)
for func in (postfuncs or '').split():
exec_func(func, localdata)
- except bb.BBHandledException:
- event.fire(TaskFailed(task, fn, logfn, localdata, True), localdata)
- return 1
- except Exception as exc:
- if quieterr:
- event.fire(TaskFailedSilent(task, fn, logfn, localdata), localdata)
- else:
- errprinted = errchk.triggered
- logger.error(str(exc))
- event.fire(TaskFailed(task, fn, logfn, localdata, errprinted), localdata)
- return 1
- finally:
- sys.stdout.flush()
- sys.stderr.flush()
-
- bblogger.removeHandler(handler)
-
- # Restore the backup fds
- os.dup2(osi[0], osi[1])
- os.dup2(oso[0], oso[1])
- os.dup2(ose[0], ose[1])
-
- # Close the backup fds
- os.close(osi[0])
- os.close(oso[0])
- os.close(ose[0])
+ finally:
+ # Need to flush and close the logs before sending events where the
+ # UI may try to look at the logs.
+ sys.stdout.flush()
+ sys.stderr.flush()
+
+ bblogger.removeHandler(handler)
+
+ # Restore the backup fds
+ os.dup2(osi[0], osi[1])
+ os.dup2(oso[0], oso[1])
+ os.dup2(ose[0], ose[1])
+
+ # Close the backup fds
+ os.close(osi[0])
+ os.close(oso[0])
+ os.close(ose[0])
+
+ logfile.close()
+ if os.path.exists(logfn) and os.path.getsize(logfn) == 0:
+ logger.debug2("Zero size logfn %s, removing", logfn)
+ bb.utils.remove(logfn)
+ bb.utils.remove(loglink)
+ except (Exception, SystemExit) as exc:
+ handled = False
+ if isinstance(exc, bb.BBHandledException):
+ handled = True
+
+ if quieterr:
+ if not handled:
+ logger.warning(repr(exc))
+ event.fire(TaskFailedSilent(task, fn, logfn, localdata), localdata)
+ else:
+ errprinted = errchk.triggered
+ # If the output is already on stdout, we've printed the information in the
+ # logs once already so don't duplicate
+ if verboseStdoutLogging or handled:
+ errprinted = True
+ if not handled:
+ logger.error(repr(exc))
+ event.fire(TaskFailed(task, fn, logfn, localdata, errprinted), localdata)
+ return 1
- logfile.close()
- if os.path.exists(logfn) and os.path.getsize(logfn) == 0:
- logger.debug2("Zero size logfn %s, removing", logfn)
- bb.utils.remove(logfn)
- bb.utils.remove(loglink)
event.fire(TaskSucceeded(task, fn, logfn, localdata), localdata)
if not localdata.getVarFlag(task, 'nostamp', False) and not localdata.getVarFlag(task, 'selfstamp', False):
@@ -760,44 +791,7 @@ def exec_task(fn, task, d, profile = False):
event.fire(failedevent, d)
return 1
-def stamp_internal(taskname, d, file_name, baseonly=False, noextra=False):
- """
- Internal stamp helper function
- Makes sure the stamp directory exists
- Returns the stamp path+filename
-
- In the bitbake core, d can be a CacheData and file_name will be set.
- When called in task context, d will be a data store, file_name will not be set
- """
- taskflagname = taskname
- if taskname.endswith("_setscene") and taskname != "do_setscene":
- taskflagname = taskname.replace("_setscene", "")
-
- if file_name:
- stamp = d.stamp[file_name]
- extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or ""
- else:
- stamp = d.getVar('STAMP')
- file_name = d.getVar('BB_FILENAME')
- extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info') or ""
-
- if baseonly:
- return stamp
- if noextra:
- extrainfo = ""
-
- if not stamp:
- return
-
- stamp = bb.parse.siggen.stampfile(stamp, file_name, taskname, extrainfo)
-
- stampdir = os.path.dirname(stamp)
- if cached_mtime_noerror(stampdir) == 0:
- bb.utils.mkdirhier(stampdir)
-
- return stamp
-
-def stamp_cleanmask_internal(taskname, d, file_name):
+def _get_cleanmask(taskname, mcfn):
"""
Internal stamp helper function to generate stamp cleaning mask
Returns the stamp path+filename
@@ -805,31 +799,14 @@ def stamp_cleanmask_internal(taskname, d, file_name):
In the bitbake core, d can be a CacheData and file_name will be set.
When called in task context, d will be a data store, file_name will not be set
"""
- taskflagname = taskname
- if taskname.endswith("_setscene") and taskname != "do_setscene":
- taskflagname = taskname.replace("_setscene", "")
-
- if file_name:
- stamp = d.stampclean[file_name]
- extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or ""
- else:
- stamp = d.getVar('STAMPCLEAN')
- file_name = d.getVar('BB_FILENAME')
- extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info') or ""
-
- if not stamp:
- return []
-
- cleanmask = bb.parse.siggen.stampcleanmask(stamp, file_name, taskname, extrainfo)
-
- return [cleanmask, cleanmask.replace(taskflagname, taskflagname + "_setscene")]
-
-def make_stamp(task, d, file_name = None):
- """
- Creates/updates a stamp for a given task
- (d can be a data dict or dataCache)
- """
- cleanmask = stamp_cleanmask_internal(task, d, file_name)
+ cleanmask = bb.parse.siggen.stampcleanmask_mcfn(taskname, mcfn)
+ taskflagname = taskname.replace("_setscene", "")
+ if cleanmask:
+ return [cleanmask, cleanmask.replace(taskflagname, taskflagname + "_setscene")]
+ return []
+
+def clean_stamp_mcfn(task, mcfn):
+ cleanmask = _get_cleanmask(task, mcfn)
for mask in cleanmask:
for name in glob.glob(mask):
# Preserve sigdata files in the stamps directory
@@ -840,24 +817,45 @@ def make_stamp(task, d, file_name = None):
continue
os.unlink(name)
- stamp = stamp_internal(task, d, file_name)
+def clean_stamp(task, d):
+ mcfn = d.getVar('BB_FILENAME')
+ clean_stamp_mcfn(task, mcfn)
+
+def make_stamp_mcfn(task, mcfn):
+
+ basestamp = bb.parse.siggen.stampfile_mcfn(task, mcfn)
+
+ stampdir = os.path.dirname(basestamp)
+ if cached_mtime_noerror(stampdir) == 0:
+ bb.utils.mkdirhier(stampdir)
+
+ clean_stamp_mcfn(task, mcfn)
+
# Remove the file and recreate to force timestamp
# change on broken NFS filesystems
- if stamp:
- bb.utils.remove(stamp)
- open(stamp, "w").close()
+ if basestamp:
+ bb.utils.remove(basestamp)
+ open(basestamp, "w").close()
+
+def make_stamp(task, d):
+ """
+ Creates/updates a stamp for a given task
+ """
+ mcfn = d.getVar('BB_FILENAME')
+
+ make_stamp_mcfn(task, mcfn)
# If we're in task context, write out a signature file for each task
# as it completes
- if not task.endswith("_setscene") and task != "do_setscene" and not file_name:
- stampbase = stamp_internal(task, d, None, True)
- file_name = d.getVar('BB_FILENAME')
- bb.parse.siggen.dump_sigtask(file_name, task, stampbase, True)
-
-def find_stale_stamps(task, d, file_name=None):
- current = stamp_internal(task, d, file_name)
- current2 = stamp_internal(task + "_setscene", d, file_name)
- cleanmask = stamp_cleanmask_internal(task, d, file_name)
+ if not task.endswith("_setscene"):
+ stampbase = bb.parse.siggen.stampfile_base(mcfn)
+ bb.parse.siggen.dump_sigtask(mcfn, task, stampbase, True)
+
+
+def find_stale_stamps(task, mcfn):
+ current = bb.parse.siggen.stampfile_mcfn(task, mcfn)
+ current2 = bb.parse.siggen.stampfile_mcfn(task + "_setscene", mcfn)
+ cleanmask = _get_cleanmask(task, mcfn)
found = []
for mask in cleanmask:
for name in glob.glob(mask):
@@ -871,38 +869,14 @@ def find_stale_stamps(task, d, file_name=None):
found.append(name)
return found
-def del_stamp(task, d, file_name = None):
- """
- Removes a stamp for a given task
- (d can be a data dict or dataCache)
- """
- stamp = stamp_internal(task, d, file_name)
- bb.utils.remove(stamp)
-
-def write_taint(task, d, file_name = None):
+def write_taint(task, d):
"""
Creates a "taint" file which will force the specified task and its
dependents to be re-run the next time by influencing the value of its
taskhash.
- (d can be a data dict or dataCache)
"""
- import uuid
- if file_name:
- taintfn = d.stamp[file_name] + '.' + task + '.taint'
- else:
- taintfn = d.getVar('STAMP') + '.' + task + '.taint'
- bb.utils.mkdirhier(os.path.dirname(taintfn))
- # The specific content of the taint file is not really important,
- # we just need it to be random, so a random UUID is used
- with open(taintfn, 'w') as taintf:
- taintf.write(str(uuid.uuid4()))
-
-def stampfile(taskname, d, file_name = None, noextra=False):
- """
- Return the stamp for a given task
- (d can be a data dict or dataCache)
- """
- return stamp_internal(taskname, d, file_name, noextra=noextra)
+ mcfn = d.getVar('BB_FILENAME')
+ bb.parse.siggen.invalidate_task(task, mcfn)
def add_tasks(tasklist, d):
task_deps = d.getVar('_task_deps', False)
@@ -927,6 +901,11 @@ def add_tasks(tasklist, d):
task_deps[name] = {}
if name in flags:
deptask = d.expand(flags[name])
+ if name in ['noexec', 'fakeroot', 'nostamp']:
+ if deptask != '1':
+ bb.warn("In a future version of BitBake, setting the '{}' flag to something other than '1' "
+ "will result in the flag not being set. See YP bug #13808.".format(name))
+
task_deps[name][task] = deptask
getTask('mcdepends')
getTask('depends')
@@ -1025,6 +1004,8 @@ def tasksbetween(task_start, task_end, d):
def follow_chain(task, endtask, chain=None):
if not chain:
chain = []
+ if task in chain:
+ bb.fatal("Circular task dependencies as %s depends on itself via the chain %s" % (task, " -> ".join(chain)))
chain.append(task)
for othertask in tasks:
if othertask == task:
diff --git a/lib/bb/cache.py b/lib/bb/cache.py
index 27eb27179..18d5574a3 100644
--- a/lib/bb/cache.py
+++ b/lib/bb/cache.py
@@ -19,14 +19,16 @@
import os
import logging
import pickle
-from collections import defaultdict, Mapping
+from collections import defaultdict
+from collections.abc import Mapping
import bb.utils
from bb import PrefixLoggerAdapter
import re
+import shutil
logger = logging.getLogger("BitBake.Cache")
-__cache_version__ = "154"
+__cache_version__ = "155"
def getCacheFile(path, filename, mc, data_hash):
mcspec = ''
@@ -53,12 +55,12 @@ class RecipeInfoCommon(object):
@classmethod
def pkgvar(cls, var, packages, metadata):
- return dict((pkg, cls.depvar("%s_%s" % (var, pkg), metadata))
+ return dict((pkg, cls.depvar("%s:%s" % (var, pkg), metadata))
for pkg in packages)
@classmethod
def taskvar(cls, var, tasks, metadata):
- return dict((task, cls.getvar("%s_task-%s" % (var, task), metadata))
+ return dict((task, cls.getvar("%s:task-%s" % (var, task), metadata))
for task in tasks)
@classmethod
@@ -103,7 +105,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
self.tasks = metadata.getVar('__BBTASKS', False)
- self.basetaskhashes = self.taskvar('BB_BASEHASH', self.tasks, metadata)
+ self.basetaskhashes = metadata.getVar('__siggen_basehashes', False) or {}
self.hashfilename = self.getvar('BB_HASHFILENAME', metadata)
self.task_deps = metadata.getVar('_task_deps', False) or {'tasks': [], 'parents': {}}
@@ -214,7 +216,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
# Collect files we may need for possible world-dep
# calculations
- if not self.not_world:
+ if not bb.utils.to_boolean(self.not_world):
cachedata.possible_world.append(fn)
#else:
# logger.debug2("EXCLUDE FROM WORLD: %s", fn)
@@ -236,15 +238,113 @@ class CoreRecipeInfo(RecipeInfoCommon):
cachedata.fakerootlogs[fn] = self.fakerootlogs
cachedata.extradepsfunc[fn] = self.extradepsfunc
+
+class SiggenRecipeInfo(RecipeInfoCommon):
+ __slots__ = ()
+
+ classname = "SiggenRecipeInfo"
+ cachefile = "bb_cache_" + classname +".dat"
+ # we don't want to show this information in graph files so don't set cachefields
+ #cachefields = []
+
+ def __init__(self, filename, metadata):
+ self.siggen_gendeps = metadata.getVar("__siggen_gendeps", False)
+ self.siggen_varvals = metadata.getVar("__siggen_varvals", False)
+ self.siggen_taskdeps = metadata.getVar("__siggen_taskdeps", False)
+
+ @classmethod
+ def init_cacheData(cls, cachedata):
+ cachedata.siggen_taskdeps = {}
+ cachedata.siggen_gendeps = {}
+ cachedata.siggen_varvals = {}
+
+ def add_cacheData(self, cachedata, fn):
+ cachedata.siggen_gendeps[fn] = self.siggen_gendeps
+ cachedata.siggen_varvals[fn] = self.siggen_varvals
+ cachedata.siggen_taskdeps[fn] = self.siggen_taskdeps
+
+ # The siggen variable data is large and impacts:
+ # - bitbake's overall memory usage
+ # - the amount of data sent over IPC between parsing processes and the server
+ # - the size of the cache files on disk
+ # - the size of "sigdata" hash information files on disk
+ # The data consists of strings (some large) or frozenset lists of variables
+ # As such, we a) deduplicate the data here and b) pass references to the object on
+ # second access (e.g. over IPC or saving into pickle).
+
+ store = {}
+ save_map = {}
+ save_count = 1
+ restore_map = {}
+ restore_count = {}
+
+ @classmethod
+ def reset(cls):
+ # Needs to be called before starting new streamed data in a given process
+ # (e.g. writing out the cache again)
+ cls.save_map = {}
+ cls.save_count = 1
+ cls.restore_map = {}
+
+ @classmethod
+ def _save(cls, deps):
+ ret = []
+ if not deps:
+ return deps
+ for dep in deps:
+ fs = deps[dep]
+ if fs is None:
+ ret.append((dep, None, None))
+ elif fs in cls.save_map:
+ ret.append((dep, None, cls.save_map[fs]))
+ else:
+ cls.save_map[fs] = cls.save_count
+ ret.append((dep, fs, cls.save_count))
+ cls.save_count = cls.save_count + 1
+ return ret
+
+ @classmethod
+ def _restore(cls, deps, pid):
+ ret = {}
+ if not deps:
+ return deps
+ if pid not in cls.restore_map:
+ cls.restore_map[pid] = {}
+ map = cls.restore_map[pid]
+ for dep, fs, mapnum in deps:
+ if fs is None and mapnum is None:
+ ret[dep] = None
+ elif fs is None:
+ ret[dep] = map[mapnum]
+ else:
+ try:
+ fs = cls.store[fs]
+ except KeyError:
+ cls.store[fs] = fs
+ map[mapnum] = fs
+ ret[dep] = fs
+ return ret
+
+ def __getstate__(self):
+ ret = {}
+ for key in ["siggen_gendeps", "siggen_taskdeps", "siggen_varvals"]:
+ ret[key] = self._save(self.__dict__[key])
+ ret['pid'] = os.getpid()
+ return ret
+
+ def __setstate__(self, state):
+ pid = state['pid']
+ for key in ["siggen_gendeps", "siggen_taskdeps", "siggen_varvals"]:
+ setattr(self, key, self._restore(state[key], pid))
+
+
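A minimal sketch of the effect of the _save()/_restore() pair above (hypothetical dependency data; the maps are class-level state, which is why reset() must be called between streams):

    # Two recipes whose tasks depend on the same set of variables:
    deps_a = {"do_compile": frozenset({"CC", "CFLAGS"})}
    deps_b = {"do_install": frozenset({"CC", "CFLAGS"})}

    saved_a = SiggenRecipeInfo._save(deps_a)  # [("do_compile", frozenset(...), 1)]
    saved_b = SiggenRecipeInfo._save(deps_b)  # [("do_install", None, 1)] - reference only

    pid = 1234  # pid of the hypothetical sending process
    restored_a = SiggenRecipeInfo._restore(saved_a, pid)
    restored_b = SiggenRecipeInfo._restore(saved_b, pid)
    assert restored_a["do_compile"] is restored_b["do_install"]  # one shared object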
def virtualfn2realfn(virtualfn):
"""
Convert a virtual file name to a real one + the associated subclass keyword
"""
mc = ""
if virtualfn.startswith('mc:') and virtualfn.count(':') >= 2:
- elems = virtualfn.split(':')
- mc = elems[1]
- virtualfn = ":".join(elems[2:])
+ (_, mc, virtualfn) = virtualfn.split(':', 2)
fn = virtualfn
cls = ""
@@ -267,7 +367,7 @@ def realfn2virtual(realfn, cls, mc):
def variant2virtual(realfn, variant):
"""
- Convert a real filename + the associated subclass keyword to a virtual filename
+ Convert a real filename + a variant to a virtual filename
"""
if variant == "":
return realfn
@@ -278,96 +378,18 @@ def variant2virtual(realfn, variant):
return "mc:" + elems[1] + ":" + realfn
return "virtual:" + variant + ":" + realfn
-def parse_recipe(bb_data, bbfile, appends, mc=''):
- """
- Parse a recipe
- """
-
- chdir_back = False
-
- bb_data.setVar("__BBMULTICONFIG", mc)
-
- # expand tmpdir to include this topdir
- bb_data.setVar('TMPDIR', bb_data.getVar('TMPDIR') or "")
- bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
- oldpath = os.path.abspath(os.getcwd())
- bb.parse.cached_mtime_noerror(bbfile_loc)
-
- # The ConfHandler first looks if there is a TOPDIR and if not
- # then it would call getcwd().
- # Previously, we chdir()ed to bbfile_loc, called the handler
- # and finally chdir()ed back, a couple of thousand times. We now
- # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
- if not bb_data.getVar('TOPDIR', False):
- chdir_back = True
- bb_data.setVar('TOPDIR', bbfile_loc)
- try:
- if appends:
- bb_data.setVar('__BBAPPEND', " ".join(appends))
- bb_data = bb.parse.handle(bbfile, bb_data)
- if chdir_back:
- os.chdir(oldpath)
- return bb_data
- except:
- if chdir_back:
- os.chdir(oldpath)
- raise
-
-
-
-class NoCache(object):
-
- def __init__(self, databuilder):
- self.databuilder = databuilder
- self.data = databuilder.data
-
- def loadDataFull(self, virtualfn, appends):
- """
- Return a complete set of data for fn.
- To do this, we need to parse the file.
- """
- logger.debug("Parsing %s (full)" % virtualfn)
- (fn, virtual, mc) = virtualfn2realfn(virtualfn)
- bb_data = self.load_bbfile(virtualfn, appends, virtonly=True)
- return bb_data[virtual]
-
- def load_bbfile(self, bbfile, appends, virtonly = False, mc=None):
- """
- Load and parse one .bb build file
- Return the data and whether parsing resulted in the file being skipped
- """
-
- if virtonly:
- (bbfile, virtual, mc) = virtualfn2realfn(bbfile)
- bb_data = self.databuilder.mcdata[mc].createCopy()
- bb_data.setVar("__ONLYFINALISE", virtual or "default")
- datastores = parse_recipe(bb_data, bbfile, appends, mc)
- return datastores
-
- if mc is not None:
- bb_data = self.databuilder.mcdata[mc].createCopy()
- return parse_recipe(bb_data, bbfile, appends, mc)
-
- bb_data = self.data.createCopy()
- datastores = parse_recipe(bb_data, bbfile, appends)
-
- for mc in self.databuilder.mcdata:
- if not mc:
- continue
- bb_data = self.databuilder.mcdata[mc].createCopy()
- newstores = parse_recipe(bb_data, bbfile, appends, mc)
- for ns in newstores:
- datastores["mc:%s:%s" % (mc, ns)] = newstores[ns]
-
- return datastores
-
-class Cache(NoCache):
+#
+# Cooker calls cacheValid on its recipe list, then either calls loadCached
+# from its main thread or parse() from separate processes to generate an
+# up-to-date cache
+#
+class Cache(object):
"""
BitBake Cache implementation
"""
def __init__(self, databuilder, mc, data_hash, caches_array):
- super().__init__(databuilder)
- data = databuilder.data
+ self.databuilder = databuilder
+ self.data = databuilder.data
# Pass caches_array information into Cache Constructor
# It will be used later for deciding whether we
@@ -375,7 +397,7 @@ class Cache(NoCache):
self.mc = mc
self.logger = PrefixLoggerAdapter("Cache: %s: " % (mc if mc else "default"), logger)
self.caches_array = caches_array
- self.cachedir = data.getVar("CACHE")
+ self.cachedir = self.data.getVar("CACHE")
self.clean = set()
self.checked = set()
self.depends_cache = {}
@@ -385,20 +407,12 @@ class Cache(NoCache):
self.filelist_regex = re.compile(r'(?:(?<=:True)|(?<=:False))\s+')
if self.cachedir in [None, '']:
- self.has_cache = False
- self.logger.info("Not using a cache. "
- "Set CACHE = <directory> to enable.")
- return
-
- self.has_cache = True
+ bb.fatal("Please ensure CACHE is set to the cache directory for BitBake to use")
def getCacheFile(self, cachefile):
return getCacheFile(self.cachedir, cachefile, self.mc, self.data_hash)
def prepare_cache(self, progress):
- if not self.has_cache:
- return 0
-
loaded = 0
self.cachefile = self.getCacheFile("bb_cache.dat")
@@ -437,9 +451,6 @@ class Cache(NoCache):
return loaded
def cachesize(self):
- if not self.has_cache:
- return 0
-
cachesize = 0
for cache_class in self.caches_array:
cachefile = self.getCacheFile(cache_class.cachefile)
@@ -501,11 +512,11 @@ class Cache(NoCache):
return len(self.depends_cache)
- def parse(self, filename, appends):
+ def parse(self, filename, appends, layername):
"""Parse the specified filename, returning the recipe information"""
self.logger.debug("Parsing %s", filename)
infos = []
- datastores = self.load_bbfile(filename, appends, mc=self.mc)
+ datastores = self.databuilder.parseRecipeVariants(filename, appends, mc=self.mc, layername=layername)
depends = []
variants = []
# Process the "real" fn last so we can store variants list
@@ -527,43 +538,19 @@ class Cache(NoCache):
return infos
- def load(self, filename, appends):
+ def loadCached(self, filename, appends):
"""Obtain the recipe information for the specified filename,
- using cached values if available, otherwise parsing.
-
- Note that if it does parse to obtain the info, it will not
- automatically add the information to the cache or to your
- CacheData. Use the add or add_info method to do so after
- running this, or use loadData instead."""
- cached = self.cacheValid(filename, appends)
- if cached:
- infos = []
- # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
- info_array = self.depends_cache[filename]
- for variant in info_array[0].variants:
- virtualfn = variant2virtual(filename, variant)
- infos.append((virtualfn, self.depends_cache[virtualfn]))
- else:
- return self.parse(filename, appends, configdata, self.caches_array)
-
- return cached, infos
-
- def loadData(self, fn, appends, cacheData):
- """Load the recipe info for the specified filename,
- parsing and adding to the cache if necessary, and adding
- the recipe information to the supplied CacheData instance."""
- skipped, virtuals = 0, 0
+ using cached values.
+ """
- cached, infos = self.load(fn, appends)
- for virtualfn, info_array in infos:
- if info_array[0].skipped:
- self.logger.debug("Skipping %s: %s", virtualfn, info_array[0].skipreason)
- skipped += 1
- else:
- self.add_info(virtualfn, info_array, cacheData, not cached)
- virtuals += 1
+ infos = []
+ # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
+ info_array = self.depends_cache[filename]
+ for variant in info_array[0].variants:
+ virtualfn = variant2virtual(filename, variant)
+ infos.append((virtualfn, self.depends_cache[virtualfn]))
- return cached, skipped, virtuals
+ return infos
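Putting the comment at the top of this class into code form, the cooker-side flow is roughly (names illustrative, error handling omitted):

    def load_recipe(cache, filename, appends, layername):
        if cache.cacheValid(filename, appends):
            # main thread: cheap dictionary lookups against depends_cache
            return cache.loadCached(filename, appends)
        # otherwise a separate parser process performs the full parse
        return cache.parse(filename, appends, layername)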
def cacheValid(self, fn, appends):
"""
@@ -572,10 +559,6 @@ class Cache(NoCache):
"""
if fn not in self.checked:
self.cacheValidUpdate(fn, appends)
-
- # Is cache enabled?
- if not self.has_cache:
- return False
if fn in self.clean:
return True
return False
@@ -585,10 +568,6 @@ class Cache(NoCache):
Is the cache valid for fn?
Make thorough (slower) checks including timestamps.
"""
- # Is cache enabled?
- if not self.has_cache:
- return False
-
self.checked.add(fn)
# File isn't in depends_cache
@@ -639,7 +618,7 @@ class Cache(NoCache):
for f in flist:
if not f:
continue
- f, exist = f.split(":")
+ f, exist = f.rsplit(":", 1)
if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
self.logger.debug2("%s's file checksum list file %s changed",
fn, f)
@@ -695,10 +674,6 @@ class Cache(NoCache):
Save the cache
Called from the parser when complete (or exiting)
"""
-
- if not self.has_cache:
- return
-
if self.cacheclean:
self.logger.debug2("Cache is clean, not saving.")
return
@@ -719,6 +694,7 @@ class Cache(NoCache):
p.dump(info)
del self.depends_cache
+ SiggenRecipeInfo.reset()
@staticmethod
def mtime(cachefile):
@@ -741,26 +717,11 @@ class Cache(NoCache):
if watcher:
watcher(info_array[0].file_depends)
- if not self.has_cache:
- return
-
if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache:
if parsed:
self.cacheclean = False
self.depends_cache[filename] = info_array
- def add(self, file_name, data, cacheData, parsed=None):
- """
- Save data we need into the cache
- """
-
- realfn = virtualfn2realfn(file_name)[0]
-
- info_array = []
- for cache_class in self.caches_array:
- info_array.append(cache_class(realfn, data))
- self.add_info(file_name, info_array, cacheData, parsed)
-
class MulticonfigCache(Mapping):
def __init__(self, databuilder, data_hash, caches_array):
def progress(p):
@@ -797,6 +758,7 @@ class MulticonfigCache(Mapping):
loaded = 0
for c in self.__caches.values():
+ SiggenRecipeInfo.reset()
loaded += c.prepare_cache(progress)
previous_progress = current_progress
@@ -874,11 +836,10 @@ class MultiProcessCache(object):
self.cachedata = self.create_cachedata()
self.cachedata_extras = self.create_cachedata()
- def init_cache(self, d, cache_file_name=None):
- cachedir = (d.getVar("PERSISTENT_DIR") or
- d.getVar("CACHE"))
- if cachedir in [None, '']:
+ def init_cache(self, cachedir, cache_file_name=None):
+ if not cachedir:
return
+
bb.utils.mkdirhier(cachedir)
self.cachefile = os.path.join(cachedir,
cache_file_name or self.__class__.cache_file_name)
@@ -909,6 +870,10 @@ class MultiProcessCache(object):
if not self.cachefile:
return
+ have_data = any(self.cachedata_extras)
+ if not have_data:
+ return
+
glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True)
i = os.getpid()
@@ -943,6 +908,8 @@ class MultiProcessCache(object):
data = self.cachedata
+ have_data = False
+
for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
f = os.path.join(os.path.dirname(self.cachefile), f)
try:
@@ -957,12 +924,14 @@ class MultiProcessCache(object):
os.unlink(f)
continue
+ have_data = True
self.merge_data(extradata, data)
os.unlink(f)
- with open(self.cachefile, "wb") as f:
- p = pickle.Pickler(f, -1)
- p.dump([data, self.__class__.CACHE_VERSION])
+ if have_data:
+ with open(self.cachefile, "wb") as f:
+ p = pickle.Pickler(f, -1)
+ p.dump([data, self.__class__.CACHE_VERSION])
bb.utils.unlockfile(glf)
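For orientation, the on-disk layout this save/merge pair manages, assuming a hypothetical cache file name:

    # CACHE/bb_codeparser.dat        merged cache; rewritten only when have_data
    # CACHE/bb_codeparser.dat-1234   extras from worker pid 1234, merged then unlinked
    # CACHE/bb_codeparser.dat-1235   extras from worker pid 1235, merged then unlinked
    # CACHE/bb_codeparser.dat.lock   bb.utils.lockfile() guard (shared for writers,
    #                                exclusive for the merge)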
@@ -1018,3 +987,11 @@ class SimpleCache(object):
p.dump([data, self.cacheversion])
bb.utils.unlockfile(glf)
+
+ def copyfile(self, target):
+ if not self.cachefile:
+ return
+
+ glf = bb.utils.lockfile(self.cachefile + ".lock")
+ shutil.copy(self.cachefile, target)
+ bb.utils.unlockfile(glf)
diff --git a/lib/bb/checksum.py b/lib/bb/checksum.py
index 1d50a2642..557793d36 100644
--- a/lib/bb/checksum.py
+++ b/lib/bb/checksum.py
@@ -11,10 +11,13 @@ import os
import stat
import bb.utils
import logging
+import re
from bb.cache import MultiProcessCache
logger = logging.getLogger("BitBake.Cache")
+filelist_regex = re.compile(r'(?:(?<=:True)|(?<=:False))\s+')
+
# mtime cache (non-persistent)
# based upon the assumption that files do not change during bitbake run
class FileMtimeCache(object):
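The new filelist_regex splits serialized checksum lists only at whitespace that follows a ":True"/":False" existence marker, so paths containing spaces survive the split. A quick illustration (hypothetical paths):

    import re
    filelist_regex = re.compile(r'(?:(?<=:True)|(?<=:False))\s+')
    flist = "/srv/a.patch:True /srv/has space/b.patch:False /srv/c.patch:True"
    filelist_regex.split(flist)
    # -> ['/srv/a.patch:True', '/srv/has space/b.patch:False', '/srv/c.patch:True']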
@@ -50,6 +53,7 @@ class FileChecksumCache(MultiProcessCache):
MultiProcessCache.__init__(self)
def get_checksum(self, f):
+ f = os.path.normpath(f)
entry = self.cachedata[0].get(f)
cmtime = self.mtime_cache.cached_mtime(f)
if entry:
@@ -84,22 +88,36 @@ class FileChecksumCache(MultiProcessCache):
return None
return checksum
+ #
+ # Changing the format of file-checksums is problematic as both OE and Bitbake have
+ # knowledge of them. We need to encode a new piece of data, the portion of the path
+ # we care about from a checksum perspective. This means that files that change subdirectory
+ # are tracked by the task hashes. To do this, we do something horrible and put a "/./" into
+ # the path. The filesystem handles it but it gives us a marker to know which subsection
+ # of the path to cache.
+ #
def checksum_dir(pth):
# Handle directories recursively
if pth == "/":
bb.fatal("Refusing to checksum /")
+ pth = pth.rstrip("/")
dirchecksums = []
for root, dirs, files in os.walk(pth, topdown=True):
[dirs.remove(d) for d in list(dirs) if d in localdirsexclude]
for name in files:
- fullpth = os.path.join(root, name)
+ fullpth = os.path.join(root, name).replace(pth, os.path.join(pth, "."))
checksum = checksum_file(fullpth)
if checksum:
dirchecksums.append((fullpth, checksum))
return dirchecksums
checksums = []
- for pth in filelist.split():
+ for pth in filelist_regex.split(filelist):
+ if not pth:
+ continue
+ pth = pth.strip()
+ if not pth:
+ continue
exist = pth.split(":")[1]
if exist == "False":
continue
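The "/./" marker in practice: with pth = "/srv/src" (hypothetical), a walked file "/srv/src/sub/f.c" is recorded as "/srv/src/./sub/f.c". The filesystem resolves both paths identically, but consumers can split on the marker to recover the portion that should feed the task hash:

    fullpth = "/srv/src/./sub/f.c"          # as produced by checksum_dir() above
    base, rel = fullpth.split("/./", 1)
    # base == "/srv/src"   - where the tree currently lives
    # rel  == "sub/f.c"    - the sub-path that matters for the checksum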
diff --git a/lib/bb/codeparser.py b/lib/bb/codeparser.py
index 0cec452c0..2e8b7ced3 100644
--- a/lib/bb/codeparser.py
+++ b/lib/bb/codeparser.py
@@ -1,4 +1,6 @@
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -25,6 +27,7 @@ import ast
import sys
import codegen
import logging
+import inspect
import bb.pysh as pysh
import bb.utils, bb.data
import hashlib
@@ -56,10 +59,40 @@ def check_indent(codestr):
return codestr
-# A custom getstate/setstate using tuples is actually worth 15% cachesize by
-# avoiding duplication of the attribute names!
+modulecode_deps = {}
+def add_module_functions(fn, functions, namespace):
+ import os
+ fstat = os.stat(fn)
+ fixedhash = fn + ":" + str(fstat.st_size) + ":" + str(fstat.st_mtime)
+ for f in functions:
+ name = "%s.%s" % (namespace, f)
+ parser = PythonParser(name, logger)
+ try:
+ parser.parse_python(None, filename=fn, lineno=1, fixedhash=fixedhash+f)
+ #bb.warn("Cached %s" % f)
+ except KeyError:
+ lines, lineno = inspect.getsourcelines(functions[f])
+ src = "".join(lines)
+ parser.parse_python(src, filename=fn, lineno=lineno, fixedhash=fixedhash+f)
+ #bb.warn("Not cached %s" % f)
+ execs = parser.execs.copy()
+ # Expand internal module exec references
+ for e in parser.execs:
+ if e in functions:
+ execs.remove(e)
+ execs.add(namespace + "." + e)
+ modulecode_deps[name] = [parser.references.copy(), execs, parser.var_execs.copy(), parser.contains.copy()]
+ #bb.warn("%s: %s\nRefs:%s Execs: %s %s %s" % (name, fn, parser.references, parser.execs, parser.var_execs, parser.contains))
+
+def update_module_dependencies(d):
+ for mod in modulecode_deps:
+ excludes = set((d.getVarFlag(mod, "vardepsexclude") or "").split())
+ if excludes:
+ modulecode_deps[mod] = [modulecode_deps[mod][0] - excludes, modulecode_deps[mod][1] - excludes, modulecode_deps[mod][2] - excludes, modulecode_deps[mod][3]]
+# A custom getstate/setstate using tuples is actually worth 15% cachesize by
+# avoiding duplication of the attribute names!
class SetCache(object):
def __init__(self):
self.setcache = {}
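The cachesize saving mentioned above comes from pickling positional tuples rather than each instance's __dict__, which embeds every attribute name in every record. A generic sketch of the idiom (hypothetical class, not from this patch):

    class CacheLine:
        __slots__ = ("refs", "execs", "contains")

        def __init__(self, refs, execs, contains):
            self.refs, self.execs, self.contains = refs, execs, contains

        def __getstate__(self):
            return (self.refs, self.execs, self.contains)  # no attribute names

        def __setstate__(self, state):
            self.refs, self.execs, self.contains = state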
@@ -152,12 +185,12 @@ class CodeParserCache(MultiProcessCache):
self.shellcachelines[h] = cacheline
return cacheline
- def init_cache(self, d):
+ def init_cache(self, cachedir):
# Check if we already have the caches
if self.pythoncache:
return
- MultiProcessCache.init_cache(self, d)
+ MultiProcessCache.init_cache(self, cachedir)
# cachedata gets re-assigned in the parent
self.pythoncache = self.cachedata[0]
@@ -169,8 +202,8 @@ class CodeParserCache(MultiProcessCache):
codeparsercache = CodeParserCache()
-def parser_cache_init(d):
- codeparsercache.init_cache(d)
+def parser_cache_init(cachedir):
+ codeparsercache.init_cache(cachedir)
def parser_cache_save():
codeparsercache.save_extras()
@@ -195,6 +228,10 @@ class BufferedLogger(Logger):
self.target.handle(record)
self.buffer = []
+class DummyLogger():
+ def flush(self):
+ return
+
class PythonParser():
getvars = (".getVar", ".appendVar", ".prependVar", "oe.utils.conditional")
getvarflags = (".getVarFlag", ".appendVarFlag", ".prependVarFlag")
@@ -219,19 +256,19 @@ class PythonParser():
def visit_Call(self, node):
name = self.called_node_name(node.func)
if name and (name.endswith(self.getvars) or name.endswith(self.getvarflags) or name in self.containsfuncs or name in self.containsanyfuncs):
- if isinstance(node.args[0], ast.Str):
- varname = node.args[0].s
- if name in self.containsfuncs and isinstance(node.args[1], ast.Str):
+ if isinstance(node.args[0], ast.Constant) and isinstance(node.args[0].value, str):
+ varname = node.args[0].value
+ if name in self.containsfuncs and isinstance(node.args[1], ast.Constant):
if varname not in self.contains:
self.contains[varname] = set()
- self.contains[varname].add(node.args[1].s)
- elif name in self.containsanyfuncs and isinstance(node.args[1], ast.Str):
+ self.contains[varname].add(node.args[1].value)
+ elif name in self.containsanyfuncs and isinstance(node.args[1], ast.Constant):
if varname not in self.contains:
self.contains[varname] = set()
- self.contains[varname].update(node.args[1].s.split())
+ self.contains[varname].update(node.args[1].value.split())
elif name.endswith(self.getvarflags):
- if isinstance(node.args[1], ast.Str):
- self.references.add('%s[%s]' % (varname, node.args[1].s))
+ if isinstance(node.args[1], ast.Constant):
+ self.references.add('%s[%s]' % (varname, node.args[1].value))
else:
self.warn(node.func, node.args[1])
else:
@@ -239,8 +276,8 @@ class PythonParser():
else:
self.warn(node.func, node.args[0])
elif name and name.endswith(".expand"):
- if isinstance(node.args[0], ast.Str):
- value = node.args[0].s
+ if isinstance(node.args[0], ast.Constant):
+ value = node.args[0].value
d = bb.data.init()
parser = d.expandWithRefs(value, self.name)
self.references |= parser.references
@@ -250,8 +287,8 @@ class PythonParser():
self.contains[varname] = set()
self.contains[varname] |= parser.contains[varname]
elif name in self.execfuncs:
- if isinstance(node.args[0], ast.Str):
- self.var_execs.add(node.args[0].s)
+ if isinstance(node.args[0], ast.Constant):
+ self.var_execs.add(node.args[0].value)
else:
self.warn(node.func, node.args[0])
elif name and isinstance(node.func, (ast.Name, ast.Attribute)):
@@ -276,16 +313,24 @@ class PythonParser():
self.contains = {}
self.execs = set()
self.references = set()
- self.log = BufferedLogger('BitBake.Data.PythonParser', logging.DEBUG, log)
+ self._log = log
+ # Defer init as it is expensive
+ self.log = DummyLogger()
self.unhandled_message = "in call of %s, argument '%s' is not a string literal"
self.unhandled_message = "while parsing %s, %s" % (name, self.unhandled_message)
- def parse_python(self, node, lineno=0, filename="<string>"):
- if not node or not node.strip():
+ # For the python module code it is expensive to have the function text so it
+ # uses a different fixedhash to cache against. We can take the hit on obtaining the
+ # text if it isn't in the cache.
+ def parse_python(self, node, lineno=0, filename="<string>", fixedhash=None):
+ if not fixedhash and (not node or not node.strip()):
return
- h = bbhash(str(node))
+ if fixedhash:
+ h = fixedhash
+ else:
+ h = bbhash(str(node))
if h in codeparsercache.pythoncache:
self.references = set(codeparsercache.pythoncache[h].refs)
@@ -303,6 +348,12 @@ class PythonParser():
self.contains[i] = set(codeparsercache.pythoncacheextras[h].contains[i])
return
+ if fixedhash and not node:
+ raise KeyError
+
+ # Need to parse so take the hit on the real log buffer
+ self.log = BufferedLogger('BitBake.Data.PythonParser', logging.DEBUG, self._log)
+
# We can't add to the linenumbers for compile, we can pad to the correct number of blank lines though
node = "\n" * int(lineno) + node
code = compile(check_indent(str(node)), filename, "exec",
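For module functions the cache key is thus built without reading source text at all: path plus size plus mtime plus the function name, with parse_python() raising KeyError on a miss so that add_module_functions() above can fall back to inspect.getsourcelines() and retry. The key looks like (hypothetical values):

    import os
    fn = "/layers/meta/lib/oe/utils.py"   # hypothetical module path
    fstat = os.stat(fn)
    fixedhash = fn + ":" + str(fstat.st_size) + ":" + str(fstat.st_mtime)
    # e.g. "/layers/meta/lib/oe/utils.py:18132:1700000000.0" + function name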
@@ -321,7 +372,11 @@ class ShellParser():
self.funcdefs = set()
self.allexecs = set()
self.execs = set()
- self.log = BufferedLogger('BitBake.Data.%s' % name, logging.DEBUG, log)
+ self._name = name
+ self._log = log
+ # Defer init as it is expensive
+ self.log = DummyLogger()
+
self.unhandled_template = "unable to handle non-literal command '%s'"
self.unhandled_template = "while parsing %s, %s" % (name, self.unhandled_template)
@@ -340,6 +395,9 @@ class ShellParser():
self.execs = set(codeparsercache.shellcacheextras[h].execs)
return self.execs
+ # Need to parse so take the hit on the real log buffer
+ self.log = BufferedLogger('BitBake.Data.%s' % self._name, logging.DEBUG, self._log)
+
self._parse_shell(value)
self.execs = set(cmd for cmd in self.allexecs if cmd not in self.funcdefs)
diff --git a/lib/bb/command.py b/lib/bb/command.py
index f530cf844..1fcb9bf14 100644
--- a/lib/bb/command.py
+++ b/lib/bb/command.py
@@ -51,23 +51,32 @@ class Command:
"""
A queue of asynchronous commands for bitbake
"""
- def __init__(self, cooker):
+ def __init__(self, cooker, process_server):
self.cooker = cooker
self.cmds_sync = CommandsSync()
self.cmds_async = CommandsAsync()
self.remotedatastores = None
- # FIXME Add lock for this
+ self.process_server = process_server
+ # Access with locking using process_server.{get/set/clear}_async_cmd()
self.currentAsyncCommand = None
- def runCommand(self, commandline, ro_only = False):
+ def runCommand(self, commandline, process_server, ro_only=False):
command = commandline.pop(0)
# Ensure cooker is ready for commands
- if command != "updateConfig" and command != "setFeatures":
- self.cooker.init_configdata()
- if not self.remotedatastores:
- self.remotedatastores = bb.remotedata.RemoteDatastores(self.cooker)
+ if command not in ["updateConfig", "setFeatures", "ping"]:
+ try:
+ self.cooker.init_configdata()
+ if not self.remotedatastores:
+ self.remotedatastores = bb.remotedata.RemoteDatastores(self.cooker)
+ except (Exception, SystemExit) as exc:
+ import traceback
+ if isinstance(exc, bb.BBHandledException):
+ # We need to start returning real exceptions here. Until we do, we can't
+ # tell if an exception is an instance of bb.BBHandledException
+ return None, "bb.BBHandledException()\n" + traceback.format_exc()
+ return None, traceback.format_exc()
if hasattr(CommandsSync, command):
# Can run synchronous commands straight away
@@ -76,7 +85,6 @@ class Command:
if not hasattr(command_method, 'readonly') or not getattr(command_method, 'readonly'):
return None, "Not able to execute not readonly commands in readonly mode"
try:
- self.cooker.process_inotify_updates()
if getattr(command_method, 'needconfig', True):
self.cooker.updateCacheSync()
result = command_method(self, commandline)
@@ -91,24 +99,23 @@ class Command:
return None, traceback.format_exc()
else:
return result, None
- if self.currentAsyncCommand is not None:
- return None, "Busy (%s in progress)" % self.currentAsyncCommand[0]
if command not in CommandsAsync.__dict__:
return None, "No such command"
- self.currentAsyncCommand = (command, commandline)
- self.cooker.idleCallBackRegister(self.cooker.runCommands, self.cooker)
+ if not process_server.set_async_cmd((command, commandline)):
+ return None, "Busy (%s in progress)" % self.process_server.get_async_cmd()[0]
+ self.cooker.idleCallBackRegister(self.runAsyncCommand, process_server)
return True, None
- def runAsyncCommand(self):
+ def runAsyncCommand(self, _, process_server, halt):
try:
- self.cooker.process_inotify_updates()
if self.cooker.state in (bb.cooker.state.error, bb.cooker.state.shutdown, bb.cooker.state.forceshutdown):
# updateCache will trigger a shutdown of the parser
# and then raise BBHandledException triggering an exit
self.cooker.updateCache()
- return False
- if self.currentAsyncCommand is not None:
- (command, options) = self.currentAsyncCommand
+ return bb.server.process.idleFinish("Cooker in error state")
+ cmd = process_server.get_async_cmd()
+ if cmd is not None:
+ (command, options) = cmd
commandmethod = getattr(CommandsAsync, command)
needcache = getattr( commandmethod, "needcache" )
if needcache and self.cooker.state != bb.cooker.state.running:
@@ -118,24 +125,21 @@ class Command:
commandmethod(self.cmds_async, self, options)
return False
else:
- return False
+ return bb.server.process.idleFinish("Nothing to do, no async command?")
except KeyboardInterrupt as exc:
- self.finishAsyncCommand("Interrupted")
- return False
+ return bb.server.process.idleFinish("Interrupted")
except SystemExit as exc:
arg = exc.args[0]
if isinstance(arg, str):
- self.finishAsyncCommand(arg)
+ return bb.server.process.idleFinish(arg)
else:
- self.finishAsyncCommand("Exited with %s" % arg)
- return False
+ return bb.server.process.idleFinish("Exited with %s" % arg)
except Exception as exc:
import traceback
if isinstance(exc, bb.BBHandledException):
- self.finishAsyncCommand("")
+ return bb.server.process.idleFinish("")
else:
- self.finishAsyncCommand(traceback.format_exc())
- return False
+ return bb.server.process.idleFinish(traceback.format_exc())
def finishAsyncCommand(self, msg=None, code=None):
if msg or msg == "":
@@ -144,8 +148,8 @@ class Command:
bb.event.fire(CommandExit(code), self.cooker.data)
else:
bb.event.fire(CommandCompleted(), self.cooker.data)
- self.currentAsyncCommand = None
self.cooker.finishcommand()
+ self.process_server.clear_async_cmd()
def reset(self):
if self.remotedatastores:
@@ -158,6 +162,14 @@ class CommandsSync:
These must not influence any running synchronous command.
"""
+ def ping(self, command, params):
+ """
+ Allow a UI to check the server is still alive
+ """
+ return "Still alive!"
+ ping.needconfig = False
+ ping.readonly = True
+
def stateShutdown(self, command, params):
"""
Trigger cooker 'shutdown' mode
@@ -295,6 +307,11 @@ class CommandsSync:
return ret
getLayerPriorities.readonly = True
+ def revalidateCaches(self, command, params):
+ """Called by UI clients when metadata may have changed"""
+ command.cooker.revalidateCaches()
+ revalidateCaches.needconfig = False
+
def getRecipes(self, command, params):
try:
mc = params[0]
@@ -533,8 +550,8 @@ class CommandsSync:
and return a datastore object representing the environment
for the recipe.
"""
- fn = params[0]
- mc = bb.runqueue.mc_from_tid(fn)
+ virtualfn = params[0]
+ (fn, cls, mc) = bb.cache.virtualfn2realfn(virtualfn)
appends = params[1]
appendlist = params[2]
if len(params) > 3:
@@ -549,6 +566,7 @@ class CommandsSync:
appendfiles = command.cooker.collections[mc].get_file_appends(fn)
else:
appendfiles = []
+ layername = command.cooker.collections[mc].calc_bbfile_priority(fn)[2]
# We are calling bb.cache locally here rather than on the server,
# but that's OK because it doesn't actually need anything from
# the server barring the global datastore (which we have a remote
@@ -556,11 +574,10 @@ class CommandsSync:
if config_data:
# We have to use a different function here if we're passing in a datastore
# NOTE: we took a copy above, so we don't do it here again
- envdata = bb.cache.parse_recipe(config_data, fn, appendfiles, mc)['']
+ envdata = command.cooker.databuilder._parse_recipe(config_data, fn, appendfiles, mc, layername)[cls]
else:
# Use the standard path
- parser = bb.cache.NoCache(command.cooker.databuilder)
- envdata = parser.loadDataFull(fn, appendfiles)
+ envdata = command.cooker.databuilder.parseRecipe(virtualfn, appendfiles, layername)
idx = command.remotedatastores.store(envdata)
return DataStoreConnectionHandle(idx)
parseRecipeFile.readonly = True
@@ -659,6 +676,16 @@ class CommandsAsync:
command.finishAsyncCommand()
findFilesMatchingInDir.needcache = False
+ def testCookerCommandEvent(self, command, params):
+ """
+ Dummy command used by OEQA selftest to test tinfoil without IO
+ """
+ pattern = params[0]
+
+ command.cooker.testCookerCommandEvent(pattern)
+ command.finishAsyncCommand()
+ testCookerCommandEvent.needcache = False
+
def findConfigFilePath(self, command, params):
"""
Find the path of the requested configuration file
@@ -723,7 +750,7 @@ class CommandsAsync:
"""
event = params[0]
bb.event.fire(eval(event), command.cooker.data)
- command.currentAsyncCommand = None
+ command.process_server.clear_async_cmd()
triggerEvent.needcache = False
def resetCooker(self, command, params):
@@ -750,7 +777,14 @@ class CommandsAsync:
(mc, pn) = bb.runqueue.split_mc(params[0])
taskname = params[1]
sigs = params[2]
+ bb.siggen.check_siggen_version(bb.siggen)
res = bb.siggen.find_siginfo(pn, taskname, sigs, command.cooker.databuilder.mcdata[mc])
bb.event.fire(bb.event.FindSigInfoResult(res), command.cooker.databuilder.mcdata[mc])
command.finishAsyncCommand()
findSigInfo.needcache = False
+
+ def getTaskSignatures(self, command, params):
+ res = command.cooker.getTaskSignatures(params[0], params[1])
+ bb.event.fire(bb.event.GetTaskSignatureResult(res), command.cooker.data)
+ command.finishAsyncCommand()
+ getTaskSignatures.needcache = True
diff --git a/lib/bb/compress/_pipecompress.py b/lib/bb/compress/_pipecompress.py
new file mode 100644
index 000000000..4a403d62c
--- /dev/null
+++ b/lib/bb/compress/_pipecompress.py
@@ -0,0 +1,196 @@
+#
+# Copyright BitBake Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+# Helper library to implement streaming compression and decompression using an
+# external process
+#
+# This library should not be used directly by end users; a wrapper library for the
+# specific compression tool should be created
+
+import builtins
+import io
+import os
+import subprocess
+
+
+def open_wrap(
+ cls, filename, mode="rb", *, encoding=None, errors=None, newline=None, **kwargs
+):
+ """
+ Open a compressed file in binary or text mode.
+
+ Users should not call this directly. A specific compression library can use
+ this helper to provide its own "open" command
+
+ The filename argument can be an actual filename (a str or bytes object), or
+ an existing file object to read from or write to.
+
+ The mode argument can be "r", "rb", "w", "wb", "x", "xb", "a" or "ab" for
+ binary mode, or "rt", "wt", "xt" or "at" for text mode. The default mode is
+ "rb".
+
+ For binary mode, this function is equivalent to the cls constructor:
+ cls(filename, mode). In this case, the encoding, errors and newline
+ arguments must not be provided.
+
+ For text mode, a cls object is created, and wrapped in an
+ io.TextIOWrapper instance with the specified encoding, error handling
+ behavior, and line ending(s).
+ """
+ if "t" in mode:
+ if "b" in mode:
+ raise ValueError("Invalid mode: %r" % (mode,))
+ else:
+ if encoding is not None:
+ raise ValueError("Argument 'encoding' not supported in binary mode")
+ if errors is not None:
+ raise ValueError("Argument 'errors' not supported in binary mode")
+ if newline is not None:
+ raise ValueError("Argument 'newline' not supported in binary mode")
+
+ file_mode = mode.replace("t", "")
+ if isinstance(filename, (str, bytes, os.PathLike, int)):
+ binary_file = cls(filename, file_mode, **kwargs)
+ elif hasattr(filename, "read") or hasattr(filename, "write"):
+ binary_file = cls(None, file_mode, fileobj=filename, **kwargs)
+ else:
+ raise TypeError("filename must be a str or bytes object, or a file")
+
+ if "t" in mode:
+ return io.TextIOWrapper(
+ binary_file, encoding, errors, newline, write_through=True
+ )
+ else:
+ return binary_file
+
+
+class CompressionError(OSError):
+ pass
+
+
+class PipeFile(io.RawIOBase):
+ """
+ Class that implements generically piping to/from a compression program
+
+ Derived classes should add the function get_compress() and get_decompress()
+ that return the required commands. Input will be piped into stdin and the
+ (de)compressed output should be written to stdout, e.g.:
+
+ class FooFile(PipeFile):
+ def get_decompress(self):
+ return ["fooc", "--decompress", "--stdout"]
+
+ def get_compress(self):
+ return ["fooc", "--compress", "--stdout"]
+
+ """
+
+ READ = 0
+ WRITE = 1
+
+ def __init__(self, filename=None, mode="rb", *, stderr=None, fileobj=None):
+ if "t" in mode or "U" in mode:
+ raise ValueError("Invalid mode: {!r}".format(mode))
+
+ if not "b" in mode:
+ mode += "b"
+
+ if mode.startswith("r"):
+ self.mode = self.READ
+ elif mode.startswith("w"):
+ self.mode = self.WRITE
+ else:
+ raise ValueError("Invalid mode %r" % mode)
+
+ if fileobj is not None:
+ self.fileobj = fileobj
+ else:
+ self.fileobj = builtins.open(filename, mode or "rb")
+
+ if self.mode == self.READ:
+ self.p = subprocess.Popen(
+ self.get_decompress(),
+ stdin=self.fileobj,
+ stdout=subprocess.PIPE,
+ stderr=stderr,
+ close_fds=True,
+ )
+ self.pipe = self.p.stdout
+ else:
+ self.p = subprocess.Popen(
+ self.get_compress(),
+ stdin=subprocess.PIPE,
+ stdout=self.fileobj,
+ stderr=stderr,
+ close_fds=True,
+ )
+ self.pipe = self.p.stdin
+
+ self.__closed = False
+
+ def _check_process(self):
+ if self.p is None:
+ return
+
+ returncode = self.p.wait()
+ if returncode:
+ raise CompressionError("Process died with %d" % returncode)
+ self.p = None
+
+ def close(self):
+ if self.closed:
+ return
+
+ self.pipe.close()
+ if self.p is not None:
+ self._check_process()
+ self.fileobj.close()
+
+ self.__closed = True
+
+ @property
+ def closed(self):
+ return self.__closed
+
+ def fileno(self):
+ return self.pipe.fileno()
+
+ def flush(self):
+ self.pipe.flush()
+
+ def isatty(self):
+ return self.pipe.isatty()
+
+ def readable(self):
+ return self.mode == self.READ
+
+ def writable(self):
+ return self.mode == self.WRITE
+
+ def readinto(self, b):
+ if self.mode != self.READ:
+ import errno
+
+ raise OSError(
+ errno.EBADF, "read() on write-only %s object" % self.__class__.__name__
+ )
+ size = self.pipe.readinto(b)
+ if size == 0:
+ self._check_process()
+ return size
+
+ def write(self, data):
+ if self.mode != self.WRITE:
+ import errno
+
+ raise OSError(
+ errno.EBADF, "write() on read-only %s object" % self.__class__.__name__
+ )
+ data = self.pipe.write(data)
+
+ if not data:
+ self._check_process()
+
+ return data
diff --git a/lib/bb/compress/lz4.py b/lib/bb/compress/lz4.py
new file mode 100644
index 000000000..88b098932
--- /dev/null
+++ b/lib/bb/compress/lz4.py
@@ -0,0 +1,19 @@
+#
+# Copyright BitBake Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import bb.compress._pipecompress
+
+
+def open(*args, **kwargs):
+ return bb.compress._pipecompress.open_wrap(LZ4File, *args, **kwargs)
+
+
+class LZ4File(bb.compress._pipecompress.PipeFile):
+ def get_compress(self):
+ return ["lz4c", "-z", "-c"]
+
+ def get_decompress(self):
+ return ["lz4c", "-d", "-c"]
diff --git a/lib/bb/compress/zstd.py b/lib/bb/compress/zstd.py
new file mode 100644
index 000000000..cdbbe9d60
--- /dev/null
+++ b/lib/bb/compress/zstd.py
@@ -0,0 +1,30 @@
+#
+# Copyright BitBake Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import bb.compress._pipecompress
+import shutil
+
+
+def open(*args, **kwargs):
+ return bb.compress._pipecompress.open_wrap(ZstdFile, *args, **kwargs)
+
+
+class ZstdFile(bb.compress._pipecompress.PipeFile):
+ def __init__(self, *args, num_threads=1, compresslevel=3, **kwargs):
+ self.num_threads = num_threads
+ self.compresslevel = compresslevel
+ super().__init__(*args, **kwargs)
+
+ def _get_zstd(self):
+ if self.num_threads == 1 or not shutil.which("pzstd"):
+ return ["zstd"]
+ return ["pzstd", "-p", "%d" % self.num_threads]
+
+ def get_compress(self):
+ return self._get_zstd() + ["-c", "-%d" % self.compresslevel]
+
+ def get_decompress(self):
+ return self._get_zstd() + ["-d", "-c"]
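Usage of these wrappers mirrors the stdlib gzip/lzma modules; a plausible round-trip with the zstd wrapper (path hypothetical; pzstd is only used when installed and num_threads > 1):

    import bb.compress.zstd

    with bb.compress.zstd.open("/tmp/data.zst", "wt", encoding="utf-8",
                               num_threads=4) as f:
        f.write("hello world\n")

    with bb.compress.zstd.open("/tmp/data.zst", "rt", encoding="utf-8") as f:
        print(f.read())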
diff --git a/lib/bb/cooker.py b/lib/bb/cooker.py
index 39e10e613..c5bfef55d 100644
--- a/lib/bb/cooker.py
+++ b/lib/bb/cooker.py
@@ -13,7 +13,6 @@ import sys, os, glob, os.path, re, time
import itertools
import logging
import multiprocessing
-import sre_constants
import threading
from io import StringIO, UnsupportedOperation
from contextlib import closing
@@ -23,7 +22,6 @@ from bb import utils, data, parse, event, cache, providers, taskdata, runqueue,
import queue
import signal
import prserv.serv
-import pyinotify
import json
import pickle
import codecs
@@ -81,7 +79,7 @@ class SkippedPackage:
class CookerFeatures(object):
- _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = list(range(3))
+ _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS, RECIPE_SIGGEN_INFO] = list(range(4))
def __init__(self):
self._features=set()
@@ -104,12 +102,15 @@ class CookerFeatures(object):
class EventWriter:
def __init__(self, cooker, eventfile):
- self.file_inited = None
self.cooker = cooker
self.eventfile = eventfile
self.event_queue = []
- def write_event(self, event):
+ def write_variables(self):
+ with open(self.eventfile, "a") as f:
+ f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))
+
+ def send(self, event):
with open(self.eventfile, "a") as f:
try:
str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
@@ -119,28 +120,6 @@ class EventWriter:
import traceback
print(err, traceback.format_exc())
- def send(self, event):
- if self.file_inited:
- # we have the file, just write the event
- self.write_event(event)
- else:
- # init on bb.event.BuildStarted
- name = "%s.%s" % (event.__module__, event.__class__.__name__)
- if name in ("bb.event.BuildStarted", "bb.cooker.CookerExit"):
- with open(self.eventfile, "w") as f:
- f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))
-
- self.file_inited = True
-
- # write pending events
- for evt in self.event_queue:
- self.write_event(evt)
-
- # also write the current event
- self.write_event(event)
- else:
- # queue all events until the file is inited
- self.event_queue.append(event)
#============================================================================#
# BBCooker
@@ -150,8 +129,10 @@ class BBCooker:
Manages one bitbake build run
"""
- def __init__(self, featureSet=None, idleCallBackRegister=None):
+ def __init__(self, featureSet=None, server=None):
self.recipecaches = None
+ self.baseconfig_valid = False
+ self.parsecache_valid = False
self.eventlog = None
self.skiplist = {}
self.featureset = CookerFeatures()
@@ -159,34 +140,22 @@ class BBCooker:
for f in featureSet:
self.featureset.setFeature(f)
+ self.orig_syspath = sys.path.copy()
+ self.orig_sysmodules = [*sys.modules]
+
self.configuration = bb.cookerdata.CookerConfiguration()
- self.idleCallBackRegister = idleCallBackRegister
+ self.process_server = server
+ self.idleCallBackRegister = None
+ self.waitIdle = None
+ if server:
+ self.idleCallBackRegister = server.register_idle_function
+ self.waitIdle = server.wait_for_idle
bb.debug(1, "BBCooker starting %s" % time.time())
- sys.stdout.flush()
-
- self.configwatcher = pyinotify.WatchManager()
- bb.debug(1, "BBCooker pyinotify1 %s" % time.time())
- sys.stdout.flush()
-
- self.configwatcher.bbseen = set()
- self.configwatcher.bbwatchedfiles = set()
- self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
- bb.debug(1, "BBCooker pyinotify2 %s" % time.time())
- sys.stdout.flush()
- self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
- pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
- pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
- self.watcher = pyinotify.WatchManager()
- bb.debug(1, "BBCooker pyinotify3 %s" % time.time())
- sys.stdout.flush()
- self.watcher.bbseen = set()
- self.watcher.bbwatchedfiles = set()
- self.notifier = pyinotify.Notifier(self.watcher, self.notifications)
-
- bb.debug(1, "BBCooker pyinotify complete %s" % time.time())
- sys.stdout.flush()
+
+ self.configwatched = {}
+ self.parsewatched = {}
# If being called by something like tinfoil, we need to clean cached data
# which may now be invalid
@@ -197,14 +166,6 @@ class BBCooker:
self.hashserv = None
self.hashservaddr = None
- self.inotify_modified_files = []
-
- def _process_inotify_updates(server, cooker, abort):
- cooker.process_inotify_updates()
- return 1.0
-
- self.idleCallBackRegister(_process_inotify_updates, self)
-
# TOSTOP must not be set or our children will hang when they output
try:
fd = sys.stdout.fileno()
@@ -218,7 +179,7 @@ class BBCooker:
except UnsupportedOperation:
pass
- self.command = bb.command.Command(self)
+ self.command = bb.command.Command(self, self.process_server)
self.state = state.initial
self.parser = None
@@ -228,84 +189,37 @@ class BBCooker:
signal.signal(signal.SIGHUP, self.sigterm_exception)
bb.debug(1, "BBCooker startup complete %s" % time.time())
- sys.stdout.flush()
def init_configdata(self):
if not hasattr(self, "data"):
self.initConfigurationData()
bb.debug(1, "BBCooker parsed base configuration %s" % time.time())
- sys.stdout.flush()
self.handlePRServ()
- def process_inotify_updates(self):
- for n in [self.confignotifier, self.notifier]:
- if n.check_events(timeout=0):
- # read notified events and enqeue them
- n.read_events()
- n.process_events()
-
- def config_notifications(self, event):
- if event.maskname == "IN_Q_OVERFLOW":
- bb.warn("inotify event queue overflowed, invalidating caches.")
- self.parsecache_valid = False
- self.baseconfig_valid = False
- bb.parse.clear_cache()
- return
- if not event.pathname in self.configwatcher.bbwatchedfiles:
- return
- if not event.pathname in self.inotify_modified_files:
- self.inotify_modified_files.append(event.pathname)
- self.baseconfig_valid = False
-
- def notifications(self, event):
- if event.maskname == "IN_Q_OVERFLOW":
- bb.warn("inotify event queue overflowed, invalidating caches.")
- self.parsecache_valid = False
- bb.parse.clear_cache()
- return
- if event.pathname.endswith("bitbake-cookerdaemon.log") \
- or event.pathname.endswith("bitbake.lock"):
- return
- if not event.pathname in self.inotify_modified_files:
- self.inotify_modified_files.append(event.pathname)
- self.parsecache_valid = False
+ def _baseconfig_set(self, value):
+ if value and not self.baseconfig_valid:
+ bb.server.process.serverlog("Base config valid")
+ elif not value and self.baseconfig_valid:
+ bb.server.process.serverlog("Base config invalidated")
+ self.baseconfig_valid = value
+
+ def _parsecache_set(self, value):
+ if value and not self.parsecache_valid:
+ bb.server.process.serverlog("Parse cache valid")
+ elif not value and self.parsecache_valid:
+ bb.server.process.serverlog("Parse cache invalidated")
+ self.parsecache_valid = value
+
+ def add_filewatch(self, deps, configwatcher=False):
+ if configwatcher:
+ watcher = self.configwatched
+ else:
+ watcher = self.parsewatched
- def add_filewatch(self, deps, watcher=None, dirs=False):
- if not watcher:
- watcher = self.watcher
for i in deps:
- watcher.bbwatchedfiles.add(i[0])
- if dirs:
- f = i[0]
- else:
- f = os.path.dirname(i[0])
- if f in watcher.bbseen:
- continue
- watcher.bbseen.add(f)
- watchtarget = None
- while True:
- # We try and add watches for files that don't exist but if they did, would influence
- # the parser. The parent directory of these files may not exist, in which case we need
- # to watch any parent that does exist for changes.
- try:
- watcher.add_watch(f, self.watchmask, quiet=False)
- if watchtarget:
- watcher.bbwatchedfiles.add(watchtarget)
- break
- except pyinotify.WatchManagerError as e:
- if 'ENOENT' in str(e):
- watchtarget = f
- f = os.path.dirname(f)
- if f in watcher.bbseen:
- break
- watcher.bbseen.add(f)
- continue
- if 'ENOSPC' in str(e):
- providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
- providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
- providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
- providerlog.error("Root privilege is required to modify max_user_watches.")
- raise
+ f = i[0]
+ mtime = i[1]
+ watcher[f] = mtime
def sigterm_exception(self, signum, stackframe):
if signum == signal.SIGTERM:
@@ -313,6 +227,7 @@ class BBCooker:
elif signum == signal.SIGHUP:
bb.warn("Cooker received SIGHUP, shutting down...")
self.state = state.forceshutdown
+ bb.event._should_exit.set()
def setFeatures(self, features):
# we only accept a new feature set if we're in state initial, so we can reset without problems
@@ -330,6 +245,13 @@ class BBCooker:
self.state = state.initial
self.caches_array = []
+ sys.path = self.orig_syspath.copy()
+ for mod in [*sys.modules]:
+ if mod not in self.orig_sysmodules:
+ del sys.modules[mod]
+
+ self.configwatched = {}
+
# Need to preserve BB_CONSOLELOG over resets
consolelog = None
if hasattr(self, "data"):
@@ -338,12 +260,12 @@ class BBCooker:
if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
self.enableDataTracking()
- all_extra_cache_names = []
+ caches_name_array = ['bb.cache:CoreRecipeInfo']
# We hardcode all known cache types in a single place, here.
if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
- all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")
-
- caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names
+ caches_name_array.append("bb.cache_extra:HobRecipeInfo")
+ if CookerFeatures.RECIPE_SIGGEN_INFO in self.featureset:
+ caches_name_array.append("bb.cache:SiggenRecipeInfo")
# At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
# This is the entry point, no further check needed!
@@ -362,6 +284,10 @@ class BBCooker:
self.data_hash = self.databuilder.data_hash
self.extraconfigdata = {}
+ eventlog = self.data.getVar("BB_DEFAULT_EVENTLOG")
+ if not self.configuration.writeeventlog and eventlog:
+ self.setupEventLog(eventlog)
+
if consolelog:
self.data.setVar("BB_CONSOLELOG", consolelog)
@@ -371,31 +297,42 @@ class BBCooker:
self.disableDataTracking()
for mc in self.databuilder.mcdata.values():
- mc.renameVar("__depends", "__base_depends")
- self.add_filewatch(mc.getVar("__base_depends", False), self.configwatcher)
+ self.add_filewatch(mc.getVar("__base_depends", False), configwatcher=True)
- self.baseconfig_valid = True
- self.parsecache_valid = False
+ self._baseconfig_set(True)
+ self._parsecache_set(False)
def handlePRServ(self):
# Setup a PR Server based on the new configuration
try:
self.prhost = prserv.serv.auto_start(self.data)
except prserv.serv.PRServiceConfigError as e:
- bb.fatal("Unable to start PR Server, exitting")
+ bb.fatal("Unable to start PR Server, exiting, check the bitbake-cookerdaemon.log")
if self.data.getVar("BB_HASHSERVE") == "auto":
# Create a new hash server bound to a unix domain socket
if not self.hashserv:
dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db"
+ upstream = self.data.getVar("BB_HASHSERVE_UPSTREAM") or None
+ if upstream:
+ import socket
+ try:
+ sock = socket.create_connection(upstream.split(":"), 5)
+ sock.close()
+ except socket.error as e:
+ bb.warn("BB_HASHSERVE_UPSTREAM is not valid, unable to connect hash equivalence server at '%s': %s"
+ % (upstream, repr(e)))
+
self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR")
- self.hashserv = hashserv.create_server(self.hashservaddr, dbfile, sync=False)
- self.hashserv.process = multiprocessing.Process(target=self.hashserv.serve_forever)
- self.hashserv.process.start()
- self.data.setVar("BB_HASHSERVE", self.hashservaddr)
- self.databuilder.origdata.setVar("BB_HASHSERVE", self.hashservaddr)
- self.databuilder.data.setVar("BB_HASHSERVE", self.hashservaddr)
+ self.hashserv = hashserv.create_server(
+ self.hashservaddr,
+ dbfile,
+ sync=False,
+ upstream=upstream,
+ )
+ self.hashserv.serve_as_process(log_level=logging.WARNING)
for mc in self.databuilder.mcdata:
+ self.databuilder.mcorigdata[mc].setVar("BB_HASHSERVE", self.hashservaddr)
self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.hashservaddr)
bb.parse.init_parser(self.data)
@@ -410,6 +347,29 @@ class BBCooker:
if hasattr(self, "data"):
self.data.disableTracking()
+ def revalidateCaches(self):
+ bb.parse.clear_cache()
+
+ clean = True
+ for f in self.configwatched:
+ if not bb.parse.check_mtime(f, self.configwatched[f]):
+ bb.server.process.serverlog("Found %s changed, invalid cache" % f)
+ self._baseconfig_set(False)
+ self._parsecache_set(False)
+ clean = False
+ break
+
+ if clean:
+ for f in self.parsewatched:
+ if not bb.parse.check_mtime(f, self.parsewatched[f]):
+ bb.server.process.serverlog("Found %s changed, invalid cache" % f)
+ self._parsecache_set(False)
+ clean = False
+ break
+
+ if not clean:
+ bb.parse.BBHandler.cached_statements = {}
+
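The inotify replacement in miniature: parse-time dependencies are recorded as path-to-mtime pairs, and cache validity later reduces to re-checking those mtimes (a sketch under that reading; bb.parse.check_mtime is assumed to compare a path's current mtime against the stored one):

    import os

    watched = {}   # path -> mtime recorded when the file was parsed

    def add_filewatch_sketch(path):
        watched[path] = os.path.getmtime(path)

    def caches_clean():
        return all(os.path.exists(p) and os.path.getmtime(p) == m
                   for p, m in watched.items())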
def parseConfiguration(self):
self.updateCacheSync()
@@ -428,8 +388,24 @@ class BBCooker:
self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)
self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS"))
-
- self.parsecache_valid = False
+ self.collections = {}
+ for mc in self.multiconfigs:
+ self.collections[mc] = CookerCollectFiles(self.bbfile_config_priorities, mc)
+
+ self._parsecache_set(False)
+
+ def setupEventLog(self, eventlog):
+ if self.eventlog and self.eventlog[0] != eventlog:
+ bb.event.unregister_UIHhandler(self.eventlog[1])
+ self.eventlog = None
+ if not self.eventlog or self.eventlog[0] != eventlog:
+ # we log all events to a file if so directed
+ # register the log file writer as UI Handler
+ if not os.path.exists(os.path.dirname(eventlog)):
+ bb.utils.mkdirhier(os.path.dirname(eventlog))
+ writer = EventWriter(self, eventlog)
+ EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
+ self.eventlog = (eventlog, bb.event.register_UIHhandler(EventLogWriteHandler(writer)), writer)
def updateConfigOpts(self, options, environment, cmdline):
self.ui_cmdline = cmdline
@@ -450,14 +426,7 @@ class BBCooker:
setattr(self.configuration, o, options[o])
if self.configuration.writeeventlog:
- if self.eventlog and self.eventlog[0] != self.configuration.writeeventlog:
- bb.event.unregister_UIHhandler(self.eventlog[1])
- if not self.eventlog or self.eventlog[0] != self.configuration.writeeventlog:
- # we log all events to a file if so directed
- # register the log file writer as UI Handler
- writer = EventWriter(self, self.configuration.writeeventlog)
- EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
- self.eventlog = (self.configuration.writeeventlog, bb.event.register_UIHhandler(EventLogWriteHandler(writer)))
+ self.setupEventLog(self.configuration.writeeventlog)
bb.msg.loggerDefaultLogLevel = self.configuration.default_loglevel
bb.msg.loggerDefaultDomains = self.configuration.debug_domains
@@ -487,19 +456,11 @@ class BBCooker:
# Now update all the variables not in the datastore to match
self.configuration.env = environment
+ self.revalidateCaches()
if not clean:
logger.debug("Base environment change, triggering reparse")
self.reset()
- def runCommands(self, server, data, abort):
- """
- Run any queued asynchronous command
- This is done by the idle handler so it runs in true context rather than
- tied to any UI.
- """
-
- return self.command.runAsyncCommand()
-
def showVersions(self):
(latest_versions, preferred_versions, required) = self.findProviders()
@@ -541,6 +502,8 @@ class BBCooker:
if not orig_tracking:
self.enableDataTracking()
self.reset()
+ # reset() resets to the UI requested value so we have to redo this
+ self.enableDataTracking()
def mc_base(p):
if p.startswith('mc:'):
@@ -564,21 +527,21 @@ class BBCooker:
if pkgs_to_build[0] in set(ignore.split()):
bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])
- taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True)
+ taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.halt, allowincomplete=True)
mc = runlist[0][0]
fn = runlist[0][3]
if fn:
try:
- bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)
- envdata = bb_caches[mc].loadDataFull(fn, self.collections[mc].get_file_appends(fn))
+ layername = self.collections[mc].calc_bbfile_priority(fn)[2]
+ envdata = self.databuilder.parseRecipe(fn, self.collections[mc].get_file_appends(fn), layername)
except Exception as e:
parselog.exception("Unable to read %s", fn)
raise
else:
if not mc in self.databuilder.mcdata:
- bb.fatal('Not multiconfig named "%s" found' % mc)
+ bb.fatal('No multiconfig named "%s" found' % mc)
envdata = self.databuilder.mcdata[mc]
data.expandKeys(envdata)
parse.ast.runAnonFuncs(envdata)
@@ -593,7 +556,7 @@ class BBCooker:
data.emit_env(env, envdata, True)
logger.plain(env.getvalue())
- # emit the metadata which isnt valid shell
+ # emit the metadata which isn't valid shell
for e in sorted(envdata.keys()):
if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))
@@ -602,7 +565,7 @@ class BBCooker:
self.disableDataTracking()
self.reset()
- def buildTaskData(self, pkgs_to_build, task, abort, allowincomplete=False):
+ def buildTaskData(self, pkgs_to_build, task, halt, allowincomplete=False):
"""
Prepare a runqueue and taskdata object for iteration over pkgs_to_build
"""
@@ -649,7 +612,7 @@ class BBCooker:
localdata = {}
for mc in self.multiconfigs:
- taskdata[mc] = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete)
+ taskdata[mc] = bb.taskdata.TaskData(halt, skiplist=self.skiplist, allowincomplete=allowincomplete)
localdata[mc] = data.createCopy(self.databuilder.mcdata[mc])
bb.data.expandKeys(localdata[mc])
@@ -698,19 +661,18 @@ class BBCooker:
taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
mcdeps |= set(taskdata[mc].get_mcdepends())
new = False
- for mc in self.multiconfigs:
- for k in mcdeps:
- if k in seen:
- continue
- l = k.split(':')
- depmc = l[2]
- if depmc not in self.multiconfigs:
- bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named configuration %s" % (k,depmc))
- else:
- logger.debug("Adding providers for multiconfig dependency %s" % l[3])
- taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
- seen.add(k)
- new = True
+ for k in mcdeps:
+ if k in seen:
+ continue
+ l = k.split(':')
+ depmc = l[2]
+ if depmc not in self.multiconfigs:
+ bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named configuration %s" % (k,depmc))
+ else:
+ logger.debug("Adding providers for multiconfig dependency %s" % l[3])
+ taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
+ seen.add(k)
+ new = True
for mc in self.multiconfigs:
taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
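Note (editorial sketch, not part of the patch): the rewritten loop above walks mcdepends keys of the form mc:<from-mc>:<dep-mc>:<target>:<task>, which is why index 2 of the split names the multiconfig and index 3 the target. With hypothetical values:

    k = "mc:host:target:core-image-minimal:do_image_complete"
    l = k.split(':')
    assert l[2] == "target"              # multiconfig the dependency lives in
    assert l[3] == "core-image-minimal"  # target to add a provider for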
@@ -723,7 +685,7 @@ class BBCooker:
Prepare a runqueue and taskdata object for iteration over pkgs_to_build
"""
- # We set abort to False here to prevent unbuildable targets raising
+ # We set halt to False here to prevent unbuildable targets raising
# an exception when we're just generating data
taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)
@@ -800,7 +762,9 @@ class BBCooker:
for dep in rq.rqdata.runtaskentries[tid].depends:
(depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
deppn = self.recipecaches[depmc].pkg_fn[deptaskfn]
- depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, bb.runqueue.taskname_from_tid(dep)))
+ if depmc:
+ depmc = "mc:" + depmc + ":"
+ depend_tree["tdepends"][dotname].append("%s%s.%s" % (depmc, deppn, bb.runqueue.taskname_from_tid(dep)))
if taskfn not in seen_fns:
seen_fns.append(taskfn)
packages = []
@@ -1064,6 +1028,11 @@ class BBCooker:
if matches:
bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
+ def testCookerCommandEvent(self, filepattern):
+ # Dummy command used by OEQA selftest to test tinfoil without IO
+ matches = ["A", "B"]
+ bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
+
def findProviders(self, mc=''):
return bb.providers.findProviders(self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
@@ -1225,15 +1194,15 @@ class BBCooker:
except bb.utils.VersionStringException as vse:
bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
if not res:
- parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
+ parselog.debug3("Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
continue
else:
- parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
+ parselog.debug3("Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
continue
- parselog.debug(3,"Layer '%s' recommends layer '%s', so we are adding it", c, rec)
+ parselog.debug3("Layer '%s' recommends layer '%s', so we are adding it", c, rec)
collection_depends[c].append(rec)
else:
- parselog.debug(3,"Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)
+ parselog.debug3("Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)
# Recursively work out collection priorities based on dependencies
def calc_layer_priority(collection):
@@ -1245,7 +1214,7 @@ class BBCooker:
if depprio > max_depprio:
max_depprio = depprio
max_depprio += 1
- parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
+ parselog.debug("Calculated priority of layer %s as %d", collection, max_depprio)
collection_priorities[collection] = max_depprio
# Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
@@ -1257,7 +1226,7 @@ class BBCooker:
errors = True
continue
elif regex == "":
- parselog.debug(1, "BBFILE_PATTERN_%s is empty" % c)
+ parselog.debug("BBFILE_PATTERN_%s is empty" % c)
cre = re.compile('^NULL$')
errors = False
else:
@@ -1304,8 +1273,8 @@ class BBCooker:
if bf.startswith("/") or bf.startswith("../"):
bf = os.path.abspath(bf)
- self.collections = {mc: CookerCollectFiles(self.bbfile_config_priorities, mc)}
- filelist, masked, searchdirs = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
+ collections = {mc: CookerCollectFiles(self.bbfile_config_priorities, mc)}
+ filelist, masked, searchdirs = collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
try:
os.stat(bf)
bf = os.path.abspath(bf)
@@ -1371,7 +1340,8 @@ class BBCooker:
bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)
- infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn))
+ layername = self.collections[mc].calc_bbfile_priority(fn)[2]
+ infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn), layername)
infos = dict(infos)
fn = bb.cache.realfn2virtual(fn, cls, mc)
@@ -1397,14 +1367,16 @@ class BBCooker:
self.recipecaches[mc].rundeps[fn] = defaultdict(list)
self.recipecaches[mc].runrecs[fn] = defaultdict(list)
+ bb.parse.siggen.setup_datacache(self.recipecaches)
+
# Invalidate task for target if force mode active
if self.configuration.force:
logger.verbose("Invalidate task %s, %s", task, fn)
- bb.parse.siggen.invalidate_task(task, self.recipecaches[mc], fn)
+ bb.parse.siggen.invalidate_task(task, fn)
# Setup taskdata structure
taskdata = {}
- taskdata[mc] = bb.taskdata.TaskData(self.configuration.abort)
+ taskdata[mc] = bb.taskdata.TaskData(self.configuration.halt)
taskdata[mc].add_provider(self.databuilder.mcdata[mc], self.recipecaches[mc], item)
if quietlog:
@@ -1414,17 +1386,20 @@ class BBCooker:
buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME")
if fireevents:
bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc])
+ if self.eventlog:
+ self.eventlog[2].write_variables()
+ bb.event.enable_heartbeat()
# Execute the runqueue
runlist = [[mc, item, task, fn]]
rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
- def buildFileIdle(server, rq, abort):
+ def buildFileIdle(server, rq, halt):
msg = None
interrupted = 0
- if abort or self.state == state.forceshutdown:
+ if halt or self.state == state.forceshutdown:
rq.finish_runqueue(True)
msg = "Forced shutdown"
interrupted = 2
@@ -1439,37 +1414,68 @@ class BBCooker:
failures += len(exc.args)
retval = False
except SystemExit as exc:
- self.command.finishAsyncCommand(str(exc))
if quietlog:
bb.runqueue.logger.setLevel(rqloglevel)
- return False
+ return bb.server.process.idleFinish(str(exc))
if not retval:
if fireevents:
bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc])
- self.command.finishAsyncCommand(msg)
+ bb.event.disable_heartbeat()
# We trashed self.recipecaches above
- self.parsecache_valid = False
+ self._parsecache_set(False)
self.configuration.limited_deps = False
bb.parse.siggen.reset(self.data)
if quietlog:
bb.runqueue.logger.setLevel(rqloglevel)
- return False
+ return bb.server.process.idleFinish(msg)
if retval is True:
return True
return retval
self.idleCallBackRegister(buildFileIdle, rq)
+ def getTaskSignatures(self, target, tasks):
+ sig = []
+ getAllTaskSignatures = False
+
+ if not tasks:
+ tasks = ["do_build"]
+ getAllTaskSignatures = True
+
+ for task in tasks:
+ taskdata, runlist = self.buildTaskData(target, task, self.configuration.halt)
+ rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
+ rq.rqdata.prepare()
+
+ for l in runlist:
+ mc, pn, taskname, fn = l
+
+ taskdep = rq.rqdata.dataCaches[mc].task_deps[fn]
+ for t in taskdep['tasks']:
+ if t in taskdep['nostamp'] or "setscene" in t:
+ continue
+ tid = bb.runqueue.build_tid(mc, fn, t)
+
+ if t in task or getAllTaskSignatures:
+ try:
+ rq.rqdata.prepare_task_hash(tid)
+ sig.append([pn, t, rq.rqdata.get_task_unihash(tid)])
+ except KeyError:
+ sig.append(self.getTaskSignatures(target, [t])[0])
+
+ return sig
+
def buildTargets(self, targets, task):
"""
Attempt to build the targets specified
"""
- def buildTargetsIdle(server, rq, abort):
+ def buildTargetsIdle(server, rq, halt):
msg = None
interrupted = 0
- if abort or self.state == state.forceshutdown:
+ if halt or self.state == state.forceshutdown:
+ bb.event._should_exit.set()
rq.finish_runqueue(True)
msg = "Forced shutdown"
interrupted = 2
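Note (editorial sketch, not part of the patch): the KeyError fallback in the new getTaskSignatures() recomputes a task hash that was not prepared as part of the requested task's graph by recursing with just that task. Data and names below are hypothetical; the real method returns [pn, taskname, unihash] triples:

    hashes = {"do_compile": "1f0a", "do_install": "9bc4"}

    def task_signatures(pn, tasks):
        sig = []
        for t in tasks:
            try:
                sig.append([pn, t, hashes[t]])
            except KeyError:
                hashes[t] = "fresh-" + t   # stands in for a fresh runqueue
                sig.append(task_signatures(pn, [t])[0])
        return sig

    print(task_signatures("busybox", ["do_compile", "do_package"]))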
@@ -1484,16 +1490,16 @@ class BBCooker:
failures += len(exc.args)
retval = False
except SystemExit as exc:
- self.command.finishAsyncCommand(str(exc))
- return False
+ return bb.server.process.idleFinish(str(exc))
if not retval:
try:
for mc in self.multiconfigs:
bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc])
finally:
- self.command.finishAsyncCommand(msg)
- return False
+ bb.event.disable_heartbeat()
+ return bb.server.process.idleFinish(msg)
+
if retval is True:
return True
return retval
@@ -1512,7 +1518,7 @@ class BBCooker:
bb.event.fire(bb.event.BuildInit(packages), self.data)
- taskdata, runlist = self.buildTaskData(targets, task, self.configuration.abort)
+ taskdata, runlist = self.buildTaskData(targets, task, self.configuration.halt)
buildname = self.data.getVar("BUILDNAME", False)
@@ -1525,6 +1531,9 @@ class BBCooker:
for mc in self.multiconfigs:
bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc])
+ if self.eventlog:
+ self.eventlog[2].write_variables()
+ bb.event.enable_heartbeat()
rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
if 'universe' in targets:
@@ -1534,7 +1543,13 @@ class BBCooker:
def getAllKeysWithFlags(self, flaglist):
+ def dummy_autorev(d):
+ return
+
dump = {}
+ # Horrible, but for now we need to avoid any side effects of autorev being called
+ saved = bb.fetch2.get_autorev
+ bb.fetch2.get_autorev = dummy_autorev
for k in self.data.keys():
try:
expand = True
@@ -1554,6 +1569,7 @@ class BBCooker:
dump[k][d] = None
except Exception as e:
print(e)
+ bb.fetch2.get_autorev = saved
return dump
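Note (editorial sketch, not part of the patch): the autorev stubbing above is a save/patch/restore of a module attribute. A generic illustration of the idiom with hypothetical names; a try/finally keeps the restore exception-safe:

    import types

    mod = types.SimpleNamespace(get_autorev=lambda d: "AUTOINC")
    saved = mod.get_autorev
    mod.get_autorev = lambda d: None      # silence the side effect
    try:
        print(mod.get_autorev(None))      # None while patched
    finally:
        mod.get_autorev = saved           # always restored
    print(mod.get_autorev(None))          # AUTOINC again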
@@ -1561,13 +1577,6 @@ class BBCooker:
if self.state == state.running:
return
- # reload files for which we got notifications
- for p in self.inotify_modified_files:
- bb.parse.update_cache(p)
- if p in bb.parse.BBHandler.cached_statements:
- del bb.parse.BBHandler.cached_statements[p]
- self.inotify_modified_files = []
-
if not self.baseconfig_valid:
logger.debug("Reloading base configuration data")
self.initConfigurationData()
@@ -1580,7 +1589,7 @@ class BBCooker:
if self.state in (state.shutdown, state.forceshutdown, state.error):
if hasattr(self.parser, 'shutdown'):
- self.parser.shutdown(clean=False, force = True)
+ self.parser.shutdown(clean=False)
self.parser.final_cleanup()
raise bb.BBHandledException()
@@ -1588,6 +1597,9 @@ class BBCooker:
self.updateCacheSync()
if self.state != state.parsing and not self.parsecache_valid:
+ bb.server.process.serverlog("Parsing started")
+ self.parsewatched = {}
+
bb.parse.siggen.reset(self.data)
self.parseConfiguration ()
if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
@@ -1601,30 +1613,27 @@ class BBCooker:
for dep in self.configuration.extra_assume_provided:
self.recipecaches[mc].ignored_dependencies.add(dep)
- self.collections = {}
-
mcfilelist = {}
total_masked = 0
searchdirs = set()
for mc in self.multiconfigs:
- self.collections[mc] = CookerCollectFiles(self.bbfile_config_priorities, mc)
(filelist, masked, search) = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
mcfilelist[mc] = filelist
total_masked += masked
searchdirs |= set(search)
- # Add inotify watches for directories searched for bb/bbappend files
+ # Add mtimes for directories searched for bb/bbappend files
for dirent in searchdirs:
- self.add_filewatch([[dirent]], dirs=True)
+ self.add_filewatch([(dirent, bb.parse.cached_mtime_noerror(dirent))])
self.parser = CookerParser(self, mcfilelist, total_masked)
- self.parsecache_valid = True
+ self._parsecache_set(True)
self.state = state.parsing
if not self.parser.parse_next():
- collectlog.debug(1, "parsing complete")
+ collectlog.debug("parsing complete")
if self.parser.error:
raise bb.BBHandledException()
self.show_appends_with_no_recipes()
@@ -1647,7 +1656,7 @@ class BBCooker:
# Return a copy, don't modify the original
pkgs_to_build = pkgs_to_build[:]
- if len(pkgs_to_build) == 0:
+ if not pkgs_to_build:
raise NothingToBuild
ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split()
@@ -1669,7 +1678,7 @@ class BBCooker:
if 'universe' in pkgs_to_build:
parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.")
- parselog.debug(1, "collating packages for \"universe\"")
+ parselog.debug("collating packages for \"universe\"")
pkgs_to_build.remove('universe')
for mc in self.multiconfigs:
for t in self.recipecaches[mc].universe_target:
@@ -1694,26 +1703,36 @@ class BBCooker:
def post_serve(self):
self.shutdown(force=True)
prserv.serv.auto_shutdown()
+ if hasattr(bb.parse, "siggen"):
+ bb.parse.siggen.exit()
if self.hashserv:
self.hashserv.process.terminate()
self.hashserv.process.join()
if hasattr(self, "data"):
bb.event.fire(CookerExit(), self.data)
- def shutdown(self, force = False):
+ def shutdown(self, force=False):
if force:
self.state = state.forceshutdown
+ bb.event._should_exit.set()
else:
self.state = state.shutdown
if self.parser:
- self.parser.shutdown(clean=not force, force=force)
+ self.parser.shutdown(clean=False)
self.parser.final_cleanup()
def finishcommand(self):
+ if hasattr(self.parser, 'shutdown'):
+ self.parser.shutdown(clean=False)
+ self.parser.final_cleanup()
self.state = state.initial
+ bb.event._should_exit.clear()
def reset(self):
+ if hasattr(bb.parse, "siggen"):
+ bb.parse.siggen.exit()
+ self.finishcommand()
self.initConfigurationData()
self.handlePRServ()
@@ -1725,9 +1744,9 @@ class BBCooker:
if hasattr(self, "data"):
self.databuilder.reset()
self.data = self.databuilder.data
- self.parsecache_valid = False
- self.baseconfig_valid = False
-
+ # In theory tinfoil could have modified the base data before parsing,
+ # ideally we would track whether anything modified the datastore
+ self._parsecache_set(False)
class CookerExit(bb.event.Event):
"""
@@ -1742,16 +1761,16 @@ class CookerCollectFiles(object):
def __init__(self, priorities, mc=''):
self.mc = mc
self.bbappends = []
- # Priorities is a list of tupples, with the second element as the pattern.
+ # Priorities is a list of tuples, with the second element as the pattern.
# We need to sort the list with the longest pattern first, and so on to
# the shortest. This allows nested layers to be properly evaluated.
self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True)
def calc_bbfile_priority(self, filename):
- for _, _, regex, pri in self.bbfile_config_priorities:
+ for layername, _, regex, pri in self.bbfile_config_priorities:
if regex.match(filename):
- return pri, regex
- return 0, None
+ return pri, regex, layername
+ return 0, None, None
def get_bbfiles(self):
"""Get list of default .bb files by reading out the current directory"""
@@ -1770,7 +1789,7 @@ class CookerCollectFiles(object):
for ignored in ('SCCS', 'CVS', '.svn'):
if ignored in dirs:
dirs.remove(ignored)
- found += [os.path.join(dir, f) for f in files if (f.endswith(['.bb', '.bbappend']))]
+ found += [os.path.join(dir, f) for f in files if (f.endswith(('.bb', '.bbappend')))]
return found
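Note (editorial, not part of the patch): the hunk above fixes a latent bug, since str.endswith() accepts a tuple of suffixes but raises TypeError for a list:

    print("foo.bb".endswith((".bb", ".bbappend")))    # True
    # "foo.bb".endswith([".bb", ".bbappend"])         # raises TypeError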
@@ -1778,7 +1797,7 @@ class CookerCollectFiles(object):
"""Collect all available .bb build files"""
masked = 0
- collectlog.debug(1, "collecting .bb files")
+ collectlog.debug("collecting .bb files")
files = (config.getVar( "BBFILES") or "").split()
@@ -1786,14 +1805,14 @@ class CookerCollectFiles(object):
files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem)[0] )
config.setVar("BBFILES_PRIORITIZED", " ".join(files))
- if not len(files):
+ if not files:
files = self.get_bbfiles()
- if not len(files):
+ if not files:
collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
bb.event.fire(CookerExit(), eventdata)
- # We need to track where we look so that we can add inotify watches. There
+ # We need to track where we look so that we can tell when the cache is invalid. There
# is no nice way to do this, this is horrid. We intercept the os.listdir()
# (or os.scandir() for python 3.6+) calls while we run glob().
origlistdir = os.listdir
@@ -1849,7 +1868,7 @@ class CookerCollectFiles(object):
try:
re.compile(mask)
bbmasks.append(mask)
- except sre_constants.error:
+ except re.error:
collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)
# Then validate the combined regular expressions. This should never
@@ -1857,7 +1876,7 @@ class CookerCollectFiles(object):
bbmask = "|".join(bbmasks)
try:
bbmask_compiled = re.compile(bbmask)
- except sre_constants.error:
+ except re.error:
collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
bbmask = None
@@ -1865,7 +1884,7 @@ class CookerCollectFiles(object):
bbappend = []
for f in newfiles:
if bbmask and bbmask_compiled.search(f):
- collectlog.debug(1, "skipping masked file %s", f)
+ collectlog.debug("skipping masked file %s", f)
masked += 1
continue
if f.endswith('.bb'):
@@ -1873,7 +1892,7 @@ class CookerCollectFiles(object):
elif f.endswith('.bbappend'):
bbappend.append(f)
else:
- collectlog.debug(1, "skipping %s: unknown file extension", f)
+ collectlog.debug("skipping %s: unknown file extension", f)
# Build a list of .bbappend files for each .bb file
for f in bbappend:
@@ -1924,7 +1943,7 @@ class CookerCollectFiles(object):
# Calculate priorities for each file
for p in pkgfns:
realfn, cls, mc = bb.cache.virtualfn2realfn(p)
- priorities[p], regex = self.calc_bbfile_priority(realfn)
+ priorities[p], regex, _ = self.calc_bbfile_priority(realfn)
if regex in unmatched_regex:
matched_regex.add(regex)
unmatched_regex.remove(regex)
@@ -1975,15 +1994,30 @@ class ParsingFailure(Exception):
Exception.__init__(self, realexception, recipe)
class Parser(multiprocessing.Process):
- def __init__(self, jobs, results, quit, init, profile):
+ def __init__(self, jobs, results, quit, profile):
self.jobs = jobs
self.results = results
self.quit = quit
- self.init = init
multiprocessing.Process.__init__(self)
self.context = bb.utils.get_context().copy()
self.handlers = bb.event.get_class_handlers().copy()
self.profile = profile
+ self.queue_signals = False
+ self.signal_received = []
+ self.signal_threadlock = threading.Lock()
+
+ def catch_sig(self, signum, frame):
+ if self.queue_signals:
+ self.signal_received.append(signum)
+ else:
+ self.handle_sig(signum, frame)
+
+ def handle_sig(self, signum, frame):
+ if signum == signal.SIGTERM:
+ signal.signal(signal.SIGTERM, signal.SIG_DFL)
+ os.kill(os.getpid(), signal.SIGTERM)
+ elif signum == signal.SIGINT:
+ signal.default_int_handler(signum, frame)
def run(self):
@@ -2003,38 +2037,50 @@ class Parser(multiprocessing.Process):
prof.dump_stats(logfile)
def realrun(self):
- if self.init:
- self.init()
+ # Signal handling here is hard. We must not terminate any process or thread holding the write
+ # lock for the event stream as it will not be released, ever, and things will hang.
+ # Python handles signals in the main thread/process but they can be raised from any thread and
+ # we want to defer processing of any SIGTERM/SIGINT signal until we're outside the critical section
+ # and don't hold the lock (see server/process.py). We therefore always catch the signals (so any
+ # new thread should also do so) and we defer handling but we handle with the local thread lock
+ # held (a threading lock, not a multiprocessing one) so that no other thread in the process
+ # can be in the critical section.
+ signal.signal(signal.SIGTERM, self.catch_sig)
+ signal.signal(signal.SIGHUP, signal.SIG_DFL)
+ signal.signal(signal.SIGINT, self.catch_sig)
+ bb.utils.set_process_name(multiprocessing.current_process().name)
+ multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
+ multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
pending = []
- while True:
- try:
- self.quit.get_nowait()
- except queue.Empty:
- pass
- else:
- self.results.close()
- self.results.join_thread()
- break
+ havejobs = True
+ try:
+ while havejobs or pending:
+ if self.quit.is_set():
+ break
- if pending:
- result = pending.pop()
- else:
+ job = None
try:
job = self.jobs.pop()
except IndexError:
- self.results.close()
- self.results.join_thread()
- break
- result = self.parse(*job)
- # Clear the siggen cache after parsing to control memory usage, its huge
- bb.parse.siggen.postparsing_clean_cache()
- try:
- self.results.put(result, timeout=0.25)
- except queue.Full:
- pending.append(result)
+ havejobs = False
+ if job:
+ result = self.parse(*job)
+ # Clear the siggen cache after parsing to control memory usage, it's huge
+ bb.parse.siggen.postparsing_clean_cache()
+ pending.append(result)
+
+ if pending:
+ try:
+ result = pending.pop()
+ self.results.put(result, timeout=0.05)
+ except queue.Full:
+ pending.append(result)
+ finally:
+ self.results.close()
+ self.results.join_thread()
- def parse(self, mc, cache, filename, appends):
+ def parse(self, mc, cache, filename, appends, layername):
try:
origfilter = bb.event.LogHandler.filter
# Record the filename we're parsing into any events generated
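Note (editorial sketch, not part of the patch): the realrun() comment above describes deferring SIGTERM/SIGINT while the event-stream write lock is held. A self-contained, simplified version of that catch/queue/replay pattern (SIGTERM only, hypothetical class name):

    import os
    import signal
    import threading

    class DeferredSignals:
        def __init__(self):
            self.queue_signals = False
            self.signal_received = []
            self.lock = threading.Lock()
            signal.signal(signal.SIGTERM, self.catch_sig)

        def catch_sig(self, signum, frame):
            if self.queue_signals:
                self.signal_received.append(signum)   # defer while critical
            else:
                self.handle_sig(signum, frame)

        def handle_sig(self, signum, frame):
            if signum == signal.SIGTERM:
                signal.signal(signal.SIGTERM, signal.SIG_DFL)
                os.kill(os.getpid(), signal.SIGTERM)  # redeliver for default handling

        def critical_section(self, work):
            with self.lock:
                self.queue_signals = True
                try:
                    work()
                finally:
                    self.queue_signals = False
                    while self.signal_received:
                        self.handle_sig(self.signal_received.pop(), None)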
@@ -2048,17 +2094,17 @@ class Parser(multiprocessing.Process):
bb.event.set_class_handlers(self.handlers.copy())
bb.event.LogHandler.filter = parse_filter
- return True, mc, cache.parse(filename, appends)
+ return True, mc, cache.parse(filename, appends, layername)
except Exception as exc:
tb = sys.exc_info()[2]
exc.recipe = filename
exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
- return True, exc
+ return True, None, exc
# Need to turn BaseExceptions into Exceptions here so we gracefully shutdown
# and for example a worker thread doesn't just exit on its own in response to
# a SystemExit event for example.
except BaseException as exc:
- return True, ParsingFailure(exc, filename)
+ return True, None, ParsingFailure(exc, filename)
finally:
bb.event.LogHandler.filter = origfilter
@@ -2088,10 +2134,11 @@ class CookerParser(object):
for mc in self.cooker.multiconfigs:
for filename in self.mcfilelist[mc]:
appends = self.cooker.collections[mc].get_file_appends(filename)
+ layername = self.cooker.collections[mc].calc_bbfile_priority(filename)[2]
if not self.bb_caches[mc].cacheValid(filename, appends):
- self.willparse.add((mc, self.bb_caches[mc], filename, appends))
+ self.willparse.add((mc, self.bb_caches[mc], filename, appends, layername))
else:
- self.fromcache.add((mc, self.bb_caches[mc], filename, appends))
+ self.fromcache.add((mc, self.bb_caches[mc], filename, appends, layername))
self.total = len(self.fromcache) + len(self.willparse)
self.toparse = len(self.willparse)
@@ -2100,6 +2147,7 @@ class CookerParser(object):
self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or
multiprocessing.cpu_count()), self.toparse)
+ bb.cache.SiggenRecipeInfo.reset()
self.start()
self.haveshutdown = False
self.syncthread = None
@@ -2109,15 +2157,8 @@ class CookerParser(object):
self.processes = []
if self.toparse:
bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
- def init():
- signal.signal(signal.SIGTERM, signal.SIG_DFL)
- signal.signal(signal.SIGHUP, signal.SIG_DFL)
- signal.signal(signal.SIGINT, signal.SIG_IGN)
- bb.utils.set_process_name(multiprocessing.current_process().name)
- multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
- multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
-
- self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
+
+ self.parser_quit = multiprocessing.Event()
self.result_queue = multiprocessing.Queue()
def chunkify(lst,n):
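Note (editorial, not part of the patch): chunkify(), whose body is elided by the context above, deals the willparse entries out across the parser processes; assuming the usual stride-slicing implementation:

    def chunkify(lst, n):
        # round-robin split of lst into n job lists
        return [lst[i::n] for i in range(n)]

    print(chunkify(list(range(7)), 3))   # [[0, 3, 6], [1, 4], [2, 5]]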
@@ -2125,14 +2166,14 @@ class CookerParser(object):
self.jobs = chunkify(list(self.willparse), self.num_processes)
for i in range(0, self.num_processes):
- parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
+ parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, self.cooker.configuration.profile)
parser.start()
self.process_names.append(parser.name)
self.processes.append(parser)
self.results = itertools.chain(self.results, self.parse_generator())
- def shutdown(self, clean=True, force=False):
+ def shutdown(self, clean=True, eventmsg="Parsing halted due to errors"):
if not self.toparse:
return
if self.haveshutdown:
@@ -2146,9 +2187,9 @@ class CookerParser(object):
self.total)
bb.event.fire(event, self.cfgdata)
-
- for process in self.processes:
- self.parser_quit.put(None)
+ else:
+ bb.event.fire(bb.event.ParseError(eventmsg), self.cfgdata)
+ bb.error("Parsing halted due to errors, see error messages above")
# Cleanup the queue before call process.join(), otherwise there might be
# deadlocks.
@@ -2158,25 +2199,39 @@ class CookerParser(object):
except queue.Empty:
break
- for process in self.processes:
- if force:
- process.join(.1)
- process.terminate()
- else:
- process.join()
-
- self.parser_quit.close()
- # Allow data left in the cancel queue to be discarded
- self.parser_quit.cancel_join_thread()
-
def sync_caches():
for c in self.bb_caches.values():
+ bb.cache.SiggenRecipeInfo.reset()
c.sync()
- sync = threading.Thread(target=sync_caches, name="SyncThread")
- self.syncthread = sync
- sync.start()
+ self.syncthread = threading.Thread(target=sync_caches, name="SyncThread")
+ self.syncthread.start()
+
+ self.parser_quit.set()
+
+ for process in self.processes:
+ process.join(0.5)
+
+ for process in self.processes:
+ if process.exitcode is None:
+ os.kill(process.pid, signal.SIGINT)
+
+ for process in self.processes:
+ process.join(0.5)
+
+ for process in self.processes:
+ if process.exitcode is None:
+ process.terminate()
+
+ for process in self.processes:
+ process.join()
+ # Process.close() was added in Python 3.7 and cleans up zombies
+ if hasattr(process, "close"):
+ process.close()
+
+ bb.codeparser.parser_cache_save()
bb.codeparser.parser_cache_savemerge()
+ bb.cache.SiggenRecipeInfo.reset()
bb.fetch.fetcher_parse_done()
if self.cooker.configuration.profile:
profiles = []
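Note (editorial sketch, not part of the patch): the replacement shutdown sequence above escalates gradually: set the quit Event, give each process 0.5s, SIGINT any stragglers, wait again, terminate() whatever is left, then join and (on Python 3.7+) close() to reap. Condensed:

    import os
    import signal

    def stop_parsers(quit_event, processes):
        quit_event.set()                      # ask nicely first
        for p in processes:
            p.join(0.5)
        for p in processes:
            if p.exitcode is None:
                os.kill(p.pid, signal.SIGINT) # interrupt stragglers
        for p in processes:
            p.join(0.5)
        for p in processes:
            if p.exitcode is None:
                p.terminate()                 # last resort: SIGTERM
        for p in processes:
            p.join()
            if hasattr(p, "close"):           # Python 3.7+, reaps the zombie
                p.close()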
@@ -2194,49 +2249,64 @@ class CookerParser(object):
self.syncthread.join()
def load_cached(self):
- for mc, cache, filename, appends in self.fromcache:
- cached, infos = cache.load(filename, appends)
- yield not cached, mc, infos
+ for mc, cache, filename, appends, layername in self.fromcache:
+ infos = cache.loadCached(filename, appends)
+ yield False, mc, infos
def parse_generator(self):
- while True:
+ empty = False
+ while self.processes or not empty:
+ for process in self.processes.copy():
+ if not process.is_alive():
+ process.join()
+ self.processes.remove(process)
+
if self.parsed >= self.toparse:
break
try:
result = self.result_queue.get(timeout=0.25)
except queue.Empty:
- pass
+ empty = True
+ yield None, None, None
else:
- value = result[1]
- if isinstance(value, BaseException):
- raise value
- else:
- yield result
+ empty = False
+ yield result
+
+ if not (self.parsed >= self.toparse):
+ raise bb.parse.ParseError("Not all recipes parsed, parser thread killed/died? Exiting.", None)
+
def parse_next(self):
result = []
parsed = None
try:
parsed, mc, result = next(self.results)
+ if isinstance(result, BaseException):
+ # Turn exceptions transported over the queue back into raised exceptions
+ raise result
+ if parsed is None:
+ # Timeout, loop back through the main loop
+ return True
+
except StopIteration:
self.shutdown()
return False
except bb.BBHandledException as exc:
self.error += 1
- logger.error('Failed to parse recipe: %s' % exc.recipe)
- self.shutdown(clean=False, force=True)
+ logger.debug('Failed to parse recipe: %s' % exc.recipe)
+ self.shutdown(clean=False)
return False
except ParsingFailure as exc:
self.error += 1
logger.error('Unable to parse %s: %s' %
(exc.recipe, bb.exceptions.to_string(exc.realexception)))
- self.shutdown(clean=False, force=True)
+ self.shutdown(clean=False)
return False
except bb.parse.ParseError as exc:
self.error += 1
logger.error(str(exc))
- self.shutdown(clean=False, force=True)
+ self.shutdown(clean=False, eventmsg=str(exc))
return False
except bb.data_smart.ExpansionError as exc:
self.error += 1
@@ -2245,7 +2315,7 @@ class CookerParser(object):
tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback))
logger.error('ExpansionError during parsing %s', value.recipe,
exc_info=(etype, value, tb))
- self.shutdown(clean=False, force=True)
+ self.shutdown(clean=False)
return False
except Exception as exc:
self.error += 1
@@ -2257,7 +2327,7 @@ class CookerParser(object):
# Most likely, an exception occurred during raising an exception
import traceback
logger.error('Exception during parse: %s' % traceback.format_exc())
- self.shutdown(clean=False, force=True)
+ self.shutdown(clean=False)
return False
self.current += 1
@@ -2279,11 +2349,13 @@ class CookerParser(object):
return True
def reparse(self, filename):
+ bb.cache.SiggenRecipeInfo.reset()
to_reparse = set()
for mc in self.cooker.multiconfigs:
- to_reparse.add((mc, filename, self.cooker.collections[mc].get_file_appends(filename)))
+ layername = self.cooker.collections[mc].calc_bbfile_priority(filename)[2]
+ to_reparse.add((mc, filename, self.cooker.collections[mc].get_file_appends(filename), layername))
- for mc, filename, appends in to_reparse:
- infos = self.bb_caches[mc].parse(filename, appends)
+ for mc, filename, appends, layername in to_reparse:
+ infos = self.bb_caches[mc].parse(filename, appends, layername)
for vfn, info_array in infos:
self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)
diff --git a/lib/bb/cookerdata.py b/lib/bb/cookerdata.py
index 1c1e008c6..0649e4099 100644
--- a/lib/bb/cookerdata.py
+++ b/lib/bb/cookerdata.py
@@ -57,7 +57,7 @@ class ConfigParameters(object):
def updateToServer(self, server, environment):
options = {}
- for o in ["abort", "force", "invalidate_stamp",
+ for o in ["halt", "force", "invalidate_stamp",
"dry_run", "dump_signatures",
"extra_assume_provided", "profile",
"prefile", "postfile", "server_timeout",
@@ -86,7 +86,7 @@ class ConfigParameters(object):
action['msg'] = "Only one target can be used with the --environment option."
elif self.options.buildfile and len(self.options.pkgs_to_build) > 0:
action['msg'] = "No target should be used with the --environment and --buildfile options."
- elif len(self.options.pkgs_to_build) > 0:
+ elif self.options.pkgs_to_build:
action['action'] = ["showEnvironmentTarget", self.options.pkgs_to_build]
else:
action['action'] = ["showEnvironment", self.options.buildfile]
@@ -124,7 +124,7 @@ class CookerConfiguration(object):
self.prefile = []
self.postfile = []
self.cmd = None
- self.abort = True
+ self.halt = True
self.force = False
self.profile = False
self.nosetscene = False
@@ -160,12 +160,7 @@ def catch_parse_error(func):
def wrapped(fn, *args):
try:
return func(fn, *args)
- except IOError as exc:
- import traceback
- parselog.critical(traceback.format_exc())
- parselog.critical("Unable to parse %s: %s" % (fn, exc))
- raise bb.BBHandledException()
- except bb.data_smart.ExpansionError as exc:
+ except Exception as exc:
import traceback
bbdir = os.path.dirname(__file__) + os.sep
@@ -177,14 +172,11 @@ def catch_parse_error(func):
break
parselog.critical("Unable to parse %s" % fn, exc_info=(exc_class, exc, tb))
raise bb.BBHandledException()
- except bb.parse.ParseError as exc:
- parselog.critical(str(exc))
- raise bb.BBHandledException()
return wrapped
@catch_parse_error
def parse_config_file(fn, data, include=True):
- return bb.parse.handle(fn, data, include)
+ return bb.parse.handle(fn, data, include, baseconfig=True)
@catch_parse_error
def _inherit(bbclass, data):
@@ -210,7 +202,7 @@ def findConfigFile(configfile, data):
#
# We search for a conf/bblayers.conf under an entry in BBPATH or in cwd working
-# up to /. If that fails, we search for a conf/bitbake.conf in BBPATH.
+# up to /. If that fails, bitbake falls back to the current working directory.
#
def findTopdir():
@@ -223,11 +215,8 @@ def findTopdir():
layerconf = findConfigFile("bblayers.conf", d)
if layerconf:
return os.path.dirname(os.path.dirname(layerconf))
- if bbpath:
- bitbakeconf = bb.utils.which(bbpath, "conf/bitbake.conf")
- if bitbakeconf:
- return os.path.dirname(os.path.dirname(bitbakeconf))
- return None
+
+ return os.path.abspath(os.getcwd())
class CookerDataBuilder(object):
@@ -250,10 +239,14 @@ class CookerDataBuilder(object):
self.savedenv = bb.data.init()
for k in cookercfg.env:
self.savedenv.setVar(k, cookercfg.env[k])
+ if k in bb.data_smart.bitbake_renamed_vars:
+ bb.error('Shell environment variable %s has been renamed to %s' % (k, bb.data_smart.bitbake_renamed_vars[k]))
+ bb.fatal("Exiting to allow enviroment variables to be corrected")
filtered_keys = bb.utils.approved_variables()
bb.data.inheritFromOS(self.basedata, self.savedenv, filtered_keys)
self.basedata.setVar("BB_ORIGENV", self.savedenv)
+ self.basedata.setVar("__bbclasstype", "global")
if worker:
self.basedata.setVar("BB_WORKERCONTEXT", "1")
@@ -261,15 +254,15 @@ class CookerDataBuilder(object):
self.data = self.basedata
self.mcdata = {}
- def parseBaseConfiguration(self):
+ def parseBaseConfiguration(self, worker=False):
+ mcdata = {}
data_hash = hashlib.sha256()
try:
self.data = self.parseConfigurationFiles(self.prefiles, self.postfiles)
- if self.data.getVar("BB_WORKERCONTEXT", False) is None:
+ if self.data.getVar("BB_WORKERCONTEXT", False) is None and not worker:
bb.fetch.fetcher_init(self.data)
bb.parse.init_parser(self.data)
- bb.codeparser.parser_cache_init(self.data)
bb.event.fire(bb.event.ConfigParsed(), self.data)
@@ -287,38 +280,62 @@ class CookerDataBuilder(object):
bb.parse.init_parser(self.data)
data_hash.update(self.data.get_hash().encode('utf-8'))
- self.mcdata[''] = self.data
+ mcdata[''] = self.data
multiconfig = (self.data.getVar("BBMULTICONFIG") or "").split()
for config in multiconfig:
- mcdata = self.parseConfigurationFiles(self.prefiles, self.postfiles, config)
- bb.event.fire(bb.event.ConfigParsed(), mcdata)
- self.mcdata[config] = mcdata
- data_hash.update(mcdata.get_hash().encode('utf-8'))
+ if config[0].isdigit():
+ bb.fatal("Multiconfig name '%s' is invalid as multiconfigs cannot start with a digit" % config)
+ parsed_mcdata = self.parseConfigurationFiles(self.prefiles, self.postfiles, config)
+ bb.event.fire(bb.event.ConfigParsed(), parsed_mcdata)
+ mcdata[config] = parsed_mcdata
+ data_hash.update(parsed_mcdata.get_hash().encode('utf-8'))
if multiconfig:
- bb.event.fire(bb.event.MultiConfigParsed(self.mcdata), self.data)
+ bb.event.fire(bb.event.MultiConfigParsed(mcdata), self.data)
self.data_hash = data_hash.hexdigest()
- except (SyntaxError, bb.BBHandledException):
- raise bb.BBHandledException()
except bb.data_smart.ExpansionError as e:
logger.error(str(e))
raise bb.BBHandledException()
- except Exception:
- logger.exception("Error parsing configuration files")
+
+ bb.codeparser.update_module_dependencies(self.data)
+
+ # Handle obsolete variable names
+ d = self.data
+ renamedvars = d.getVarFlags('BB_RENAMED_VARIABLES') or {}
+ renamedvars.update(bb.data_smart.bitbake_renamed_vars)
+ issues = False
+ for v in renamedvars:
+ if d.getVar(v) != None or d.hasOverrides(v):
+ issues = True
+ loginfo = {}
+ history = d.varhistory.get_variable_refs(v)
+ for h in history:
+ for line in history[h]:
+ loginfo = {'file' : h, 'line' : line}
+ bb.data.data_smart._print_rename_error(v, loginfo, renamedvars)
+ if not history:
+ bb.data.data_smart._print_rename_error(v, loginfo, renamedvars)
+ if issues:
raise bb.BBHandledException()
+ for mc in mcdata:
+ mcdata[mc].renameVar("__depends", "__base_depends")
+ mcdata[mc].setVar("__bbclasstype", "recipe")
+
# Create a copy so we can reset at a later date when UIs disconnect
- self.origdata = self.data
- self.data = bb.data.createCopy(self.origdata)
- self.mcdata[''] = self.data
+ self.mcorigdata = mcdata
+ for mc in mcdata:
+ self.mcdata[mc] = bb.data.createCopy(mcdata[mc])
+ self.data = self.mcdata['']
def reset(self):
# We may not have run parseBaseConfiguration() yet
- if not hasattr(self, 'origdata'):
+ if not hasattr(self, 'mcorigdata'):
return
- self.data = bb.data.createCopy(self.origdata)
- self.mcdata[''] = self.data
+ for mc in self.mcorigdata:
+ self.mcdata[mc] = bb.data.createCopy(self.mcorigdata[mc])
+ self.data = self.mcdata['']
def _findLayerConf(self, data):
return findConfigFile("bblayers.conf", data)
@@ -333,15 +350,23 @@ class CookerDataBuilder(object):
layerconf = self._findLayerConf(data)
if layerconf:
- parselog.debug(2, "Found bblayers.conf (%s)", layerconf)
+ parselog.debug2("Found bblayers.conf (%s)", layerconf)
# By definition bblayers.conf is in conf/ of TOPDIR.
# We may have been called with cwd somewhere else so reset TOPDIR
data.setVar("TOPDIR", os.path.dirname(os.path.dirname(layerconf)))
data = parse_config_file(layerconf, data)
+ if not data.getVar("BB_CACHEDIR"):
+ data.setVar("BB_CACHEDIR", "${TOPDIR}/cache")
+
+ bb.codeparser.parser_cache_init(data.getVar("BB_CACHEDIR"))
+
layers = (data.getVar('BBLAYERS') or "").split()
broken_layers = []
+ if not layers:
+ bb.fatal("The bblayers.conf file doesn't contain any BBLAYERS definition")
+
data = bb.data.createCopy(data)
approved = bb.utils.approved_variables()
@@ -357,8 +382,10 @@ class CookerDataBuilder(object):
parselog.critical("Please check BBLAYERS in %s" % (layerconf))
raise bb.BBHandledException()
+ layerseries = None
+ compat_entries = {}
for layer in layers:
- parselog.debug(2, "Adding layer %s", layer)
+ parselog.debug2("Adding layer %s", layer)
if 'HOME' in approved and '~' in layer:
layer = os.path.expanduser(layer)
if layer.endswith('/'):
@@ -369,8 +396,27 @@ class CookerDataBuilder(object):
data.expandVarref('LAYERDIR')
data.expandVarref('LAYERDIR_RE')
+ # Sadly we can't have nice things.
+ # Some layers think they're going to be 'clever' and copy the values from
+ # another layer, e.g. using ${LAYERSERIES_COMPAT_core}. The whole point of
+ # this mechanism is to make it clear which releases a layer supports and
+ # show when a layer's master branch is bitrotting and unmaintained.
+ # We therefore avoid people doing this here.
+ collections = (data.getVar('BBFILE_COLLECTIONS') or "").split()
+ for c in collections:
+ compat_entry = data.getVar("LAYERSERIES_COMPAT_%s" % c)
+ if compat_entry:
+ compat_entries[c] = set(compat_entry.split())
+ data.delVar("LAYERSERIES_COMPAT_%s" % c)
+ if not layerseries:
+ layerseries = set((data.getVar("LAYERSERIES_CORENAMES") or "").split())
+ if layerseries:
+ data.delVar("LAYERSERIES_CORENAMES")
+
data.delVar('LAYERDIR_RE')
data.delVar('LAYERDIR')
+ for c in compat_entries:
+ data.setVar("LAYERSERIES_COMPAT_%s" % c, " ".join(sorted(compat_entries[c])))
bbfiles_dynamic = (data.getVar('BBFILES_DYNAMIC') or "").split()
collections = (data.getVar('BBFILE_COLLECTIONS') or "").split()
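Note (editorial sketch, not part of the patch): the capture-and-restore above keeps LAYERSERIES_COMPAT_* values out of the datastore while later layer.conf files parse, so a layer cannot copy another's value via ${LAYERSERIES_COMPAT_core}. Dict-based illustration of the flow:

    data = {"LAYERSERIES_COMPAT_core": "kirkstone langdale"}
    compat_entries = {}
    for c in ("core",):
        entry = data.get("LAYERSERIES_COMPAT_%s" % c)
        if entry:
            compat_entries[c] = set(entry.split())
            del data["LAYERSERIES_COMPAT_%s" % c]   # hidden from later layers
    # ... after every layer.conf has been parsed:
    for c in compat_entries:
        data["LAYERSERIES_COMPAT_%s" % c] = " ".join(sorted(compat_entries[c]))
    print(data)   # {'LAYERSERIES_COMPAT_core': 'kirkstone langdale'}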
@@ -389,26 +435,38 @@ class CookerDataBuilder(object):
if invalid:
bb.fatal("BBFILES_DYNAMIC entries must be of the form {!}<collection name>:<filename pattern>, not:\n %s" % "\n ".join(invalid))
- layerseries = set((data.getVar("LAYERSERIES_CORENAMES") or "").split())
collections_tmp = collections[:]
for c in collections:
collections_tmp.remove(c)
if c in collections_tmp:
bb.fatal("Found duplicated BBFILE_COLLECTIONS '%s', check bblayers.conf or layer.conf to fix it." % c)
- compat = set((data.getVar("LAYERSERIES_COMPAT_%s" % c) or "").split())
+
+ compat = set()
+ if c in compat_entries:
+ compat = compat_entries[c]
+ if compat and not layerseries:
+ bb.fatal("No core layer found to work with layer '%s'. Missing entry in bblayers.conf?" % c)
if compat and not (compat & layerseries):
bb.fatal("Layer %s is not compatible with the core layer which only supports these series: %s (layer is compatible with %s)"
% (c, " ".join(layerseries), " ".join(compat)))
elif not compat and not data.getVar("BB_WORKERCONTEXT"):
bb.warn("Layer %s should set LAYERSERIES_COMPAT_%s in its conf/layer.conf file to list the core layer names it is compatible with." % (c, c))
+ data.setVar("LAYERSERIES_CORENAMES", " ".join(sorted(layerseries)))
+
if not data.getVar("BBPATH"):
msg = "The BBPATH variable is not set"
if not layerconf:
msg += (" and bitbake did not find a conf/bblayers.conf file in"
" the expected location.\nMaybe you accidentally"
" invoked bitbake from the wrong directory?")
- raise SystemExit(msg)
+ bb.fatal(msg)
+
+ if not data.getVar("TOPDIR"):
+ data.setVar("TOPDIR", os.path.abspath(os.getcwd()))
+ if not data.getVar("BB_CACHEDIR"):
+ data.setVar("BB_CACHEDIR", "${TOPDIR}/cache")
+ bb.codeparser.parser_cache_init(data.getVar("BB_CACHEDIR"))
data = parse_config_file(os.path.join("conf", "bitbake.conf"), data)
@@ -421,7 +479,7 @@ class CookerDataBuilder(object):
for bbclass in bbclasses:
data = _inherit(bbclass, data)
- # Nomally we only register event handlers at the end of parsing .bb files
+ # Normally we only register event handlers at the end of parsing .bb files
# We register any handlers we've found so far here...
for var in data.getVar('__BBHANDLERS', False) or []:
handlerfn = data.getVarFlag(var, "filename", False)
@@ -435,3 +493,54 @@ class CookerDataBuilder(object):
return data
+ @staticmethod
+ def _parse_recipe(bb_data, bbfile, appends, mc, layername):
+ bb_data.setVar("__BBMULTICONFIG", mc)
+ bb_data.setVar("FILE_LAYERNAME", layername)
+
+ bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
+ bb.parse.cached_mtime_noerror(bbfile_loc)
+
+ if appends:
+ bb_data.setVar('__BBAPPEND', " ".join(appends))
+
+ return bb.parse.handle(bbfile, bb_data)
+
+ def parseRecipeVariants(self, bbfile, appends, virtonly=False, mc=None, layername=None):
+ """
+ Load and parse one .bb build file
+ Return the data and whether parsing resulted in the file being skipped
+ """
+
+ if virtonly:
+ (bbfile, virtual, mc) = bb.cache.virtualfn2realfn(bbfile)
+ bb_data = self.mcdata[mc].createCopy()
+ bb_data.setVar("__ONLYFINALISE", virtual or "default")
+ return self._parse_recipe(bb_data, bbfile, appends, mc, layername)
+
+ if mc is not None:
+ bb_data = self.mcdata[mc].createCopy()
+ return self._parse_recipe(bb_data, bbfile, appends, mc, layername)
+
+ bb_data = self.data.createCopy()
+ datastores = self._parse_recipe(bb_data, bbfile, appends, '', layername)
+
+ for mc in self.mcdata:
+ if not mc:
+ continue
+ bb_data = self.mcdata[mc].createCopy()
+ newstores = self._parse_recipe(bb_data, bbfile, appends, mc, layername)
+ for ns in newstores:
+ datastores["mc:%s:%s" % (mc, ns)] = newstores[ns]
+
+ return datastores
+
+ def parseRecipe(self, virtualfn, appends, layername):
+ """
+ Return a complete set of data for fn.
+ To do this, we need to parse the file.
+ """
+ logger.debug("Parsing %s (full)" % virtualfn)
+ (fn, virtual, mc) = bb.cache.virtualfn2realfn(virtualfn)
+ datastores = self.parseRecipeVariants(virtualfn, appends, virtonly=True, layername=layername)
+ return datastores[virtual]
diff --git a/lib/bb/daemonize.py b/lib/bb/daemonize.py
index c187fcfc6..768940443 100644
--- a/lib/bb/daemonize.py
+++ b/lib/bb/daemonize.py
@@ -1,4 +1,6 @@
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -74,26 +76,26 @@ def createDaemon(function, logfile):
with open('/dev/null', 'r') as si:
os.dup2(si.fileno(), sys.stdin.fileno())
- try:
- so = open(logfile, 'a+')
- os.dup2(so.fileno(), sys.stdout.fileno())
- os.dup2(so.fileno(), sys.stderr.fileno())
- except io.UnsupportedOperation:
- sys.stdout = open(logfile, 'a+')
+ with open(logfile, 'a+') as so:
+ try:
+ os.dup2(so.fileno(), sys.stdout.fileno())
+ os.dup2(so.fileno(), sys.stderr.fileno())
+ except io.UnsupportedOperation:
+ sys.stdout = so
- # Have stdout and stderr be the same so log output matches chronologically
- # and there aren't two seperate buffers
- sys.stderr = sys.stdout
+ # Have stdout and stderr be the same so log output matches chronologically
+ # and there aren't two separate buffers
+ sys.stderr = sys.stdout
- try:
- function()
- except Exception as e:
- traceback.print_exc()
- finally:
- bb.event.print_ui_queue()
- # os._exit() doesn't flush open files like os.exit() does. Manually flush
- # stdout and stderr so that any logging output will be seen, particularly
- # exception tracebacks.
- sys.stdout.flush()
- sys.stderr.flush()
- os._exit(0)
+ try:
+ function()
+ except Exception as e:
+ traceback.print_exc()
+ finally:
+ bb.event.print_ui_queue()
+ # os._exit() doesn't flush open files like os.exit() does. Manually flush
+ # stdout and stderr so that any logging output will be seen, particularly
+ # exception tracebacks.
+ sys.stdout.flush()
+ sys.stderr.flush()
+ os._exit(0)
diff --git a/lib/bb/data.py b/lib/bb/data.py
index 97022853c..505f42950 100644
--- a/lib/bb/data.py
+++ b/lib/bb/data.py
@@ -4,14 +4,16 @@ BitBake 'Data' implementations
Functions for interacting with the data structure used by the
BitBake build tools.
-The expandKeys and update_data are the most expensive
-operations. At night the cookie monster came by and
+expandKeys and datastore iteration are the most expensive
+operations. Updating overrides is now "on the fly" but still based
+on the idea of the cookie monster introduced by zecke:
+"At night the cookie monster came by and
suggested 'give me cookies on setting the variables and
things will work out'. Taking this suggestion into account
applying the skills from the not yet passed 'Entwurf und
Analyse von Algorithmen' lecture and the cookie
monster seems to be right. We will track setVar more carefully
-to have faster update_data and expandKeys operations.
+to have faster datastore operations."
This is a trade-off between speed and memory again but
the speed is more critical here.
@@ -26,11 +28,6 @@ the speed is more critical here.
import sys, os, re
import hashlib
-if sys.argv[0][-5:] == "pydoc":
- path = os.path.dirname(os.path.dirname(sys.argv[1]))
-else:
- path = os.path.dirname(os.path.dirname(sys.argv[0]))
-sys.path.insert(0, path)
from itertools import groupby
from bb import data_smart
@@ -70,10 +67,6 @@ def keys(d):
"""Return a list of keys in d"""
return d.keys()
-
-__expand_var_regexp__ = re.compile(r"\${[^{}]+}")
-__expand_python_regexp__ = re.compile(r"\${@.+?}")
-
def expand(s, d, varname = None):
"""Variable expansion using the data store"""
return d.expand(s, varname)
@@ -121,8 +114,8 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False):
if d.getVarFlag(var, 'python', False) and func:
return False
- export = d.getVarFlag(var, "export", False)
- unexport = d.getVarFlag(var, "unexport", False)
+ export = bb.utils.to_boolean(d.getVarFlag(var, "export"))
+ unexport = bb.utils.to_boolean(d.getVarFlag(var, "unexport"))
if not all and not export and not unexport and not func:
return False
@@ -195,8 +188,8 @@ def emit_env(o=sys.__stdout__, d = init(), all=False):
def exported_keys(d):
return (key for key in d.keys() if not key.startswith('__') and
- d.getVarFlag(key, 'export', False) and
- not d.getVarFlag(key, 'unexport', False))
+ bb.utils.to_boolean(d.getVarFlag(key, 'export')) and
+ not bb.utils.to_boolean(d.getVarFlag(key, 'unexport')))
def exported_vars(d):
k = list(exported_keys(d))
@@ -226,7 +219,7 @@ def emit_func(func, o=sys.__stdout__, d = init()):
deps = newdeps
seen |= deps
newdeps = set()
- for dep in deps:
+ for dep in sorted(deps):
if d.getVarFlag(dep, "func", False) and not d.getVarFlag(dep, "python", False):
emit_var(dep, o, d, False) and o.write('\n')
newdeps |= bb.codeparser.ShellParser(dep, logger).parse_shell(d.getVar(dep))
@@ -268,65 +261,72 @@ def emit_func_python(func, o=sys.__stdout__, d = init()):
newdeps |= set((d.getVarFlag(dep, "vardeps") or "").split())
newdeps -= seen
-def update_data(d):
- """Performs final steps upon the datastore, including application of overrides"""
- d.finalize(parent = True)
+def build_dependencies(key, keys, mod_funcs, shelldeps, varflagsexcl, ignored_vars, d, codeparsedata):
+ def handle_contains(value, contains, exclusions, d):
+ newvalue = []
+ if value:
+ newvalue.append(str(value))
+ for k in sorted(contains):
+ if k in exclusions or k in ignored_vars:
+ continue
+ l = (d.getVar(k) or "").split()
+ for item in sorted(contains[k]):
+ for word in item.split():
+ if not word in l:
+ newvalue.append("\n%s{%s} = Unset" % (k, item))
+ break
+ else:
+ newvalue.append("\n%s{%s} = Set" % (k, item))
+ return "".join(newvalue)
+
+ def handle_remove(value, deps, removes, d):
+ for r in sorted(removes):
+ r2 = d.expandWithRefs(r, None)
+ value += "\n_remove of %s" % r
+ deps |= r2.references
+ deps = deps | (keys & r2.execs)
+ value = handle_contains(value, r2.contains, exclusions, d)
+ return value
-def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
deps = set()
try:
+ if key in mod_funcs:
+ exclusions = set()
+ moddep = bb.codeparser.modulecode_deps[key]
+ value = handle_contains("", moddep[3], exclusions, d)
+ return frozenset((moddep[0] | keys & moddep[1]) - ignored_vars), value
+
if key[-1] == ']':
vf = key[:-1].split('[')
+ if vf[1] == "vardepvalueexclude":
+ return deps, ""
value, parser = d.getVarFlag(vf[0], vf[1], False, retparser=True)
deps |= parser.references
deps = deps | (keys & parser.execs)
- return deps, value
+ deps -= ignored_vars
+ return frozenset(deps), value
varflags = d.getVarFlags(key, ["vardeps", "vardepvalue", "vardepsexclude", "exports", "postfuncs", "prefuncs", "lineno", "filename"]) or {}
vardeps = varflags.get("vardeps")
-
- def handle_contains(value, contains, d):
- newvalue = ""
- for k in sorted(contains):
- l = (d.getVar(k) or "").split()
- for item in sorted(contains[k]):
- for word in item.split():
- if not word in l:
- newvalue += "\n%s{%s} = Unset" % (k, item)
- break
- else:
- newvalue += "\n%s{%s} = Set" % (k, item)
- if not newvalue:
- return value
- if not value:
- return newvalue
- return value + newvalue
-
- def handle_remove(value, deps, removes, d):
- for r in sorted(removes):
- r2 = d.expandWithRefs(r, None)
- value += "\n_remove of %s" % r
- deps |= r2.references
- deps = deps | (keys & r2.execs)
- return value
+ exclusions = varflags.get("vardepsexclude", "").split()
if "vardepvalue" in varflags:
value = varflags.get("vardepvalue")
elif varflags.get("func"):
if varflags.get("python"):
- value = d.getVarFlag(key, "_content", False)
+ value = codeparsedata.getVarFlag(key, "_content", False)
parser = bb.codeparser.PythonParser(key, logger)
parser.parse_python(value, filename=varflags.get("filename"), lineno=varflags.get("lineno"))
deps = deps | parser.references
deps = deps | (keys & parser.execs)
- value = handle_contains(value, parser.contains, d)
+ value = handle_contains(value, parser.contains, exclusions, d)
else:
- value, parsedvar = d.getVarFlag(key, "_content", False, retparser=True)
+ value, parsedvar = codeparsedata.getVarFlag(key, "_content", False, retparser=True)
parser = bb.codeparser.ShellParser(key, logger)
parser.parse_shell(parsedvar.value)
deps = deps | shelldeps
deps = deps | parsedvar.references
deps = deps | (keys & parser.execs) | (keys & parsedvar.execs)
- value = handle_contains(value, parsedvar.contains, d)
+ value = handle_contains(value, parsedvar.contains, exclusions, d)
if hasattr(parsedvar, "removes"):
value = handle_remove(value, deps, parsedvar.removes, d)
if vardeps is None:
@@ -341,7 +341,7 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
value, parser = d.getVarFlag(key, "_content", False, retparser=True)
deps |= parser.references
deps = deps | (keys & parser.execs)
- value = handle_contains(value, parser.contains, d)
+ value = handle_contains(value, parser.contains, exclusions, d)
if hasattr(parser, "removes"):
value = handle_remove(value, deps, parser.removes, d)
@@ -361,43 +361,50 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
deps |= set(varfdeps)
deps |= set((vardeps or "").split())
- deps -= set(varflags.get("vardepsexclude", "").split())
+ deps -= set(exclusions)
+ deps -= ignored_vars
except bb.parse.SkipRecipe:
raise
except Exception as e:
bb.warn("Exception during build_dependencies for %s" % key)
raise
- return deps, value
+ return frozenset(deps), value
#bb.note("Variable %s references %s and calls %s" % (key, str(deps), str(execs)))
#d.setVarFlag(key, "vardeps", deps)
-def generate_dependencies(d, whitelist):
+def generate_dependencies(d, ignored_vars):
- keys = set(key for key in d if not key.startswith("__"))
- shelldeps = set(key for key in d.getVar("__exportlist", False) if d.getVarFlag(key, "export", False) and not d.getVarFlag(key, "unexport", False))
+ mod_funcs = set(bb.codeparser.modulecode_deps.keys())
+ keys = set(key for key in d if not key.startswith("__")) | mod_funcs
+ shelldeps = set(key for key in d.getVar("__exportlist", False) if bb.utils.to_boolean(d.getVarFlag(key, "export")) and not bb.utils.to_boolean(d.getVarFlag(key, "unexport")))
varflagsexcl = d.getVar('BB_SIGNATURE_EXCLUDE_FLAGS')
+ codeparserd = d.createCopy()
+ for forced in (d.getVar('BB_HASH_CODEPARSER_VALS') or "").split():
+ key, value = forced.split("=", 1)
+ codeparserd.setVar(key, value)
+
deps = {}
values = {}
tasklist = d.getVar('__BBTASKS', False) or []
for task in tasklist:
- deps[task], values[task] = build_dependencies(task, keys, shelldeps, varflagsexcl, d)
+ deps[task], values[task] = build_dependencies(task, keys, mod_funcs, shelldeps, varflagsexcl, ignored_vars, d, codeparserd)
newdeps = deps[task]
seen = set()
while newdeps:
- nextdeps = newdeps - whitelist
+ nextdeps = newdeps
seen |= nextdeps
newdeps = set()
for dep in nextdeps:
if dep not in deps:
- deps[dep], values[dep] = build_dependencies(dep, keys, shelldeps, varflagsexcl, d)
+ deps[dep], values[dep] = build_dependencies(dep, keys, mod_funcs, shelldeps, varflagsexcl, ignored_vars, d, codeparserd)
newdeps |= deps[dep]
newdeps -= seen
#print "For %s: %s" % (task, str(deps[task]))
return tasklist, deps, values
-def generate_dependency_hash(tasklist, gendeps, lookupcache, whitelist, fn):
+def generate_dependency_hash(tasklist, gendeps, lookupcache, ignored_vars, fn):
taskdeps = {}
basehash = {}
@@ -406,9 +413,10 @@ def generate_dependency_hash(tasklist, gendeps, lookupcache, whitelist, fn):
if data is None:
bb.error("Task %s from %s seems to be empty?!" % (task, fn))
- data = ''
+ data = []
+ else:
+ data = [data]
- gendeps[task] -= whitelist
newdeps = gendeps[task]
seen = set()
while newdeps:
@@ -416,27 +424,24 @@ def generate_dependency_hash(tasklist, gendeps, lookupcache, whitelist, fn):
seen |= nextdeps
newdeps = set()
for dep in nextdeps:
- if dep in whitelist:
- continue
- gendeps[dep] -= whitelist
newdeps |= gendeps[dep]
newdeps -= seen
alldeps = sorted(seen)
for dep in alldeps:
- data = data + dep
+ data.append(dep)
var = lookupcache[dep]
if var is not None:
- data = data + str(var)
+ data.append(str(var))
k = fn + ":" + task
- basehash[k] = hashlib.sha256(data.encode("utf-8")).hexdigest()
- taskdeps[task] = alldeps
+ basehash[k] = hashlib.sha256("".join(data).encode("utf-8")).hexdigest()
+ taskdeps[task] = frozenset(seen)
return taskdeps, basehash
def inherits_class(klass, d):
val = d.getVar('__inherit_cache', False) or []
- needle = os.path.join('classes', '%s.bbclass' % klass)
+ needle = '/%s.bbclass' % klass
for v in val:
if v.endswith(needle):
return True
diff --git a/lib/bb/data_smart.py b/lib/bb/data_smart.py
index 2328c334a..0128a5bb1 100644
--- a/lib/bb/data_smart.py
+++ b/lib/bb/data_smart.py
@@ -16,8 +16,11 @@ BitBake build tools.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-import copy, re, sys, traceback
-from collections import MutableMapping
+import builtins
+import copy
+import re
+import sys
+from collections.abc import MutableMapping
import logging
import hashlib
import bb, bb.codeparser
@@ -26,13 +29,25 @@ from bb.COW import COWDictBase
logger = logging.getLogger("BitBake.Data")
-__setvar_keyword__ = ["_append", "_prepend", "_remove"]
-__setvar_regexp__ = re.compile(r'(?P<base>.*?)(?P<keyword>_append|_prepend|_remove)(_(?P<add>[^A-Z]*))?$')
-__expand_var_regexp__ = re.compile(r"\${[a-zA-Z0-9\-_+./~]+?}")
-__expand_python_regexp__ = re.compile(r"\${@.+?}")
+__setvar_keyword__ = [":append", ":prepend", ":remove"]
+__setvar_regexp__ = re.compile(r'(?P<base>.*?)(?P<keyword>:append|:prepend|:remove)(:(?P<add>[^A-Z]*))?$')
+__expand_var_regexp__ = re.compile(r"\${[a-zA-Z0-9\-_+./~:]+?}")
+__expand_python_regexp__ = re.compile(r"\${@(?:{.*?}|.)+?}")
__whitespace_split__ = re.compile(r'(\s)')
__override_regexp__ = re.compile(r'[a-z0-9]+')
+bitbake_renamed_vars = {
+ "BB_ENV_WHITELIST": "BB_ENV_PASSTHROUGH",
+ "BB_ENV_EXTRAWHITE": "BB_ENV_PASSTHROUGH_ADDITIONS",
+ "BB_HASHBASE_WHITELIST": "BB_BASEHASH_IGNORE_VARS",
+ "BB_HASHCONFIG_WHITELIST": "BB_HASHCONFIG_IGNORE_VARS",
+ "BB_HASHTASK_WHITELIST": "BB_TASKHASH_IGNORE_TASKS",
+ "BB_SETSCENE_ENFORCE_WHITELIST": "BB_SETSCENE_ENFORCE_IGNORE_TASKS",
+ "MULTI_PROVIDER_WHITELIST": "BB_MULTI_PROVIDER_ALLOWED",
+ "BB_STAMP_WHITELIST": "is a deprecated variable and support has been removed",
+ "BB_STAMP_POLICY": "is a deprecated variable and support has been removed",
+}
+
def infer_caller_details(loginfo, parent = False, varval = True):
"""Save the caller the trouble of specifying everything."""
# Save effort.
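The keyword and regexp changes at the top of this hunk switch BitBake's override syntax from underscores to colons. A minimal sketch of the new behaviour through the datastore API (bb.data.init() is how bitbake's own tests obtain a standalone datastore; the variable names are invented):

    import bb.data

    d = bb.data.init()
    d.setVar("OVERRIDES", "qemuarm")
    d.setVar("FOO", "a")
    d.setVar("FOO:qemuarm", "b")      # override-specific value (was FOO_qemuarm)
    d.setVar("FOO:append", " extra")  # append operation (was FOO_append)
    assert d.getVar("FOO") == "b extra"

As before, the matching override value replaces the base value and the append is applied on top of the result.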
@@ -80,10 +95,11 @@ def infer_caller_details(loginfo, parent = False, varval = True):
loginfo['func'] = func
class VariableParse:
- def __init__(self, varname, d, val = None):
+ def __init__(self, varname, d, unexpanded_value = None, val = None):
self.varname = varname
self.d = d
self.value = val
+ self.unexpanded_value = unexpanded_value
self.references = set()
self.execs = set()
@@ -107,6 +123,11 @@ class VariableParse:
else:
code = match.group()[3:-1]
+            # Do not run code that contains one or more unexpanded variables;
+            # instead, return the code with the characters we removed put back
+ if __expand_var_regexp__.findall(code):
+ return "${@" + code + "}"
+
if self.varname:
varname = 'Var <%s>' % self.varname
else:
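The effect of the new guard, sketched with invented variable names: an inline-python expression that still references an unset variable is handed back unexpanded instead of being executed, and only runs once the reference can be resolved:

    d.setVar("VAL", "${@'result-' + '${SUFFIX}'}")
    d.getVar("VAL")    # SUFFIX unset -> returns "${@'result-' + '${SUFFIX}'}" as-is
    d.setVar("SUFFIX", "x")
    d.getVar("VAL")    # -> "result-x"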
@@ -132,16 +153,21 @@ class VariableParse:
value = utils.better_eval(codeobj, DataContext(self.d), {'d' : self.d})
return str(value)
-
class DataContext(dict):
+ excluded = set([i for i in dir(builtins) if not i.startswith('_')] + ['oe'])
+
def __init__(self, metadata, **kwargs):
self.metadata = metadata
dict.__init__(self, **kwargs)
self['d'] = metadata
+ self.context = set(bb.utils.get_context())
def __missing__(self, key):
+ if key in self.excluded or key in self.context:
+ raise KeyError(key)
+
value = self.metadata.getVar(key)
- if value is None or self.metadata.getVarFlag(key, 'func', False):
+ if value is None:
raise KeyError(key)
else:
return value
@@ -151,6 +177,7 @@ class ExpansionError(Exception):
self.expression = expression
self.variablename = varname
self.exception = exception
+ self.varlist = [varname or expression or ""]
if varname:
if expression:
self.msg = "Failure expanding variable %s, expression was %s which triggered exception %s: %s" % (varname, expression, type(exception).__name__, exception)
@@ -160,8 +187,14 @@ class ExpansionError(Exception):
self.msg = "Failure expanding expression %s which triggered exception %s: %s" % (expression, type(exception).__name__, exception)
Exception.__init__(self, self.msg)
self.args = (varname, expression, exception)
+
+ def addVar(self, varname):
+ if varname:
+ self.varlist.append(varname)
+
def __str__(self):
- return self.msg
+ chain = "\nThe variable dependency chain for the failure is: " + " -> ".join(self.varlist)
+ return self.msg + chain
class IncludeHistory(object):
def __init__(self, parent = None, filename = '[TOP LEVEL]'):
@@ -277,7 +310,7 @@ class VariableHistory(object):
for (r, override) in d.overridedata[var]:
for event in self.variable(r):
loginfo = event.copy()
- if 'flag' in loginfo and not loginfo['flag'].startswith("_"):
+ if 'flag' in loginfo and not loginfo['flag'].startswith(("_", ":")):
continue
loginfo['variable'] = var
loginfo['op'] = 'override[%s]:%s' % (override, loginfo['op'])
@@ -329,6 +362,16 @@ class VariableHistory(object):
lines.append(line)
return lines
+ def get_variable_refs(self, var):
+ """Return a dict of file/line references"""
+ var_history = self.variable(var)
+ refs = {}
+ for event in var_history:
+ if event['file'] not in refs:
+ refs[event['file']] = []
+ refs[event['file']].append(event['line'])
+ return refs
+
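A usage sketch for the new helper (file names and line numbers invented; history is only recorded when tracking is enabled):

    d.enableTracking()
    # ... parse configuration/recipes ...
    refs = d.varhistory.get_variable_refs("DL_DIR")
    # e.g. {'conf/bitbake.conf': [395], 'conf/local.conf': [22]}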
def get_variable_items_files(self, var):
"""
Use variable history to map items added to a list variable and
@@ -342,7 +385,7 @@ class VariableHistory(object):
for event in history:
if 'flag' in event:
continue
- if event['op'] == '_remove':
+ if event['op'] == ':remove':
continue
if isset and event['op'] == 'set?':
continue
@@ -363,6 +406,23 @@ class VariableHistory(object):
else:
self.variables[var] = []
+def _print_rename_error(var, loginfo, renamedvars, fullvar=None):
+ info = ""
+ if "file" in loginfo:
+ info = " file: %s" % loginfo["file"]
+ if "line" in loginfo:
+ info += " line: %s" % loginfo["line"]
+ if fullvar and fullvar != var:
+ info += " referenced as: %s" % fullvar
+ if info:
+ info = " (%s)" % info.strip()
+ renameinfo = renamedvars[var]
+ if " " in renameinfo:
+ # A space signals a string to display instead of a rename
+ bb.erroronce('Variable %s %s%s' % (var, renameinfo, info))
+ else:
+ bb.erroronce('Variable %s has been renamed to %s%s' % (var, renameinfo, info))
+
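For illustration, a rename registered through the BB_RENAMED_VARIABLES hook added to setVarFlag() later in this patch would report like this (variable names hypothetical):

    # Equivalent to metadata: BB_RENAMED_VARIABLES[MY_OLD_VAR] = "MY_NEW_VAR"
    d.setVarFlag("BB_RENAMED_VARIABLES", "MY_OLD_VAR", "MY_NEW_VAR")
    d.setVar("MY_OLD_VAR", "1")
    # -> ERROR: Variable MY_OLD_VAR has been renamed to MY_NEW_VAR (file: ... line: ...)

    # A value containing a space is displayed verbatim instead of as a rename:
    d.setVarFlag("BB_RENAMED_VARIABLES", "MY_GONE_VAR",
                 "is deprecated and support has been removed")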
class DataSmart(MutableMapping):
def __init__(self):
self.dict = {}
@@ -370,6 +430,8 @@ class DataSmart(MutableMapping):
self.inchistory = IncludeHistory()
self.varhistory = VariableHistory(self)
self._tracking = False
+ self._var_renames = {}
+ self._var_renames.update(bitbake_renamed_vars)
self.expand_cache = {}
@@ -391,9 +453,9 @@ class DataSmart(MutableMapping):
def expandWithRefs(self, s, varname):
if not isinstance(s, str): # sanity check
- return VariableParse(varname, self, s)
+ return VariableParse(varname, self, s, s)
- varparse = VariableParse(varname, self)
+ varparse = VariableParse(varname, self, s)
while s.find('${') != -1:
olds = s
@@ -403,14 +465,17 @@ class DataSmart(MutableMapping):
s = __expand_python_regexp__.sub(varparse.python_sub, s)
except SyntaxError as e:
# Likely unmatched brackets, just don't expand the expression
- if e.msg != "EOL while scanning string literal":
+ if e.msg != "EOL while scanning string literal" and not e.msg.startswith("unterminated string literal"):
raise
if s == olds:
break
- except ExpansionError:
+ except ExpansionError as e:
+ e.addVar(varname)
raise
except bb.parse.SkipRecipe:
raise
+ except bb.BBHandledException:
+ raise
except Exception as exc:
tb = sys.exc_info()[2]
raise ExpansionError(varname, s, exc).with_traceback(tb) from exc
@@ -422,24 +487,19 @@ class DataSmart(MutableMapping):
def expand(self, s, varname = None):
return self.expandWithRefs(s, varname).value
- def finalize(self, parent = False):
- return
-
- def internal_finalize(self, parent = False):
- """Performs final steps upon the datastore, including application of overrides"""
- self.overrides = None
-
def need_overrides(self):
if self.overrides is not None:
return
if self.inoverride:
return
+        override_stack = []
for count in range(5):
self.inoverride = True
# Can end up here recursively so setup dummy values
self.overrides = []
self.overridesset = set()
self.overrides = (self.getVar("OVERRIDES") or "").split(":") or []
+            override_stack.append(self.overrides)
self.overridesset = set(self.overrides)
self.inoverride = False
self.expand_cache = {}
@@ -449,7 +509,7 @@ class DataSmart(MutableMapping):
self.overrides = newoverrides
self.overridesset = set(self.overrides)
else:
- bb.fatal("Overrides could not be expanded into a stable state after 5 iterations, overrides must be being referenced by other overridden variables in some recursive fashion. Please provide your configuration to bitbake-devel so we can laugh, er, I mean try and understand how to make it work.")
+            bb.fatal("Overrides could not be expanded into a stable state after 5 iterations, overrides must be being referenced by other overridden variables in some recursive fashion. Please provide your configuration to bitbake-devel so we can laugh, er, I mean try and understand how to make it work. The list of failing override expansions: %s" % "\n".join(str(s) for s in override_stack))
def initVar(self, var):
self.expand_cache = {}
@@ -460,27 +520,44 @@ class DataSmart(MutableMapping):
dest = self.dict
while dest:
if var in dest:
- return dest[var], self.overridedata.get(var, None)
+ return dest[var]
if "_data" not in dest:
break
dest = dest["_data"]
- return None, self.overridedata.get(var, None)
+ return None
def _makeShadowCopy(self, var):
if var in self.dict:
return
- local_var, _ = self._findVar(var)
+ local_var = self._findVar(var)
if local_var:
self.dict[var] = copy.copy(local_var)
else:
self.initVar(var)
+ def hasOverrides(self, var):
+ return var in self.overridedata
def setVar(self, var, value, **loginfo):
#print("var=" + str(var) + " val=" + str(value))
+
+ if not var.startswith("__anon_") and ("_append" in var or "_prepend" in var or "_remove" in var):
+ info = "%s" % var
+ if "file" in loginfo:
+ info += " file: %s" % loginfo["file"]
+ if "line" in loginfo:
+ info += " line: %s" % loginfo["line"]
+ bb.fatal("Variable %s contains an operation using the old override syntax. Please convert this layer/metadata before attempting to use with a newer bitbake." % info)
+
+ shortvar = var.split(":", 1)[0]
+ if shortvar in self._var_renames:
+ _print_rename_error(shortvar, loginfo, self._var_renames, fullvar=var)
+ # Mark that we have seen a renamed variable
+ self.setVar("_FAILPARSINGERRORHANDLED", True)
+
self.expand_cache = {}
parsing=False
if 'parsing' in loginfo:
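With this guard in place, old-style operations fail hard at parse time and the colon form is the only accepted spelling (sketch, hypothetical variable):

    d.setVar("IMAGE_INSTALL_append", " dropbear")
    # -> FATAL: Variable IMAGE_INSTALL_append ... contains an operation using the
    #    old override syntax. Please convert this layer/metadata before attempting
    #    to use with a newer bitbake.

    d.setVar("IMAGE_INSTALL:append", " dropbear")   # accepted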
@@ -509,7 +586,7 @@ class DataSmart(MutableMapping):
# pay the cookie monster
# more cookies for the cookie monster
- if '_' in var:
+ if ':' in var:
self._setvar_update_overrides(base, **loginfo)
if base in self.overridevars:
@@ -520,27 +597,27 @@ class DataSmart(MutableMapping):
self._makeShadowCopy(var)
if not parsing:
- if "_append" in self.dict[var]:
- del self.dict[var]["_append"]
- if "_prepend" in self.dict[var]:
- del self.dict[var]["_prepend"]
- if "_remove" in self.dict[var]:
- del self.dict[var]["_remove"]
+ if ":append" in self.dict[var]:
+ del self.dict[var][":append"]
+ if ":prepend" in self.dict[var]:
+ del self.dict[var][":prepend"]
+ if ":remove" in self.dict[var]:
+ del self.dict[var][":remove"]
if var in self.overridedata:
active = []
self.need_overrides()
for (r, o) in self.overridedata[var]:
if o in self.overridesset:
active.append(r)
- elif "_" in o:
- if set(o.split("_")).issubset(self.overridesset):
+ elif ":" in o:
+ if set(o.split(":")).issubset(self.overridesset):
active.append(r)
for a in active:
self.delVar(a)
del self.overridedata[var]
# more cookies for the cookie monster
- if '_' in var:
+ if ':' in var:
self._setvar_update_overrides(var, **loginfo)
# setting var
@@ -562,12 +639,12 @@ class DataSmart(MutableMapping):
nextnew.update(vardata.references)
nextnew.update(vardata.contains.keys())
new = nextnew
- self.internal_finalize(True)
+ self.overrides = None
def _setvar_update_overrides(self, var, **loginfo):
# aka pay the cookie monster
- override = var[var.rfind('_')+1:]
- shortvar = var[:var.rfind('_')]
+ override = var[var.rfind(':')+1:]
+ shortvar = var[:var.rfind(':')]
while override and __override_regexp__.match(override):
if shortvar not in self.overridedata:
self.overridedata[shortvar] = []
@@ -576,9 +653,9 @@ class DataSmart(MutableMapping):
self.overridedata[shortvar] = list(self.overridedata[shortvar])
self.overridedata[shortvar].append([var, override])
override = None
- if "_" in shortvar:
- override = var[shortvar.rfind('_')+1:]
- shortvar = var[:shortvar.rfind('_')]
+ if ":" in shortvar:
+ override = var[shortvar.rfind(':')+1:]
+ shortvar = var[:shortvar.rfind(':')]
if len(shortvar) == 0:
override = None
@@ -602,10 +679,11 @@ class DataSmart(MutableMapping):
self.varhistory.record(**loginfo)
self.setVar(newkey, val, ignore=True, parsing=True)
- for i in (__setvar_keyword__):
- src = self.getVarFlag(key, i, False)
- if src is None:
+ srcflags = self.getVarFlags(key, False, True) or {}
+ for i in srcflags:
+ if i not in (__setvar_keyword__):
continue
+ src = srcflags[i]
dest = self.getVarFlag(newkey, i, False) or []
dest.extend(src)
@@ -617,7 +695,7 @@ class DataSmart(MutableMapping):
self.overridedata[newkey].append([v.replace(key, newkey), o])
self.renameVar(v, v.replace(key, newkey))
- if '_' in newkey and val is None:
+ if ':' in newkey and val is None:
self._setvar_update_overrides(newkey, **loginfo)
loginfo['variable'] = key
@@ -629,12 +707,12 @@ class DataSmart(MutableMapping):
def appendVar(self, var, value, **loginfo):
loginfo['op'] = 'append'
self.varhistory.record(**loginfo)
- self.setVar(var + "_append", value, ignore=True, parsing=True)
+ self.setVar(var + ":append", value, ignore=True, parsing=True)
def prependVar(self, var, value, **loginfo):
loginfo['op'] = 'prepend'
self.varhistory.record(**loginfo)
- self.setVar(var + "_prepend", value, ignore=True, parsing=True)
+ self.setVar(var + ":prepend", value, ignore=True, parsing=True)
def delVar(self, var, **loginfo):
self.expand_cache = {}
@@ -645,10 +723,10 @@ class DataSmart(MutableMapping):
self.dict[var] = {}
if var in self.overridedata:
del self.overridedata[var]
- if '_' in var:
- override = var[var.rfind('_')+1:]
- shortvar = var[:var.rfind('_')]
- while override and override.islower():
+ if ':' in var:
+ override = var[var.rfind(':')+1:]
+ shortvar = var[:var.rfind(':')]
+ while override and __override_regexp__.match(override):
try:
if shortvar in self.overridedata:
# Force CoW by recreating the list first
@@ -657,15 +735,23 @@ class DataSmart(MutableMapping):
except ValueError as e:
pass
override = None
- if "_" in shortvar:
- override = var[shortvar.rfind('_')+1:]
- shortvar = var[:shortvar.rfind('_')]
+ if ":" in shortvar:
+ override = var[shortvar.rfind(':')+1:]
+ shortvar = var[:shortvar.rfind(':')]
if len(shortvar) == 0:
override = None
def setVarFlag(self, var, flag, value, **loginfo):
self.expand_cache = {}
+ if var == "BB_RENAMED_VARIABLES":
+ self._var_renames[flag] = value
+
+ if var in self._var_renames:
+ _print_rename_error(var, loginfo, self._var_renames)
+ # Mark that we have seen a renamed variable
+ self.setVar("_FAILPARSINGERRORHANDLED", True)
+
if 'op' not in loginfo:
loginfo['op'] = "set"
loginfo['flag'] = flag
@@ -674,7 +760,7 @@ class DataSmart(MutableMapping):
self._makeShadowCopy(var)
self.dict[var][flag] = value
- if flag == "_defaultval" and '_' in var:
+ if flag == "_defaultval" and ':' in var:
self._setvar_update_overrides(var, **loginfo)
if flag == "_defaultval" and var in self.overridevars:
self._setvar_update_overridevars(var, value)
@@ -695,22 +781,27 @@ class DataSmart(MutableMapping):
return None
cachename = var + "[" + flag + "]"
+ if not expand and retparser and cachename in self.expand_cache:
+ return self.expand_cache[cachename].unexpanded_value, self.expand_cache[cachename]
+
if expand and cachename in self.expand_cache:
return self.expand_cache[cachename].value
- local_var, overridedata = self._findVar(var)
+ local_var = self._findVar(var)
value = None
removes = set()
- if flag == "_content" and overridedata is not None and not parsing:
+ if flag == "_content" and not parsing:
+ overridedata = self.overridedata.get(var, None)
+ if flag == "_content" and not parsing and overridedata is not None:
match = False
active = {}
self.need_overrides()
for (r, o) in overridedata:
- # What about double overrides both with "_" in the name?
+            # FIXME What about double overrides both with ":" in the name?
if o in self.overridesset:
active[o] = r
- elif "_" in o:
- if set(o.split("_")).issubset(self.overridesset):
+ elif ":" in o:
+ if set(o.split(":")).issubset(self.overridesset):
active[o] = r
mod = True
@@ -718,10 +809,10 @@ class DataSmart(MutableMapping):
mod = False
for o in self.overrides:
for a in active.copy():
- if a.endswith("_" + o):
+ if a.endswith(":" + o):
t = active[a]
del active[a]
- active[a.replace("_" + o, "")] = t
+ active[a.replace(":" + o, "")] = t
mod = True
elif a == o:
match = active[a]
@@ -740,31 +831,31 @@ class DataSmart(MutableMapping):
value = copy.copy(local_var["_defaultval"])
- if flag == "_content" and local_var is not None and "_append" in local_var and not parsing:
- if not value:
- value = ""
+ if flag == "_content" and local_var is not None and ":append" in local_var and not parsing:
self.need_overrides()
- for (r, o) in local_var["_append"]:
+ for (r, o) in local_var[":append"]:
match = True
if o:
- for o2 in o.split("_"):
+ for o2 in o.split(":"):
if not o2 in self.overrides:
match = False
if match:
+ if value is None:
+ value = ""
value = value + r
- if flag == "_content" and local_var is not None and "_prepend" in local_var and not parsing:
- if not value:
- value = ""
+ if flag == "_content" and local_var is not None and ":prepend" in local_var and not parsing:
self.need_overrides()
- for (r, o) in local_var["_prepend"]:
+ for (r, o) in local_var[":prepend"]:
match = True
if o:
- for o2 in o.split("_"):
+ for o2 in o.split(":"):
if not o2 in self.overrides:
match = False
if match:
+ if value is None:
+ value = ""
value = r + value
parser = None
@@ -773,12 +864,12 @@ class DataSmart(MutableMapping):
if expand:
value = parser.value
- if value and flag == "_content" and local_var is not None and "_remove" in local_var and not parsing:
+ if value and flag == "_content" and local_var is not None and ":remove" in local_var and not parsing:
self.need_overrides()
- for (r, o) in local_var["_remove"]:
+ for (r, o) in local_var[":remove"]:
match = True
if o:
- for o2 in o.split("_"):
+ for o2 in o.split(":"):
if not o2 in self.overrides:
match = False
if match:
@@ -791,7 +882,7 @@ class DataSmart(MutableMapping):
expanded_removes[r] = self.expand(r).split()
parser.removes = set()
- val = ""
+ val = []
for v in __whitespace_split__.split(parser.value):
skip = False
for r in removes:
@@ -800,8 +891,8 @@ class DataSmart(MutableMapping):
skip = True
if skip:
continue
- val = val + v
- parser.value = val
+ val.append(v)
+ parser.value = "".join(val)
if expand:
value = parser.value
@@ -816,7 +907,7 @@ class DataSmart(MutableMapping):
def delVarFlag(self, var, flag, **loginfo):
self.expand_cache = {}
- local_var, _ = self._findVar(var)
+ local_var = self._findVar(var)
if not local_var:
return
if not var in self.dict:
@@ -859,12 +950,12 @@ class DataSmart(MutableMapping):
self.dict[var][i] = flags[i]
def getVarFlags(self, var, expand = False, internalflags=False):
- local_var, _ = self._findVar(var)
+ local_var = self._findVar(var)
flags = {}
if local_var:
for i in local_var:
- if i.startswith("_") and not internalflags:
+ if i.startswith(("_", ":")) and not internalflags:
continue
flags[i] = local_var[i]
if expand and i in expand:
@@ -905,6 +996,7 @@ class DataSmart(MutableMapping):
data.inchistory = self.inchistory.copy()
data._tracking = self._tracking
+ data._var_renames = self._var_renames
data.overrides = None
data.overridevars = copy.copy(self.overridevars)
@@ -927,7 +1019,7 @@ class DataSmart(MutableMapping):
value = self.getVar(variable, False)
for key in keys:
referrervalue = self.getVar(key, False)
- if referrervalue and ref in referrervalue:
+ if referrervalue and isinstance(referrervalue, str) and ref in referrervalue:
self.setVar(key, referrervalue.replace(ref, value))
def localkeys(self):
@@ -962,8 +1054,8 @@ class DataSmart(MutableMapping):
for (r, o) in self.overridedata[var]:
if o in self.overridesset:
overrides.add(var)
- elif "_" in o:
- if set(o.split("_")).issubset(self.overridesset):
+ elif ":" in o:
+ if set(o.split(":")).issubset(self.overridesset):
overrides.add(var)
for k in keylist(self.dict):
@@ -993,10 +1085,10 @@ class DataSmart(MutableMapping):
d = self.createCopy()
bb.data.expandKeys(d)
- config_whitelist = set((d.getVar("BB_HASHCONFIG_WHITELIST") or "").split())
+ config_ignore_vars = set((d.getVar("BB_HASHCONFIG_IGNORE_VARS") or "").split())
keys = set(key for key in iter(d) if not key.startswith("__"))
for key in keys:
- if key in config_whitelist:
+ if key in config_ignore_vars:
continue
value = d.getVar(key, False) or ""
diff --git a/lib/bb/event.py b/lib/bb/event.py
index 0454c7533..4761c8688 100644
--- a/lib/bb/event.py
+++ b/lib/bb/event.py
@@ -40,7 +40,7 @@ class HeartbeatEvent(Event):
"""Triggered at regular time intervals of 10 seconds. Other events can fire much more often
(runQueueTaskStarted when there are many short tasks) or not at all for long periods
of time (again runQueueTaskStarted, when there is just one long-running task), so this
- event is more suitable for doing some task-independent work occassionally."""
+ event is more suitable for doing some task-independent work occasionally."""
def __init__(self, time):
Event.__init__(self)
self.time = time
@@ -68,29 +68,39 @@ _catchall_handlers = {}
_eventfilter = None
_uiready = False
_thread_lock = threading.Lock()
-_thread_lock_enabled = False
-
-if hasattr(__builtins__, '__setitem__'):
- builtins = __builtins__
-else:
- builtins = __builtins__.__dict__
+_heartbeat_enabled = False
+_should_exit = threading.Event()
def enable_threadlock():
- global _thread_lock_enabled
- _thread_lock_enabled = True
+ # Always needed now
+ return
def disable_threadlock():
- global _thread_lock_enabled
- _thread_lock_enabled = False
+ # Always needed now
+ return
+
+def enable_heartbeat():
+ global _heartbeat_enabled
+ _heartbeat_enabled = True
+
+def disable_heartbeat():
+ global _heartbeat_enabled
+ _heartbeat_enabled = False
+
+#
+# In long running code, this function should be called periodically
+# to check if we should exit due to an interruption (e.g. Ctrl+C from the UI)
+#
+def check_for_interrupts(d):
+ global _should_exit
+ if _should_exit.is_set():
+ bb.warn("Exiting due to interrupt.")
+ raise bb.BBHandledException()
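A sketch of the intended call pattern (the loop and helpers are hypothetical):

    def scan_everything(d):
        for item in collect_work_items():      # some long-running loop
            process(item)
            # Give the server a chance to stop us cleanly on Ctrl+C:
            bb.event.check_for_interrupts(d)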
def execute_handler(name, handler, event, d):
event.data = d
- addedd = False
- if 'd' not in builtins:
- builtins['d'] = d
- addedd = True
try:
- ret = handler(event)
+ ret = handler(event, d)
except (bb.parse.SkipRecipe, bb.BBHandledException):
raise
except Exception:
@@ -104,8 +114,7 @@ def execute_handler(name, handler, event, d):
raise
finally:
del event.data
- if addedd:
- del builtins['d']
+
def fire_class_handlers(event, d):
if isinstance(event, logging.LogRecord):
@@ -132,8 +141,14 @@ def print_ui_queue():
if not _uiready:
from bb.msg import BBLogFormatter
# Flush any existing buffered content
- sys.stdout.flush()
- sys.stderr.flush()
+ try:
+ sys.stdout.flush()
+ except:
+ pass
+ try:
+ sys.stderr.flush()
+ except:
+ pass
stdout = logging.StreamHandler(sys.stdout)
stderr = logging.StreamHandler(sys.stderr)
formatter = BBLogFormatter("%(levelname)s: %(message)s")
@@ -174,36 +189,30 @@ def print_ui_queue():
def fire_ui_handlers(event, d):
global _thread_lock
- global _thread_lock_enabled
if not _uiready:
# No UI handlers registered yet, queue up the messages
ui_queue.append(event)
return
- if _thread_lock_enabled:
- _thread_lock.acquire()
-
- errors = []
- for h in _ui_handlers:
- #print "Sending event %s" % event
- try:
- if not _ui_logfilters[h].filter(event):
- continue
- # We use pickle here since it better handles object instances
- # which xmlrpc's marshaller does not. Events *must* be serializable
- # by pickle.
- if hasattr(_ui_handlers[h].event, "sendpickle"):
- _ui_handlers[h].event.sendpickle((pickle.dumps(event)))
- else:
- _ui_handlers[h].event.send(event)
- except:
- errors.append(h)
- for h in errors:
- del _ui_handlers[h]
-
- if _thread_lock_enabled:
- _thread_lock.release()
+ with bb.utils.lock_timeout(_thread_lock):
+ errors = []
+ for h in _ui_handlers:
+ #print "Sending event %s" % event
+ try:
+ if not _ui_logfilters[h].filter(event):
+ continue
+ # We use pickle here since it better handles object instances
+ # which xmlrpc's marshaller does not. Events *must* be serializable
+ # by pickle.
+ if hasattr(_ui_handlers[h].event, "sendpickle"):
+ _ui_handlers[h].event.sendpickle((pickle.dumps(event)))
+ else:
+ _ui_handlers[h].event.send(event)
+ except:
+ errors.append(h)
+ for h in errors:
+ del _ui_handlers[h]
def fire(event, d):
"""Fire off an Event"""
@@ -247,15 +256,16 @@ def register(name, handler, mask=None, filename=None, lineno=None, data=None):
if handler is not None:
# handle string containing python code
if isinstance(handler, str):
- tmp = "def %s(e):\n%s" % (name, handler)
+ tmp = "def %s(e, d):\n%s" % (name, handler)
+ # Inject empty lines to make code match lineno in filename
+ if lineno is not None:
+ tmp = "\n" * (lineno-1) + tmp
try:
code = bb.methodpool.compile_cache(tmp)
if not code:
if filename is None:
- filename = "%s(e)" % name
+ filename = "%s(e, d)" % name
code = compile(tmp, filename, "exec", ast.PyCF_ONLY_AST)
- if lineno is not None:
- ast.increment_lineno(code, lineno-1)
code = compile(code, filename, "exec")
bb.methodpool.compile_cache_add(tmp, code)
except SyntaxError:
@@ -317,21 +327,23 @@ def set_eventfilter(func):
_eventfilter = func
def register_UIHhandler(handler, mainui=False):
- bb.event._ui_handler_seq = bb.event._ui_handler_seq + 1
- _ui_handlers[_ui_handler_seq] = handler
- level, debug_domains = bb.msg.constructLogOptions()
- _ui_logfilters[_ui_handler_seq] = UIEventFilter(level, debug_domains)
- if mainui:
- global _uiready
- _uiready = _ui_handler_seq
- return _ui_handler_seq
+ with bb.utils.lock_timeout(_thread_lock):
+ bb.event._ui_handler_seq = bb.event._ui_handler_seq + 1
+ _ui_handlers[_ui_handler_seq] = handler
+ level, debug_domains = bb.msg.constructLogOptions()
+ _ui_logfilters[_ui_handler_seq] = UIEventFilter(level, debug_domains)
+ if mainui:
+ global _uiready
+ _uiready = _ui_handler_seq
+ return _ui_handler_seq
def unregister_UIHhandler(handlerNum, mainui=False):
if mainui:
global _uiready
_uiready = False
- if handlerNum in _ui_handlers:
- del _ui_handlers[handlerNum]
+ with bb.utils.lock_timeout(_thread_lock):
+ if handlerNum in _ui_handlers:
+ del _ui_handlers[handlerNum]
return
def get_uihandler():
@@ -486,7 +498,7 @@ class BuildCompleted(BuildBase, OperationCompleted):
BuildBase.__init__(self, n, p, failures)
class DiskFull(Event):
- """Disk full case build aborted"""
+ """Disk full case build halted"""
def __init__(self, dev, type, freespace, mountpoint):
Event.__init__(self)
self._dev = dev
@@ -764,7 +776,7 @@ class LogHandler(logging.Handler):
class MetadataEvent(Event):
"""
Generic event targeted at OE-Core classes
- to report information during asynchrous execution
+ to report information during asynchronous execution
"""
def __init__(self, eventtype, eventdata):
Event.__init__(self)
@@ -845,3 +857,19 @@ class FindSigInfoResult(Event):
def __init__(self, result):
Event.__init__(self)
self.result = result
+
+class GetTaskSignatureResult(Event):
+ """
+ Event to return results from GetTaskSignatures command
+ """
+ def __init__(self, sig):
+ Event.__init__(self)
+ self.sig = sig
+
+class ParseError(Event):
+ """
+ Event to indicate parse failed
+ """
+ def __init__(self, msg):
+ super().__init__()
+ self._msg = msg
diff --git a/lib/bb/exceptions.py b/lib/bb/exceptions.py
index ecbad5997..801db9c82 100644
--- a/lib/bb/exceptions.py
+++ b/lib/bb/exceptions.py
@@ -1,4 +1,6 @@
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
diff --git a/lib/bb/fetch2/README b/lib/bb/fetch2/README
new file mode 100644
index 000000000..67b787ef4
--- /dev/null
+++ b/lib/bb/fetch2/README
@@ -0,0 +1,57 @@
+There are expectations of users of the fetcher code. This file attempts to document
+some of the constraints that are present. Some are obvious, some are less so. It is
+documented in the context of how OE uses it but the API calls are generic.
+
+a) network access for sources is only expected to happen in the do_fetch task.
+ This is not enforced or tested but is required so that we can:
+
+ i) audit the sources used (i.e. for license/manifest reasons)
+ ii) support offline builds with a suitable cache
+ iii) allow work to continue even with downtime upstream
+ iv) allow for changes upstream in incompatible ways
+ v) allow rebuilding of the software in X years time
+
+b) network access is not expected in do_unpack task.
+
+c) you can take DL_DIR and use it as a mirror for offline builds.
+
+d) access to the network is only made when explicitly configured in recipes
+ (e.g. use of AUTOREV, or use of git tags which change revision).
+
+e) fetcher output is deterministic (i.e. if you fetch configuration XXX now it
+ will match in future exactly in a clean build with a new DL_DIR).
+   One specific pain point is git tags: they can be replaced and change, so
+   the git fetcher has to resolve them over the network. We use git revisions
+ where possible to avoid this and ensure determinism.
+
+f) network access is expected to work with the standard linux proxy variables
+ so that access behind firewalls works (the fetcher sets these in the
+ environment but only in the do_fetch tasks).
+
+g) access during parsing has to be minimal; a "git ls-remote" for an AUTOREV
+   git recipe might be ok, but you can't expect to check out a git tree.
+
+h) we need to provide revision information during parsing such that a version
+ for the recipe can be constructed.
+
+i) versions are expected to be able to increase in a way which sorts, allowing
+   package feeds to operate (a PR server is required for git revisions to sort).
+
+j) an API to query for possible version upgrades of a url is highly desirable
+   to allow our automated upgrade code to function (it is implied that this
+   always has network access).
+
+k) Where fixes or changes to behaviour in the fetcher are made, we ask that
+ test cases are added (run with "bitbake-selftest bb.tests.fetch"). We do
+ have fairly extensive test coverage of the fetcher as it is the only way
+   to track all of its corner cases, though sadly it still doesn't give
+   complete coverage.
+
+l) If using tools during parse time, they will have to be in ASSUME_PROVIDED
+ in OE's context as we can't build git-native, then parse a recipe and use
+ git ls-remote.
+
+Not all fetchers support all features; autorev is optional and doesn't make
+sense for some. Upgrade detection means different things in different contexts
+too.
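As a concrete sketch of item c), a populated DL_DIR can be served back as a premirror for fully offline builds. Shown here via the datastore API, though this would normally be two assignments in local.conf (paths invented):

    # PREMIRRORS is a whitespace-separated list of regexp/substitution pairs,
    # see mirror_from_string() in lib/bb/fetch2/__init__.py.
    d.setVar("PREMIRRORS",
             "git://.*/.* file:///srv/mirror/downloads/ "
             "https?://.*/.* file:///srv/mirror/downloads/")
    d.setVar("BB_NO_NETWORK", "1")  # fail loudly if anything still hits the network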
+
diff --git a/lib/bb/fetch2/__init__.py b/lib/bb/fetch2/__init__.py
index cf0201c49..5bf2c4b8c 100644
--- a/lib/bb/fetch2/__init__.py
+++ b/lib/bb/fetch2/__init__.py
@@ -113,7 +113,7 @@ class MissingParameterError(BBFetchException):
self.args = (missing, url)
class ParameterError(BBFetchException):
- """Exception raised when a url cannot be proccessed due to invalid parameters."""
+ """Exception raised when a url cannot be processed due to invalid parameters."""
def __init__(self, message, url):
msg = "URL: '%s' has invalid parameters. %s" % (url, message)
self.url = url
@@ -182,7 +182,7 @@ class URI(object):
Some notes about relative URIs: while it's specified that
a URI beginning with <scheme>:// should either be directly
followed by a hostname or a /, the old URI handling of the
- fetch2 library did not comform to this. Therefore, this URI
+ fetch2 library did not conform to this. Therefore, this URI
class has some kludges to make sure that URIs are parsed in
a way conforming to bitbake's current usage. This URI class
supports the following:
@@ -199,7 +199,7 @@ class URI(object):
file://hostname/absolute/path.diff (would be IETF compliant)
Note that the last case only applies to a list of
- "whitelisted" schemes (currently only file://), that requires
+ explicitly allowed schemes (currently only file://), that requires
its URIs to not have a network location.
"""
@@ -290,12 +290,12 @@ class URI(object):
def _param_str_split(self, string, elmdelim, kvdelim="="):
ret = collections.OrderedDict()
- for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim) if x]:
+ for k, v in [x.split(kvdelim, 1) if kvdelim in x else (x, None) for x in string.split(elmdelim) if x]:
ret[k] = v
return ret
def _param_str_join(self, dict_, elmdelim, kvdelim="="):
- return elmdelim.join([kvdelim.join([k, v]) for k, v in dict_.items()])
+ return elmdelim.join([kvdelim.join([k, v]) if v else k for k, v in dict_.items()])
@property
def hostport(self):
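These helpers now round-trip URL parameters that carry no value; a sketch (URL and parameter invented, and assuming the URI class exposes its parsed parameters as .params):

    from bb.fetch2 import URI

    u = URI("git://example.com/repo.git;protocol=https;someflag")
    u.params   # -> {'protocol': 'https', 'someflag': None}
    str(u)     # -> "git://example.com/repo.git;protocol=https;someflag"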
@@ -388,7 +388,7 @@ def decodeurl(url):
if s:
if not '=' in s:
raise MalformedUrl(url, "The URL: '%s' is invalid: parameter %s does not specify a value (missing '=')" % (url, s))
- s1, s2 = s.split('=')
+ s1, s2 = s.split('=', 1)
p[s1] = s2
return type, host, urllib.parse.unquote(path), user, pswd, p
@@ -402,24 +402,24 @@ def encodeurl(decoded):
if not type:
raise MissingParameterError('type', "encoded from the data %s" % str(decoded))
- url = '%s://' % type
+ url = ['%s://' % type]
if user and type != "file":
- url += "%s" % user
+ url.append("%s" % user)
if pswd:
- url += ":%s" % pswd
- url += "@"
+ url.append(":%s" % pswd)
+ url.append("@")
if host and type != "file":
- url += "%s" % host
+ url.append("%s" % host)
if path:
# Standardise path to ensure comparisons work
while '//' in path:
path = path.replace("//", "/")
- url += "%s" % urllib.parse.quote(path)
+ url.append("%s" % urllib.parse.quote(path))
if p:
for parm in p:
- url += ";%s=%s" % (parm, p[parm])
+ url.append(";%s=%s" % (parm, p[parm]))
- return url
+ return "".join(url)
def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
if not ud.url or not uri_find or not uri_replace:
@@ -430,6 +430,7 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
uri_replace_decoded = list(decodeurl(uri_replace))
logger.debug2("For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
result_decoded = ['', '', '', '', '', {}]
+ # 0 - type, 1 - host, 2 - path, 3 - user, 4- pswd, 5 - params
for loc, i in enumerate(uri_find_decoded):
result_decoded[loc] = uri_decoded[loc]
regexp = i
@@ -449,6 +450,9 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
for l in replacements:
uri_replace_decoded[loc][k] = uri_replace_decoded[loc][k].replace(l, replacements[l])
result_decoded[loc][k] = uri_replace_decoded[loc][k]
+ elif (loc == 3 or loc == 4) and uri_replace_decoded[loc]:
+ # User/password in the replacement is just a straight replacement
+ result_decoded[loc] = uri_replace_decoded[loc]
elif (re.match(regexp, uri_decoded[loc])):
if not uri_replace_decoded[loc]:
result_decoded[loc] = ""
@@ -465,10 +469,18 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
basename = os.path.basename(mirrortarball)
# Kill parameters, they make no sense for mirror tarballs
uri_decoded[5] = {}
+ uri_find_decoded[5] = {}
elif ud.localpath and ud.method.supports_checksum(ud):
basename = os.path.basename(ud.localpath)
- if basename and not result_decoded[loc].endswith(basename):
- result_decoded[loc] = os.path.join(result_decoded[loc], basename)
+ if basename:
+ uri_basename = os.path.basename(uri_decoded[loc])
+ # Prefix with a slash as a sentinel in case
+ # result_decoded[loc] does not contain one.
+ path = "/" + result_decoded[loc]
+ if uri_basename and basename != uri_basename and path.endswith("/" + uri_basename):
+ result_decoded[loc] = path[1:-len(uri_basename)] + basename
+ elif not path.endswith("/" + basename):
+ result_decoded[loc] = os.path.join(path[1:], basename)
else:
return None
result = encodeurl(result_decoded)
@@ -506,7 +518,7 @@ def fetcher_init(d):
else:
raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
- _checksum_cache.init_cache(d)
+ _checksum_cache.init_cache(d.getVar("BB_CACHEDIR"))
for m in methods:
if hasattr(m, "init"):
@@ -534,7 +546,7 @@ def mirror_from_string(data):
bb.warn('Invalid mirror data %s, should have paired members.' % data)
return list(zip(*[iter(mirrors)]*2))
-def verify_checksum(ud, d, precomputed={}):
+def verify_checksum(ud, d, precomputed={}, localpath=None, fatal_nochecksum=True):
"""
verify the MD5 and SHA256 checksum for downloaded src
@@ -548,20 +560,25 @@ def verify_checksum(ud, d, precomputed={}):
file against those in the recipe each time, rather than only after
downloading. See https://bugzilla.yoctoproject.org/show_bug.cgi?id=5571.
"""
-
if ud.ignore_checksums or not ud.method.supports_checksum(ud):
return {}
+ if localpath is None:
+ localpath = ud.localpath
+
def compute_checksum_info(checksum_id):
checksum_name = getattr(ud, "%s_name" % checksum_id)
if checksum_id in precomputed:
checksum_data = precomputed[checksum_id]
else:
- checksum_data = getattr(bb.utils, "%s_file" % checksum_id)(ud.localpath)
+ checksum_data = getattr(bb.utils, "%s_file" % checksum_id)(localpath)
checksum_expected = getattr(ud, "%s_expected" % checksum_id)
+ if checksum_expected == '':
+ checksum_expected = None
+
return {
"id": checksum_id,
"name": checksum_name,
@@ -581,17 +598,13 @@ def verify_checksum(ud, d, precomputed={}):
checksum_lines = ["SRC_URI[%s] = \"%s\"" % (ci["name"], ci["data"])]
# If no checksum has been provided
- if ud.method.recommends_checksum(ud) and all(ci["expected"] is None for ci in checksum_infos):
+ if fatal_nochecksum and ud.method.recommends_checksum(ud) and all(ci["expected"] is None for ci in checksum_infos):
messages = []
strict = d.getVar("BB_STRICT_CHECKSUM") or "0"
# If strict checking enabled and neither sum defined, raise error
if strict == "1":
- messages.append("No checksum specified for '%s', please add at " \
- "least one to the recipe:" % ud.localpath)
- messages.extend(checksum_lines)
- logger.error("\n".join(messages))
- raise NoChecksumError("Missing SRC_URI checksum", ud.url)
+ raise NoChecksumError("\n".join(checksum_lines))
bb.event.fire(MissingChecksumEvent(ud.url, **checksum_event), d)
@@ -612,8 +625,8 @@ def verify_checksum(ud, d, precomputed={}):
for ci in checksum_infos:
if ci["expected"] and ci["expected"] != ci["data"]:
- messages.append("File: '%s' has %s checksum %s when %s was " \
- "expected" % (ud.localpath, ci["id"], ci["data"], ci["expected"]))
+ messages.append("File: '%s' has %s checksum '%s' when '%s' was " \
+ "expected" % (localpath, ci["id"], ci["data"], ci["expected"]))
bad_checksum = ci["data"]
if bad_checksum:
@@ -731,13 +744,16 @@ def subprocess_setup():
# SIGPIPE errors are known issues with gzip/bash
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
-def get_autorev(d):
- # only not cache src rev in autorev case
+def mark_recipe_nocache(d):
if d.getVar('BB_SRCREV_POLICY') != "cache":
d.setVar('BB_DONT_CACHE', '1')
+
+def get_autorev(d):
+ mark_recipe_nocache(d)
+ d.setVar("__BBAUTOREV_SEEN", True)
return "AUTOINC"
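bitbake.conf wires AUTOREV to this helper (roughly AUTOREV = "${@bb.fetch2.get_autorev(d)}"), so a recipe opting into floating revisions just sets SRCREV = "${AUTOREV}". A sketch of the new bookkeeping:

    rev = bb.fetch2.get_autorev(d)
    assert rev == "AUTOINC"
    # The datastore is now flagged so the recipe is kept out of the parse
    # cache and later code can tell that an AUTOREV was requested:
    assert d.getVar("__BBAUTOREV_SEEN", False)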
-def get_srcrev(d, method_name='sortable_revision'):
+def _get_srcrev(d, method_name='sortable_revision'):
"""
Return the revision string, usually for use in the version string (PV) of the current package
Most packages usually only have one SCM so we just pass on the call.
@@ -751,23 +767,34 @@ def get_srcrev(d, method_name='sortable_revision'):
that fetcher provides a method with the given name and the same signature as sortable_revision.
"""
+ d.setVar("__BBSRCREV_SEEN", "1")
+ recursion = d.getVar("__BBINSRCREV")
+ if recursion:
+ raise FetchError("There are recursive references in fetcher variables, likely through SRC_URI")
+ d.setVar("__BBINSRCREV", True)
+
scms = []
+ revs = []
fetcher = Fetch(d.getVar('SRC_URI').split(), d)
urldata = fetcher.ud
for u in urldata:
if urldata[u].method.supports_srcrev():
scms.append(u)
- if len(scms) == 0:
- raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")
+ if not scms:
+ d.delVar("__BBINSRCREV")
+ return "", revs
+
if len(scms) == 1 and len(urldata[scms[0]].names) == 1:
autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].names[0])
+ revs.append(rev)
if len(rev) > 10:
rev = rev[:10]
+ d.delVar("__BBINSRCREV")
if autoinc:
- return "AUTOINC+" + rev
- return rev
+ return "AUTOINC+" + rev, revs
+ return rev, revs
#
# Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
@@ -783,6 +810,7 @@ def get_srcrev(d, method_name='sortable_revision'):
ud = urldata[scm]
for name in ud.names:
autoinc, rev = getattr(ud.method, method_name)(ud, d, name)
+ revs.append(rev)
seenautoinc = seenautoinc or autoinc
if len(rev) > 10:
rev = rev[:10]
@@ -799,12 +827,70 @@ def get_srcrev(d, method_name='sortable_revision'):
if seenautoinc:
format = "AUTOINC+" + format
- return format
+ d.delVar("__BBINSRCREV")
+ return format, revs
+
+def get_hashvalue(d, method_name='sortable_revision'):
+ pkgv, revs = _get_srcrev(d, method_name=method_name)
+ return " ".join(revs)
+
+def get_pkgv_string(d, method_name='sortable_revision'):
+ pkgv, revs = _get_srcrev(d, method_name=method_name)
+ return pkgv
+
+def get_srcrev(d, method_name='sortable_revision'):
+ pkgv, revs = _get_srcrev(d, method_name=method_name)
+ if not pkgv:
+ raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")
+ return pkgv
def localpath(url, d):
fetcher = bb.fetch2.Fetch([url], d)
return fetcher.localpath(url)
+# Need to export PATH as binary could be in metadata paths
+# rather than host provided
+# Also include some other variables.
+FETCH_EXPORT_VARS = ['HOME', 'PATH',
+ 'HTTP_PROXY', 'http_proxy',
+ 'HTTPS_PROXY', 'https_proxy',
+ 'FTP_PROXY', 'ftp_proxy',
+ 'FTPS_PROXY', 'ftps_proxy',
+ 'NO_PROXY', 'no_proxy',
+ 'ALL_PROXY', 'all_proxy',
+ 'GIT_PROXY_COMMAND',
+ 'GIT_SSH',
+ 'GIT_SSH_COMMAND',
+ 'GIT_SSL_CAINFO',
+ 'GIT_SMART_HTTP',
+ 'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
+ 'SOCKS5_USER', 'SOCKS5_PASSWD',
+ 'DBUS_SESSION_BUS_ADDRESS',
+ 'P4CONFIG',
+ 'SSL_CERT_FILE',
+ 'NODE_EXTRA_CA_CERTS',
+ 'AWS_PROFILE',
+ 'AWS_ACCESS_KEY_ID',
+ 'AWS_SECRET_ACCESS_KEY',
+ 'AWS_ROLE_ARN',
+ 'AWS_WEB_IDENTITY_TOKEN_FILE',
+ 'AWS_DEFAULT_REGION',
+ 'AWS_SESSION_TOKEN',
+ 'GIT_CACHE_PATH',
+ 'REMOTE_CONTAINERS_IPC',
+ 'SSL_CERT_DIR']
+
+def get_fetcher_environment(d):
+ newenv = {}
+ origenv = d.getVar("BB_ORIGENV")
+ for name in bb.fetch2.FETCH_EXPORT_VARS:
+ value = d.getVar(name)
+ if not value and origenv:
+ value = origenv.getVar(name)
+ if value:
+ newenv[name] = value
+ return newenv
+
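A usage sketch: build a minimal environment containing only the fetch-relevant variables (proxies, ssh agent, AWS credentials, ...) for a helper process (the command is hypothetical; d is the usual datastore):

    import subprocess
    import bb.fetch2

    env = bb.fetch2.get_fetcher_environment(d)
    subprocess.run(["git", "ls-remote", "https://example.com/repo.git"],
                   env=env, check=True)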
def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
"""
Run cmd returning the command output
@@ -813,25 +899,7 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
Optionally remove the files/directories listed in cleanup upon failure
"""
- # Need to export PATH as binary could be in metadata paths
- # rather than host provided
- # Also include some other variables.
- # FIXME: Should really include all export varaiables?
- exportvars = ['HOME', 'PATH',
- 'HTTP_PROXY', 'http_proxy',
- 'HTTPS_PROXY', 'https_proxy',
- 'FTP_PROXY', 'ftp_proxy',
- 'FTPS_PROXY', 'ftps_proxy',
- 'NO_PROXY', 'no_proxy',
- 'ALL_PROXY', 'all_proxy',
- 'GIT_PROXY_COMMAND',
- 'GIT_SSH',
- 'GIT_SSL_CAINFO',
- 'GIT_SMART_HTTP',
- 'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
- 'SOCKS5_USER', 'SOCKS5_PASSWD',
- 'DBUS_SESSION_BUS_ADDRESS',
- 'P4CONFIG']
+ exportvars = FETCH_EXPORT_VARS
if not cleanup:
cleanup = []
@@ -868,14 +936,17 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
(output, errors) = bb.process.run(cmd, log=log, shell=True, stderr=subprocess.PIPE, cwd=workdir)
success = True
except bb.process.NotFoundError as e:
- error_message = "Fetch command %s" % (e.command)
+ error_message = "Fetch command %s not found" % (e.command)
except bb.process.ExecutionError as e:
if e.stdout:
output = "output:\n%s\n%s" % (e.stdout, e.stderr)
elif e.stderr:
output = "output:\n%s" % e.stderr
else:
- output = "no output"
+ if log:
+ output = "see logfile for output"
+ else:
+ output = "no output"
error_message = "Fetch command %s failed with exit code %s, %s" % (e.command, e.exitcode, output)
except bb.process.CmdError as e:
error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg)
@@ -937,6 +1008,7 @@ def build_mirroruris(origud, mirrors, ld):
try:
newud = FetchData(newuri, ld)
+ newud.ignore_checksums = True
newud.setup_localpath(ld)
except bb.fetch2.BBFetchException as e:
logger.debug("Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
@@ -1046,7 +1118,8 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
logger.debug("Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url))
logger.debug(str(e))
try:
- ud.method.clean(ud, ld)
+ if ud.method.cleanup_upon_failure():
+ ud.method.clean(ud, ld)
except UnboundLocalError:
pass
return False
@@ -1057,6 +1130,8 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
def ensure_symlink(target, link_name):
if not os.path.exists(link_name):
+ dirname = os.path.dirname(link_name)
+ bb.utils.mkdirhier(dirname)
if os.path.islink(link_name):
# Broken symbolic link
os.unlink(link_name)
@@ -1140,11 +1215,11 @@ def srcrev_internal_helper(ud, d, name):
pn = d.getVar("PN")
attempts = []
if name != '' and pn:
- attempts.append("SRCREV_%s_pn-%s" % (name, pn))
+ attempts.append("SRCREV_%s:pn-%s" % (name, pn))
if name != '':
attempts.append("SRCREV_%s" % name)
if pn:
- attempts.append("SRCREV_pn-%s" % pn)
+ attempts.append("SRCREV:pn-%s" % pn)
attempts.append("SRCREV")
for a in attempts:
@@ -1169,6 +1244,7 @@ def srcrev_internal_helper(ud, d, name):
if srcrev == "INVALID" or not srcrev:
raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (str(attempts), ud.url), ud.url)
if srcrev == "AUTOINC":
+ d.setVar("__BBAUTOREV_ACTED_UPON", True)
srcrev = ud.method.latest_revision(ud, d, name)
return srcrev
@@ -1180,23 +1256,21 @@ def get_checksum_file_list(d):
SRC_URI as a space-separated string
"""
fetch = Fetch([], d, cache = False, localonly = True)
-
- dl_dir = d.getVar('DL_DIR')
filelist = []
for u in fetch.urls:
ud = fetch.ud[u]
-
if ud and isinstance(ud.method, local.Local):
- paths = ud.method.localpaths(ud, d)
+ found = False
+ paths = ud.method.localfile_searchpaths(ud, d)
for f in paths:
pth = ud.decodedurl
- if f.startswith(dl_dir):
- # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
- if os.path.exists(f):
- bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN'), os.path.basename(f)))
- else:
- bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN'), os.path.basename(f)))
+ if os.path.exists(f):
+ found = True
filelist.append(f + ":" + str(os.path.exists(f)))
+ if not found:
+ bb.fatal(("Unable to get checksum for %s SRC_URI entry %s: file could not be found"
+ "\nThe following paths were searched:"
+ "\n%s") % (d.getVar('PN'), os.path.basename(f), '\n'.join(paths)))
return " ".join(filelist)
@@ -1243,18 +1317,13 @@ class FetchData(object):
if checksum_name in self.parm:
checksum_expected = self.parm[checksum_name]
- elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3", "az"]:
+ elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3", "az", "crate", "gs"]:
checksum_expected = None
else:
checksum_expected = d.getVarFlag("SRC_URI", checksum_name)
setattr(self, "%s_expected" % checksum_id, checksum_expected)
- for checksum_id in CHECKSUM_LIST:
- configure_checksum(checksum_id)
-
- self.ignore_checksums = False
-
self.names = self.parm.get("name",'default').split(',')
self.method = None
@@ -1276,6 +1345,11 @@ class FetchData(object):
if hasattr(self.method, "urldata_init"):
self.method.urldata_init(self, d)
+ for checksum_id in CHECKSUM_LIST:
+ configure_checksum(checksum_id)
+
+ self.ignore_checksums = False
+
if "localpath" in self.parm:
# if user sets localpath for file, use it instead.
self.localpath = self.parm["localpath"]
@@ -1355,6 +1429,9 @@ class FetchMethod(object):
Is localpath something that can be represented by a checksum?
"""
+ # We cannot compute checksums for None
+ if urldata.localpath is None:
+ return False
# We cannot compute checksums for directories
if os.path.isdir(urldata.localpath):
return False
@@ -1367,6 +1444,12 @@ class FetchMethod(object):
"""
return False
+ def cleanup_upon_failure(self):
+ """
+ When a fetch fails, should clean() be called?
+ """
+ return True
+
def verify_donestamp(self, ud, d):
"""
Verify the donestamp file
@@ -1434,30 +1517,33 @@ class FetchMethod(object):
cmd = None
if unpack:
+ tar_cmd = 'tar --extract --no-same-owner'
+ if 'striplevel' in urldata.parm:
+ tar_cmd += ' --strip-components=%s' % urldata.parm['striplevel']
if file.endswith('.tar'):
- cmd = 'tar x --no-same-owner -f %s' % file
+ cmd = '%s -f %s' % (tar_cmd, file)
elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
- cmd = 'tar xz --no-same-owner -f %s' % file
+ cmd = '%s -z -f %s' % (tar_cmd, file)
elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
- cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
+ cmd = 'bzip2 -dc %s | %s -f -' % (file, tar_cmd)
elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
cmd = 'gzip -dc %s > %s' % (file, efile)
elif file.endswith('.bz2'):
cmd = 'bzip2 -dc %s > %s' % (file, efile)
elif file.endswith('.txz') or file.endswith('.tar.xz'):
- cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
+ cmd = 'xz -dc %s | %s -f -' % (file, tar_cmd)
elif file.endswith('.xz'):
cmd = 'xz -dc %s > %s' % (file, efile)
elif file.endswith('.tar.lz'):
- cmd = 'lzip -dc %s | tar x --no-same-owner -f -' % file
+ cmd = 'lzip -dc %s | %s -f -' % (file, tar_cmd)
elif file.endswith('.lz'):
cmd = 'lzip -dc %s > %s' % (file, efile)
elif file.endswith('.tar.7z'):
- cmd = '7z x -so %s | tar x --no-same-owner -f -' % file
+ cmd = '7z x -so %s | %s -f -' % (file, tar_cmd)
elif file.endswith('.7z'):
cmd = '7za x -y %s 1>/dev/null' % file
elif file.endswith('.tzst') or file.endswith('.tar.zst'):
- cmd = 'zstd --decompress --stdout %s | tar x --no-same-owner -f -' % file
+ cmd = 'zstd --decompress --stdout %s | %s -f -' % (file, tar_cmd)
elif file.endswith('.zst'):
cmd = 'zstd --decompress --stdout %s > %s' % (file, efile)
elif file.endswith('.zip') or file.endswith('.jar'):
@@ -1490,7 +1576,7 @@ class FetchMethod(object):
raise UnpackError("Unable to unpack deb/ipk package - does not contain data.tar.* file", urldata.url)
else:
raise UnpackError("Unable to unpack deb/ipk package - could not list contents", urldata.url)
- cmd = 'ar x %s %s && tar --no-same-owner -xpf %s && rm %s' % (file, datafile, datafile, datafile)
+ cmd = 'ar x %s %s && %s -p -f %s && rm %s' % (file, datafile, tar_cmd, datafile, datafile)
# If 'subdir' param exists, create a dir and use it as destination for unpack cmd
if 'subdir' in urldata.parm:
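The effect of the new ;striplevel= URL parameter, sketched for a hypothetical SRC_URI entry "https://example.com/foo-1.0.tar.gz;striplevel=1":

    tar_cmd = 'tar --extract --no-same-owner --strip-components=1'
    cmd = '%s -z -f %s' % (tar_cmd, 'foo-1.0.tar.gz')
    # -> "tar --extract --no-same-owner --strip-components=1 -z -f foo-1.0.tar.gz"
    # i.e. the archive's top-level foo-1.0/ directory is dropped on extraction.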
@@ -1506,6 +1592,7 @@ class FetchMethod(object):
unpackdir = rootdir
if not unpack or not cmd:
+ urldata.unpack_tracer.unpack("file-copy", unpackdir)
# If file == dest, then avoid any copies, as we already put the file into dest!
dest = os.path.join(unpackdir, os.path.basename(file))
if file != dest and not (os.path.exists(dest) and os.path.samefile(file, dest)):
@@ -1520,6 +1607,8 @@ class FetchMethod(object):
destdir = urlpath.rsplit("/", 1)[0] + '/'
bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir))
cmd = 'cp -fpPRH "%s" "%s"' % (file, destdir)
+ else:
+ urldata.unpack_tracer.unpack("archive-extract", unpackdir)
if not cmd:
return
@@ -1611,12 +1700,61 @@ class FetchMethod(object):
"""
return []
+
+class DummyUnpackTracer(object):
+ """
+ Abstract API definition for a class that traces unpacked source files back
+ to their respective upstream SRC_URI entries, for software composition
+ analysis, license compliance and detailed SBOM generation purposes.
+ User may load their own unpack tracer class (instead of the dummy
+ one) by setting the BB_UNPACK_TRACER_CLASS config parameter.
+ """
+ def start(self, unpackdir, urldata_dict, d):
+ """
+ Start tracing the core Fetch.unpack process, using an index to map
+ unpacked files to each SRC_URI entry.
+ This method is called by Fetch.unpack and it may receive nested calls by
+ gitsm and npmsw fetchers, that expand SRC_URI entries by adding implicit
+ URLs and by recursively calling Fetch.unpack from new (nested) Fetch
+ instances.
+ """
+ return
+ def start_url(self, url):
+ """Start tracing url unpack process.
+ This method is called by Fetch.unpack before the fetcher-specific unpack
+ method starts, and it may receive nested calls by gitsm and npmsw
+ fetchers.
+ """
+ return
+ def unpack(self, unpack_type, destdir):
+ """
+ Set unpack_type and destdir for current url.
+ This method is called by the fetcher-specific unpack method after url
+ tracing started.
+ """
+ return
+ def finish_url(self, url):
+ """Finish tracing url unpack process and update the file index.
+ This method is called by Fetch.unpack after the fetcher-specific unpack
+ method finished its job, and it may receive nested calls by gitsm
+ and npmsw fetchers.
+ """
+ return
+ def complete(self):
+ """
+ Finish tracing the Fetch.unpack process, and check if all nested
+        Fetch.unpack calls (if any) have been completed; if so, save collected
+ metadata.
+ """
+ return
+
+
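A minimal concrete tracer, assuming it lives in a module importable as mylayer.tracer and is enabled with BB_UNPACK_TRACER_CLASS = "mylayer.tracer.LoggingUnpackTracer"; anything not overridden falls back to the no-op methods above:

    from bb.fetch2 import DummyUnpackTracer

    class LoggingUnpackTracer(DummyUnpackTracer):
        def start_url(self, url):
            self.current_url = url
        def unpack(self, unpack_type, destdir):
            # Record which SRC_URI entry ended up where, and how it was unpacked
            print("%s: %s -> %s" % (unpack_type, self.current_url, destdir))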
class Fetch(object):
def __init__(self, urls, d, cache = True, localonly = False, connection_cache = None):
if localonly and cache:
raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time")
- if len(urls) == 0:
+ if not urls:
urls = d.getVar("SRC_URI").split()
self.urls = urls
self.d = d
@@ -1631,10 +1769,30 @@ class Fetch(object):
if key in urldata_cache:
self.ud = urldata_cache[key]
+ # the unpack_tracer object needs to be made available to possible nested
+ # Fetch instances (when those are created by gitsm and npmsw fetchers)
+ # so we set it as a global variable
+ global unpack_tracer
+ try:
+ unpack_tracer
+ except NameError:
+ class_path = d.getVar("BB_UNPACK_TRACER_CLASS")
+ if class_path:
+ # use user-defined unpack tracer class
+ import importlib
+ module_name, _, class_name = class_path.rpartition(".")
+ module = importlib.import_module(module_name)
+ class_ = getattr(module, class_name)
+ unpack_tracer = class_()
+ else:
+ # fall back to the dummy/abstract class
+ unpack_tracer = DummyUnpackTracer()
+
for url in urls:
if url not in self.ud:
try:
self.ud[url] = FetchData(url, d, localonly)
+ self.ud[url].unpack_tracer = unpack_tracer
except NonLocalMethod:
if localonly:
self.ud[url] = None
@@ -1673,6 +1831,7 @@ class Fetch(object):
network = self.d.getVar("BB_NO_NETWORK")
premirroronly = bb.utils.to_boolean(self.d.getVar("BB_FETCH_PREMIRRORONLY"))
+ checksum_missing_messages = []
for u in urls:
ud = self.ud[u]
ud.setup_localpath(self.d)
@@ -1684,7 +1843,6 @@ class Fetch(object):
try:
self.d.setVar("BB_NO_NETWORK", network)
-
if m.verify_donestamp(ud, self.d) and not m.need_update(ud, self.d):
done = True
elif m.try_premirror(ud, self.d):
@@ -1705,7 +1863,9 @@ class Fetch(object):
self.d.setVar("BB_NO_NETWORK", "1")
firsterr = None
- verified_stamp = m.verify_donestamp(ud, self.d)
+ verified_stamp = False
+ if done:
+ verified_stamp = m.verify_donestamp(ud, self.d)
if not done and (not verified_stamp or m.need_update(ud, self.d)):
try:
if not trusted_network(self.d, ud.url):
@@ -1735,7 +1895,7 @@ class Fetch(object):
logger.debug(str(e))
firsterr = e
# Remove any incomplete fetch
- if not verified_stamp:
+ if not verified_stamp and m.cleanup_upon_failure():
m.clean(ud, self.d)
logger.debug("Trying MIRRORS")
mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
@@ -1754,17 +1914,28 @@ class Fetch(object):
raise ChecksumError("Stale Error Detected")
except BBFetchException as e:
- if isinstance(e, ChecksumError):
+ if isinstance(e, NoChecksumError):
+ (message, _) = e.args
+ checksum_missing_messages.append(message)
+ continue
+ elif isinstance(e, ChecksumError):
logger.error("Checksum failure fetching %s" % u)
raise
finally:
if ud.lockfile:
bb.utils.unlockfile(lf)
+ if checksum_missing_messages:
+            logger.error("Missing SRC_URI checksums, please add them to the recipe: \n%s", "\n".join(checksum_missing_messages))
+            raise BBFetchException("There were missing checksums in the recipe")
def checkstatus(self, urls=None):
"""
- Check all urls exist upstream
+ Check all URLs exist upstream.
+
+        Returns None if the URLs exist. Raises FetchError if the check was
+        unsuccessful but did not hit a hard failure (for example, the file was
+        simply not found), and raises other exceptions in real error cases.
"""
if not urls:
@@ -1787,7 +1958,7 @@ class Fetch(object):
ret = m.try_mirrors(self, ud, self.d, mirrors, True)
if not ret:
- raise FetchError("URL %s doesn't work" % u, u)
+ raise FetchError("URL doesn't work", u)
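A hedged sketch of how a caller might rely on these semantics (the helper name is hypothetical; it assumes a populated datastore d):

    from bb.fetch2 import Fetch, FetchError

    def uri_is_reachable(uri, d):
        # True if upstream has the URI, False on a clean negative result;
        # network or configuration problems propagate as other exceptions
        try:
            Fetch([uri], d).checkstatus()
            return True
        except FetchError:
            return False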
def unpack(self, root, urls=None):
"""
@@ -1797,6 +1968,8 @@ class Fetch(object):
if not urls:
urls = self.urls
+ unpack_tracer.start(root, self.ud, self.d)
+
for u in urls:
ud = self.ud[u]
ud.setup_localpath(self.d)
@@ -1804,11 +1977,15 @@ class Fetch(object):
if ud.lockfile:
lf = bb.utils.lockfile(ud.lockfile)
+ unpack_tracer.start_url(u)
ud.method.unpack(ud, root, self.d)
+ unpack_tracer.finish_url(u)
if ud.lockfile:
bb.utils.unlockfile(lf)
+ unpack_tracer.complete()
+
def clean(self, urls=None):
"""
Clean files that the fetcher gets or places
@@ -1909,6 +2086,8 @@ from . import clearcase
from . import npm
from . import npmsw
from . import az
+from . import crate
+from . import gcp
methods.append(local.Local())
methods.append(wget.Wget())
@@ -1929,3 +2108,5 @@ methods.append(clearcase.ClearCase())
methods.append(npm.Npm())
methods.append(npmsw.NpmShrinkWrap())
methods.append(az.Az())
+methods.append(crate.Crate())
+methods.append(gcp.GCP())
diff --git a/lib/bb/fetch2/crate.py b/lib/bb/fetch2/crate.py
new file mode 100644
index 000000000..01d49435c
--- /dev/null
+++ b/lib/bb/fetch2/crate.py
@@ -0,0 +1,141 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' implementation for crates.io
+"""
+
+# Copyright (C) 2016 Doug Goldstein
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+import hashlib
+import json
+import os
+import subprocess
+import bb
+from bb.fetch2 import logger, subprocess_setup, UnpackError
+from bb.fetch2.wget import Wget
+
+
+class Crate(Wget):
+
+ """Class to fetch crates via wget"""
+
+ def _cargo_bitbake_path(self, rootdir):
+ return os.path.join(rootdir, "cargo_home", "bitbake")
+
+ def supports(self, ud, d):
+ """
+ Check to see if a given url is for this fetcher
+ """
+ return ud.type in ['crate']
+
+ def recommends_checksum(self, urldata):
+ return True
+
+ def urldata_init(self, ud, d):
+ """
+ Sets up to download the respective crate from crates.io
+ """
+
+ if ud.type == 'crate':
+ self._crate_urldata_init(ud, d)
+
+ super(Crate, self).urldata_init(ud, d)
+
+ def _crate_urldata_init(self, ud, d):
+ """
+ Sets up the download for a crate
+ """
+
+ # URL syntax is: crate://NAME/VERSION
+ # break the URL apart by /
+ parts = ud.url.split('/')
+ if len(parts) < 5:
+ raise bb.fetch2.ParameterError("Invalid URL: Must be crate://HOST/NAME/VERSION", ud.url)
+
+ # version is expected to be the last token
+ # but ignore possible url parameters which will be used
+ # by the top fetcher class
+ version = parts[-1].split(";")[0]
+ # second to last field is name
+ name = parts[-2]
+        # host (this is to allow custom crate registries to be specified)
+ host = '/'.join(parts[2:-2])
+
+ # if using upstream just fix it up nicely
+ if host == 'crates.io':
+ host = 'crates.io/api/v1/crates'
+
+ ud.url = "https://%s/%s/%s/download" % (host, name, version)
+ ud.parm['downloadfilename'] = "%s-%s.crate" % (name, version)
+ if 'name' not in ud.parm:
+ ud.parm['name'] = '%s-%s' % (name, version)
+
+ logger.debug2("Fetching %s to %s" % (ud.url, ud.parm['downloadfilename']))
+
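As a worked example of the mapping performed above (crate name chosen for illustration):

    # crate://crates.io/glob/0.2.11 becomes, after the host fix-up:
    #   ud.url                      -> https://crates.io/api/v1/crates/glob/0.2.11/download
    #   ud.parm['downloadfilename'] -> glob-0.2.11.crate
    #   ud.parm['name']             -> glob-0.2.11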
+ def unpack(self, ud, rootdir, d):
+ """
+ Uses the crate to build the necessary paths for cargo to utilize it
+ """
+ if ud.type == 'crate':
+ return self._crate_unpack(ud, rootdir, d)
+ else:
+ super(Crate, self).unpack(ud, rootdir, d)
+
+ def _crate_unpack(self, ud, rootdir, d):
+ """
+ Unpacks a crate
+ """
+ thefile = ud.localpath
+
+ # possible metadata we need to write out
+ metadata = {}
+
+ # change to the rootdir to unpack but save the old working dir
+ save_cwd = os.getcwd()
+ os.chdir(rootdir)
+
+ bp = d.getVar('BP')
+ if bp == ud.parm.get('name'):
+ cmd = "tar -xz --no-same-owner -f %s" % thefile
+ ud.unpack_tracer.unpack("crate-extract", rootdir)
+ else:
+ cargo_bitbake = self._cargo_bitbake_path(rootdir)
+ ud.unpack_tracer.unpack("cargo-extract", cargo_bitbake)
+
+ cmd = "tar -xz --no-same-owner -f %s -C %s" % (thefile, cargo_bitbake)
+
+ # ensure we've got these paths made
+ bb.utils.mkdirhier(cargo_bitbake)
+
+ # generate metadata necessary
+ with open(thefile, 'rb') as f:
+ # get the SHA256 of the original tarball
+ tarhash = hashlib.sha256(f.read()).hexdigest()
+
+ metadata['files'] = {}
+ metadata['package'] = tarhash
+
+ path = d.getVar('PATH')
+ if path:
+ cmd = "PATH=\"%s\" %s" % (path, cmd)
+ bb.note("Unpacking %s to %s/" % (thefile, os.getcwd()))
+
+ ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)
+
+ os.chdir(save_cwd)
+
+ if ret != 0:
+ raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), ud.url)
+
+ # if we have metadata to write out..
+ if len(metadata) > 0:
+ cratepath = os.path.splitext(os.path.basename(thefile))[0]
+ bbpath = self._cargo_bitbake_path(rootdir)
+ mdfile = '.cargo-checksum.json'
+ mdpath = os.path.join(bbpath, cratepath, mdfile)
+ with open(mdpath, "w") as f:
+ json.dump(metadata, f)
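The resulting .cargo-checksum.json is what cargo later consults to trust the vendored source; a rough sketch of re-verifying a crate against it (function and paths are hypothetical):

    import hashlib
    import json

    def crate_matches_checksum(crate_tarball, checksum_json):
        # compare the tarball's SHA-256 with the recorded "package" hash
        with open(crate_tarball, "rb") as f:
            tarhash = hashlib.sha256(f.read()).hexdigest()
        with open(checksum_json) as f:
            return json.load(f)["package"] == tarhash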
diff --git a/lib/bb/fetch2/gcp.py b/lib/bb/fetch2/gcp.py
new file mode 100644
index 000000000..f40ce2eaa
--- /dev/null
+++ b/lib/bb/fetch2/gcp.py
@@ -0,0 +1,101 @@
+"""
+BitBake 'Fetch' implementation for Google Cloud Platform Storage.
+
+Class for fetching files from Google Cloud Storage using the
+Google Cloud Storage Python Client. The GCS Python Client must
+be correctly installed, configured and authenticated prior to use.
+Additionally, gsutil must also be installed.
+
+"""
+
+# Copyright (C) 2023, Snap Inc.
+#
+# Based in part on bb.fetch2.s3:
+# Copyright (C) 2017 Andre McCurdy
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+import os
+import bb
+import urllib.parse, urllib.error
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import FetchError
+from bb.fetch2 import logger
+from bb.fetch2 import runfetchcmd
+
+class GCP(FetchMethod):
+ """
+ Class to fetch urls via GCP's Python API.
+ """
+ def __init__(self):
+ self.gcp_client = None
+
+ def supports(self, ud, d):
+ """
+ Check to see if a given url can be fetched with GCP.
+ """
+ return ud.type in ['gs']
+
+ def recommends_checksum(self, urldata):
+ return True
+
+ def urldata_init(self, ud, d):
+ if 'downloadfilename' in ud.parm:
+ ud.basename = ud.parm['downloadfilename']
+ else:
+ ud.basename = os.path.basename(ud.path)
+
+ ud.localfile = d.expand(urllib.parse.unquote(ud.basename))
+ ud.basecmd = "gsutil stat"
+
+ def get_gcp_client(self):
+ from google.cloud import storage
+ self.gcp_client = storage.Client(project=None)
+
+ def download(self, ud, d):
+ """
+ Fetch urls using the GCP API.
+ Assumes localpath was called first.
+ """
+ logger.debug2(f"Trying to download gs://{ud.host}{ud.path} to {ud.localpath}")
+ if self.gcp_client is None:
+ self.get_gcp_client()
+
+ bb.fetch2.check_network_access(d, ud.basecmd, f"gs://{ud.host}{ud.path}")
+        runfetchcmd(f"{ud.basecmd} gs://{ud.host}{ud.path}", d)
+
+ # Path sometimes has leading slash, so strip it
+ path = ud.path.lstrip("/")
+ blob = self.gcp_client.bucket(ud.host).blob(path)
+ blob.download_to_filename(ud.localpath)
+
+ # Additional sanity checks copied from the wget class (although there
+ # are no known issues which mean these are required, treat the GCP API
+ # tool with a little healthy suspicion).
+ if not os.path.exists(ud.localpath):
+ raise FetchError(f"The GCP API returned success for gs://{ud.host}{ud.path} but {ud.localpath} doesn't exist?!")
+
+ if os.path.getsize(ud.localpath) == 0:
+ os.remove(ud.localpath)
+ raise FetchError(f"The downloaded file for gs://{ud.host}{ud.path} resulted in a zero size file?! Deleting and failing since this isn't right.")
+
+ return True
+
+ def checkstatus(self, fetch, ud, d):
+ """
+ Check the status of a URL.
+ """
+ logger.debug2(f"Checking status of gs://{ud.host}{ud.path}")
+ if self.gcp_client is None:
+ self.get_gcp_client()
+
+ bb.fetch2.check_network_access(d, ud.basecmd, f"gs://{ud.host}{ud.path}")
+        runfetchcmd(f"{ud.basecmd} gs://{ud.host}{ud.path}", d)
+
+ # Path sometimes has leading slash, so strip it
+ path = ud.path.lstrip("/")
+        if not self.gcp_client.bucket(ud.host).blob(path).exists():
+            raise FetchError(f"The GCP API reported that gs://{ud.host}{ud.path} does not exist")
+        return True
diff --git a/lib/bb/fetch2/git.py b/lib/bb/fetch2/git.py
index 5e65c83c6..c7ff769fd 100644
--- a/lib/bb/fetch2/git.py
+++ b/lib/bb/fetch2/git.py
@@ -44,13 +44,27 @@ Supported SRC_URI options are:
- nobranch
Don't check the SHA validation for branch. set this option for the recipe
- referring to commit which is valid in tag instead of branch.
+   referring to a commit which is valid in any namespace (branch, tag, ...)
+   instead of a branch.
The default is "0", set nobranch=1 if needed.
+- subpath
+   Limit the checkout to a specific subpath of the tree.
+   By default the whole tree is checked out; set subpath=<path> if needed.
+
+- destsuffix
+   The name of the path in which to place the checkout.
+   By default the path is git/; set destsuffix=<suffix> if needed.
+
- usehead
For local git:// urls to use the current branch HEAD as the revision for use with
AUTOREV. Implies nobranch.
+- lfs
+ Enable the checkout to use LFS for large files. This will download all LFS files
+ in the download step, as the unpack step does not have network access.
+ The default is "1", set lfs=0 to skip.
+
"""
# Copyright (C) 2005 Richard Purdie
@@ -64,14 +78,20 @@ import fnmatch
import os
import re
import shlex
+import shutil
import subprocess
import tempfile
import bb
import bb.progress
+from contextlib import contextmanager
from bb.fetch2 import FetchMethod
from bb.fetch2 import runfetchcmd
from bb.fetch2 import logger
+from bb.fetch2 import trusted_network
+
+sha1_re = re.compile(r'^[0-9a-f]{40}$')
+slash_re = re.compile(r"/+")
class GitProgressHandler(bb.progress.LineFilterProgressHandler):
"""Extract progress information from git output"""
@@ -130,6 +150,9 @@ class Git(FetchMethod):
def supports_checksum(self, urldata):
return False
+ def cleanup_upon_failure(self):
+ return False
+
def urldata_init(self, ud, d):
"""
init git specific variable within url data
@@ -141,6 +164,11 @@ class Git(FetchMethod):
ud.proto = 'file'
else:
ud.proto = "git"
+ if ud.host == "github.com" and ud.proto == "git":
+ # github stopped supporting git protocol
+ # https://github.blog/2021-09-01-improving-git-protocol-security-github/#no-more-unauthenticated-git
+ ud.proto = "https"
+ bb.warn("URL: %s uses git protocol which is no longer supported by github. Please change to ;protocol=https in the url." % ud.url)
if not ud.proto in ('git', 'file', 'ssh', 'http', 'https', 'rsync'):
raise bb.fetch2.ParameterError("Invalid protocol type", ud.url)
@@ -164,7 +192,10 @@ class Git(FetchMethod):
ud.nocheckout = 1
ud.unresolvedrev = {}
- branches = ud.parm.get("branch", "master").split(',')
+ branches = ud.parm.get("branch", "").split(',')
+ if branches == [""] and not ud.nobranch:
+            bb.warn("URL: %s does not set any branch parameter. The future default branch used by tools and repositories is uncertain and we will therefore soon require this to be set in all git urls." % ud.url)
+ branches = ["master"]
if len(branches) != len(ud.names):
raise bb.fetch2.ParameterError("The number of name and branch parameters is not balanced", ud.url)
@@ -231,7 +262,7 @@ class Git(FetchMethod):
for name in ud.names:
ud.unresolvedrev[name] = 'HEAD'
- ud.basecmd = d.getVar("FETCHCMD_git") or "git -c core.fsyncobjectfiles=0"
+ ud.basecmd = d.getVar("FETCHCMD_git") or "git -c gc.autoDetach=false -c core.pager=cat -c safe.bareRepository=all"
write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0"
ud.write_tarballs = write_tarballs != "0" or ud.rebaseable
@@ -240,20 +271,20 @@ class Git(FetchMethod):
ud.setup_revisions(d)
for name in ud.names:
- # Ensure anything that doesn't look like a sha256 checksum/revision is translated into one
- if not ud.revisions[name] or len(ud.revisions[name]) != 40 or (False in [c in "abcdef0123456789" for c in ud.revisions[name]]):
+ # Ensure any revision that doesn't look like a SHA-1 is translated into one
+ if not sha1_re.match(ud.revisions[name] or ''):
if ud.revisions[name]:
ud.unresolvedrev[name] = ud.revisions[name]
ud.revisions[name] = self.latest_revision(ud, d, name)
- gitsrcname = '%s%s' % (ud.host.replace(':', '.'), ud.path.replace('/', '.').replace('*', '.').replace(' ','_'))
+ gitsrcname = '%s%s' % (ud.host.replace(':', '.'), ud.path.replace('/', '.').replace('*', '.').replace(' ','_').replace('(', '_').replace(')', '_'))
if gitsrcname.startswith('.'):
gitsrcname = gitsrcname[1:]
- # for rebaseable git repo, it is necessary to keep mirror tar ball
- # per revision, so that even the revision disappears from the
+ # For a rebaseable git repo, it is necessary to keep a mirror tar ball
+ # per revision, so that even if the revision disappears from the
# upstream repo in the future, the mirror will remain intact and still
- # contains the revision
+ # contain the revision
if ud.rebaseable:
for name in ud.names:
gitsrcname = gitsrcname + '_' + ud.revisions[name]
@@ -297,7 +328,10 @@ class Git(FetchMethod):
return ud.clonedir
def need_update(self, ud, d):
- return self.clonedir_need_update(ud, d) or self.shallow_tarball_need_update(ud) or self.tarball_need_update(ud)
+ return self.clonedir_need_update(ud, d) \
+ or self.shallow_tarball_need_update(ud) \
+ or self.tarball_need_update(ud) \
+ or self.lfs_need_update(ud, d)
def clonedir_need_update(self, ud, d):
if not os.path.exists(ud.clonedir):
@@ -309,6 +343,15 @@ class Git(FetchMethod):
return True
return False
+ def lfs_need_update(self, ud, d):
+ if self.clonedir_need_update(ud, d):
+ return True
+
+ for name in ud.names:
+ if not self._lfs_objects_downloaded(ud, d, name, ud.clonedir):
+ return True
+ return False
+
def clonedir_need_shallow_revs(self, ud, d):
for rev in ud.shallow_revs:
try:
@@ -328,6 +371,16 @@ class Git(FetchMethod):
# is not possible
if bb.utils.to_boolean(d.getVar("BB_FETCH_PREMIRRORONLY")):
return True
+        # If the url is not in a trusted network, i.e. BB_NO_NETWORK is set to 0
+        # and BB_ALLOWED_NETWORKS does not contain the host that ud.url uses, then
+        # we need to try premirrors first as using upstream is destined to fail.
+        if not trusted_network(d, ud.url):
+            return True
+        # The following check ensures incremental fetches in downloads: the
+        # premirror might be old and not contain the new rev required, which
+        # would cause a total removal and a fresh clone. So if we can reach the
+        # network, we prefer upstream over the premirror, even though the
+        # premirror might contain the new rev.
if os.path.exists(ud.clonedir):
return False
return True
@@ -341,17 +394,54 @@ class Git(FetchMethod):
if ud.shallow and os.path.exists(ud.fullshallow) and self.need_update(ud, d):
ud.localpath = ud.fullshallow
return
- elif os.path.exists(ud.fullmirror) and not os.path.exists(ud.clonedir):
- bb.utils.mkdirhier(ud.clonedir)
- runfetchcmd("tar -xzf %s" % ud.fullmirror, d, workdir=ud.clonedir)
-
+ elif os.path.exists(ud.fullmirror) and self.need_update(ud, d):
+ if not os.path.exists(ud.clonedir):
+ bb.utils.mkdirhier(ud.clonedir)
+ runfetchcmd("tar -xzf %s" % ud.fullmirror, d, workdir=ud.clonedir)
+ else:
+ tmpdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR'))
+ runfetchcmd("tar -xzf %s" % ud.fullmirror, d, workdir=tmpdir)
+ output = runfetchcmd("%s remote" % ud.basecmd, d, quiet=True, workdir=ud.clonedir)
+ if 'mirror' in output:
+ runfetchcmd("%s remote rm mirror" % ud.basecmd, d, workdir=ud.clonedir)
+ runfetchcmd("%s remote add --mirror=fetch mirror %s" % (ud.basecmd, tmpdir), d, workdir=ud.clonedir)
+ fetch_cmd = "LANG=C %s fetch -f --update-head-ok --progress mirror " % (ud.basecmd)
+ runfetchcmd(fetch_cmd, d, workdir=ud.clonedir)
repourl = self._get_repo_url(ud)
+ needs_clone = False
+ if os.path.exists(ud.clonedir):
+ # The directory may exist, but not be the top level of a bare git
+ # repository in which case it needs to be deleted and re-cloned.
+ try:
+ # Since clones can be bare, use --absolute-git-dir instead of --show-toplevel
+ output = runfetchcmd("LANG=C %s rev-parse --absolute-git-dir" % ud.basecmd, d, workdir=ud.clonedir)
+ toplevel = output.rstrip()
+
+ if not bb.utils.path_is_descendant(toplevel, ud.clonedir):
+ logger.warning("Top level directory '%s' is not a descendant of '%s'. Re-cloning", toplevel, ud.clonedir)
+ needs_clone = True
+ except bb.fetch2.FetchError as e:
+ logger.warning("Unable to get top level for %s (not a git directory?): %s", ud.clonedir, e)
+ needs_clone = True
+ except FileNotFoundError as e:
+ logger.warning("%s", e)
+ needs_clone = True
+
+ if needs_clone:
+ shutil.rmtree(ud.clonedir)
+ else:
+ needs_clone = True
+
# If the repo still doesn't exist, fallback to cloning it
- if not os.path.exists(ud.clonedir):
- # We do this since git will use a "-l" option automatically for local urls where possible
+ if needs_clone:
+ # We do this since git will use a "-l" option automatically for local urls where possible,
+ # but it doesn't work when git/objects is a symlink, only works when it is a directory.
if repourl.startswith("file://"):
- repourl = repourl[7:]
+ repourl_path = repourl[7:]
+ objects = os.path.join(repourl_path, 'objects')
+ if os.path.isdir(objects) and not os.path.islink(objects):
+ repourl = repourl_path
clone_cmd = "LANG=C %s clone --bare --mirror %s %s --progress" % (ud.basecmd, shlex.quote(repourl), ud.clonedir)
if ud.proto.lower() != 'file':
bb.fetch2.check_network_access(d, clone_cmd, ud.url)
@@ -365,7 +455,11 @@ class Git(FetchMethod):
runfetchcmd("%s remote rm origin" % ud.basecmd, d, workdir=ud.clonedir)
runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, shlex.quote(repourl)), d, workdir=ud.clonedir)
- fetch_cmd = "LANG=C %s fetch -f --progress %s refs/*:refs/*" % (ud.basecmd, shlex.quote(repourl))
+
+ if ud.nobranch:
+ fetch_cmd = "LANG=C %s fetch -f --progress %s refs/*:refs/*" % (ud.basecmd, shlex.quote(repourl))
+ else:
+ fetch_cmd = "LANG=C %s fetch -f --progress %s refs/heads/*:refs/heads/* refs/tags/*:refs/tags/*" % (ud.basecmd, shlex.quote(repourl))
if ud.proto.lower() != 'file':
bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
progresshandler = GitProgressHandler(d)
@@ -388,15 +482,14 @@ class Git(FetchMethod):
if missing_rev:
raise bb.fetch2.FetchError("Unable to find revision %s even from upstream" % missing_rev)
- if self._contains_lfs(ud, d, ud.clonedir) and self._need_lfs(ud):
+ if self.lfs_need_update(ud, d):
# Unpack temporary working copy, use it to run 'git checkout' to force pre-fetching
- # of all LFS blobs needed at the the srcrev.
+ # of all LFS blobs needed at the srcrev.
#
# It would be nice to just do this inline here by running 'git-lfs fetch'
# on the bare clonedir, but that operation requires a working copy on some
# releases of Git LFS.
- tmpdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR'))
- try:
+ with tempfile.TemporaryDirectory(dir=d.getVar('DL_DIR')) as tmpdir:
# Do the checkout. This implicitly involves a Git LFS fetch.
Git.unpack(self, ud, tmpdir, d)
@@ -412,12 +505,24 @@ class Git(FetchMethod):
# Only do this if the unpack resulted in a .git/lfs directory being
# created; this only happens if at least one blob needed to be
# downloaded.
- if os.path.exists(os.path.join(tmpdir, "git", ".git", "lfs")):
- runfetchcmd("tar -cf - lfs | tar -xf - -C %s" % ud.clonedir, d, workdir="%s/git/.git" % tmpdir)
- finally:
- bb.utils.remove(tmpdir, recurse=True)
+ if os.path.exists(os.path.join(ud.destdir, ".git", "lfs")):
+ runfetchcmd("tar -cf - lfs | tar -xf - -C %s" % ud.clonedir, d, workdir="%s/.git" % ud.destdir)
def build_mirror_data(self, ud, d):
+
+ # Create as a temp file and move atomically into position to avoid races
+ @contextmanager
+ def create_atomic(filename):
+ fd, tfile = tempfile.mkstemp(dir=os.path.dirname(filename))
+ try:
+ yield tfile
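+                # os.umask() both sets and returns the mask, so set a throwaway
+                # value and immediately restore it to read the current umask,
+                # then widen mkstemp's restrictive 0o600 mode accordingly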
+ umask = os.umask(0o666)
+ os.umask(umask)
+ os.chmod(tfile, (0o666 & ~umask))
+ os.rename(tfile, filename)
+ finally:
+ os.close(fd)
+
if ud.shallow and ud.write_shallow_tarballs:
if not os.path.exists(ud.fullshallow):
if os.path.islink(ud.fullshallow):
@@ -428,7 +533,8 @@ class Git(FetchMethod):
self.clone_shallow_local(ud, shallowclone, d)
logger.info("Creating tarball of git repository")
- runfetchcmd("tar -czf %s ." % ud.fullshallow, d, workdir=shallowclone)
+ with create_atomic(ud.fullshallow) as tfile:
+ runfetchcmd("tar -czf %s ." % tfile, d, workdir=shallowclone)
runfetchcmd("touch %s.done" % ud.fullshallow, d)
finally:
bb.utils.remove(tempdir, recurse=True)
@@ -437,7 +543,11 @@ class Git(FetchMethod):
os.unlink(ud.fullmirror)
logger.info("Creating tarball of git repository")
- runfetchcmd("tar -czf %s ." % ud.fullmirror, d, workdir=ud.clonedir)
+ with create_atomic(ud.fullmirror) as tfile:
+ mtime = runfetchcmd("{} log --all -1 --format=%cD".format(ud.basecmd), d,
+ quiet=True, workdir=ud.clonedir)
+ runfetchcmd("tar -czf %s --owner oe:0 --group oe:0 --mtime \"%s\" ."
+ % (tfile, mtime), d, workdir=ud.clonedir)
runfetchcmd("touch %s.done" % ud.fullmirror, d)
def clone_shallow_local(self, ud, dest, d):
@@ -499,18 +609,31 @@ class Git(FetchMethod):
def unpack(self, ud, destdir, d):
""" unpack the downloaded src to destdir"""
- subdir = ud.parm.get("subpath", "")
- if subdir != "":
- readpathspec = ":%s" % subdir
- def_destsuffix = "%s/" % os.path.basename(subdir.rstrip('/'))
- else:
- readpathspec = ""
- def_destsuffix = "git/"
+ subdir = ud.parm.get("subdir")
+ subpath = ud.parm.get("subpath")
+ readpathspec = ""
+ def_destsuffix = "git/"
+
+ if subpath:
+ readpathspec = ":%s" % subpath
+ def_destsuffix = "%s/" % os.path.basename(subpath.rstrip('/'))
+
+ if subdir:
+ # If 'subdir' param exists, create a dir and use it as destination for unpack cmd
+ if os.path.isabs(subdir):
+ if not os.path.realpath(subdir).startswith(os.path.realpath(destdir)):
+ raise bb.fetch2.UnpackError("subdir argument isn't a subdirectory of unpack root %s" % destdir, ud.url)
+ destdir = subdir
+ else:
+ destdir = os.path.join(destdir, subdir)
+ def_destsuffix = ""
destsuffix = ud.parm.get("destsuffix", def_destsuffix)
destdir = ud.destdir = os.path.join(destdir, destsuffix)
if os.path.exists(destdir):
bb.utils.prunedir(destdir)
+ if not ud.bareclone:
+ ud.unpack_tracer.unpack("git", destdir)
need_lfs = self._need_lfs(ud)
@@ -520,13 +643,12 @@ class Git(FetchMethod):
source_found = False
source_error = []
- if not source_found:
- clonedir_is_up_to_date = not self.clonedir_need_update(ud, d)
- if clonedir_is_up_to_date:
- runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, ud.cloneflags, ud.clonedir, destdir), d)
- source_found = True
- else:
- source_error.append("clone directory not available or not up to date: " + ud.clonedir)
+ clonedir_is_up_to_date = not self.clonedir_need_update(ud, d)
+ if clonedir_is_up_to_date:
+ runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, ud.cloneflags, ud.clonedir, destdir), d)
+ source_found = True
+ else:
+ source_error.append("clone directory not available or not up to date: " + ud.clonedir)
if not source_found:
if ud.shallow:
@@ -550,9 +672,11 @@ class Git(FetchMethod):
raise bb.fetch2.FetchError("Repository %s has LFS content, install git-lfs on host to download (or set lfs=0 to ignore it)" % (repourl))
elif not need_lfs:
bb.note("Repository %s has LFS content but it is not being fetched" % (repourl))
+ else:
+ runfetchcmd("%s lfs install --local" % ud.basecmd, d, workdir=destdir)
if not ud.nocheckout:
- if subdir != "":
+ if subpath:
runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d,
workdir=destdir)
runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d, workdir=destdir)
@@ -601,6 +725,35 @@ class Git(FetchMethod):
            raise bb.fetch2.FetchError("The command '%s' gave output with more than 1 line unexpectedly, output: '%s'" % (cmd, output))
return output.split()[0] != "0"
+ def _lfs_objects_downloaded(self, ud, d, name, wd):
+ """
+ Verifies whether the LFS objects for requested revisions have already been downloaded
+ """
+ # Bail out early if this repository doesn't use LFS
+ if not self._need_lfs(ud) or not self._contains_lfs(ud, d, wd):
+ return True
+
+ # The Git LFS specification specifies ([1]) the LFS folder layout so it should be safe to check for file
+ # existence.
+ # [1] https://github.com/git-lfs/git-lfs/blob/main/docs/spec.md#intercepting-git
+ cmd = "%s lfs ls-files -l %s" \
+ % (ud.basecmd, ud.revisions[name])
+ output = runfetchcmd(cmd, d, quiet=True, workdir=wd).rstrip()
+ # Do not do any further matching if no objects are managed by LFS
+ if not output:
+ return True
+
+ # Match all lines beginning with the hexadecimal OID
+ oid_regex = re.compile("^(([a-fA-F0-9]{2})([a-fA-F0-9]{2})[A-Fa-f0-9]+)")
+ for line in output.split("\n"):
+ oid = re.search(oid_regex, line)
+            if not oid:
+                bb.warn("git lfs ls-files output '%s' did not match expected format." % line)
+                continue
+            if not os.path.exists(os.path.join(wd, "lfs", "objects", oid.group(2), oid.group(3), oid.group(1))):
+ return False
+
+ return True
+
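Concretely, per that layout an object with OID beginning ab12... is expected at lfs/objects/ab/12/ab12...; a minimal sketch of the path derivation (helper name is hypothetical):

    import os

    def lfs_object_path(bare_clonedir, oid):
        # Git LFS shards objects by the first two hex byte pairs of the OID
        return os.path.join(bare_clonedir, "lfs", "objects", oid[0:2], oid[2:4], oid)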
def _need_lfs(self, ud):
return ud.parm.get("lfs", "1") == "1"
@@ -609,13 +762,11 @@ class Git(FetchMethod):
Check if the repository has 'lfs' (large file) content
"""
- if not ud.nobranch:
- branchname = ud.branches[ud.names[0]]
- else:
- branchname = "master"
-
- # The bare clonedir doesn't use the remote names; it has the branch immediately.
- if wd == ud.clonedir:
+ if ud.nobranch:
+ # If no branch is specified, use the current git commit
+ refname = self._build_revision(ud, d, ud.names[0])
+ elif wd == ud.clonedir:
+ # The bare clonedir doesn't use the remote names; it has the branch immediately.
refname = ud.branches[ud.names[0]]
else:
refname = "origin/%s" % ud.branches[ud.names[0]]
@@ -658,7 +809,6 @@ class Git(FetchMethod):
Return a unique key for the url
"""
# Collapse adjacent slashes
- slash_re = re.compile(r"/+")
return "git:" + ud.host + slash_re.sub(".", ud.path) + ud.unresolvedrev[name]
def _lsremote(self, ud, d, search):
@@ -691,6 +841,12 @@ class Git(FetchMethod):
"""
Compute the HEAD revision for the url
"""
+ if not d.getVar("__BBSRCREV_SEEN"):
+ raise bb.fetch2.FetchError("Recipe uses a floating tag/branch '%s' for repo '%s' without a fixed SRCREV yet doesn't call bb.fetch2.get_srcrev() (use SRCPV in PV for OE)." % (ud.unresolvedrev[name], ud.host+ud.path))
+
+ # Ensure we mark as not cached
+ bb.fetch2.mark_recipe_nocache(d)
+
output = self._lsremote(ud, d, "")
# Tags of the form ^{} may not work, need to fallback to other form
if ud.unresolvedrev[name][:5] == "refs/" or ud.usehead:
@@ -715,38 +871,42 @@ class Git(FetchMethod):
"""
pupver = ('', '')
- tagregex = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX') or r"(?P<pver>([0-9][\.|_]?)+)")
try:
output = self._lsremote(ud, d, "refs/tags/*")
except (bb.fetch2.FetchError, bb.fetch2.NetworkAccess) as e:
bb.note("Could not list remote: %s" % str(e))
return pupver
+ rev_tag_re = re.compile(r"([0-9a-f]{40})\s+refs/tags/(.*)")
+ pver_re = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX') or r"(?P<pver>([0-9][\.|_]?)+)")
+ nonrel_re = re.compile(r"(alpha|beta|rc|final)+")
+
verstring = ""
- revision = ""
for line in output.split("\n"):
if not line:
break
- tag_head = line.split("/")[-1]
+ m = rev_tag_re.match(line)
+ if not m:
+ continue
+
+ (revision, tag) = m.groups()
+
# Ignore non-released branches
- m = re.search(r"(alpha|beta|rc|final)+", tag_head)
- if m:
+ if nonrel_re.search(tag):
continue
# search for version in the line
- tag = tagregex.search(tag_head)
- if tag is None:
+ m = pver_re.search(tag)
+ if not m:
continue
- tag = tag.group('pver')
- tag = tag.replace("_", ".")
+ pver = m.group('pver').replace("_", ".")
- if verstring and bb.utils.vercmp(("0", tag, ""), ("0", verstring, "")) < 0:
+ if verstring and bb.utils.vercmp(("0", pver, ""), ("0", verstring, "")) < 0:
continue
- verstring = tag
- revision = line.split()[0]
+ verstring = pver
pupver = (verstring, revision)
return pupver
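As a worked example of the rewritten flow, reusing the three regexes defined above and assuming the default UPSTREAM_CHECK_GITTAGREGEX (the revision value is illustrative):

    line = "0123456789abcdef0123456789abcdef01234567 refs/tags/v1_2"
    revision, tag = rev_tag_re.match(line).groups()
    assert not nonrel_re.search(tag)     # "v1_2" is not alpha/beta/rc/final
    pver = pver_re.search(tag).group('pver').replace("_", ".")   # -> "1.2"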
diff --git a/lib/bb/fetch2/gitsm.py b/lib/bb/fetch2/gitsm.py
index a4527bf36..f7f3af721 100644
--- a/lib/bb/fetch2/gitsm.py
+++ b/lib/bb/fetch2/gitsm.py
@@ -88,9 +88,9 @@ class GitSM(Git):
subrevision[m] = module_hash.split()[2]
# Convert relative to absolute uri based on parent uri
- if uris[m].startswith('..'):
+ if uris[m].startswith('..') or uris[m].startswith('./'):
newud = copy.copy(ud)
- newud.path = os.path.realpath(os.path.join(newud.path, uris[m]))
+ newud.path = os.path.normpath(os.path.join(newud.path, uris[m]))
uris[m] = Git._get_repo_url(self, newud)
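For example (paths illustrative): a parent repo at path /src/foo/bar.git with a submodule url of ../baz.git normalizes as:

    # os.path.normpath("/src/foo/bar.git/../baz.git") -> "/src/foo/baz.git"
    # normpath, unlike the realpath used before, is purely lexical: it does
    # not require the path to exist or resolve symlinks on the local host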
for module in submodules:
@@ -115,10 +115,21 @@ class GitSM(Git):
# This has to be a file reference
proto = "file"
url = "gitsm://" + uris[module]
+ if url.endswith("{}{}".format(ud.host, ud.path)):
+                    raise bb.fetch2.FetchError("Submodule refers to the parent repository. This will cause a deadlock situation in the current version of BitBake. " \
+                            "Consider using the git fetcher instead.")
url += ';protocol=%s' % proto
url += ";name=%s" % module
url += ";subpath=%s" % module
+ url += ";nobranch=1"
+ url += ";lfs=%s" % self._need_lfs(ud)
+                # Note that adding "user=" here to give credentials to the
+                # submodule is not supported. Since using SRC_URI to give a
+                # git:// URL a password is not supported, one has to use one of
+                # the recommended ways (e.g. ~/.netrc or SSH config) which do
+                # specify the user (see the comment in git.py).
+                # So, we will not take patches adding "user=" support here.
ld = d.createCopy()
# Not necessary to set SRC_URI, since we're passing the URI to
@@ -140,16 +151,6 @@ class GitSM(Git):
if Git.need_update(self, ud, d):
return True
- try:
- # Check for the nugget dropped by the download operation
- known_srcrevs = runfetchcmd("%s config --get-all bitbake.srcrev" % \
- (ud.basecmd), d, workdir=ud.clonedir)
-
- if ud.revisions[ud.names[0]] in known_srcrevs.split():
- return False
- except bb.fetch2.FetchError:
- pass
-
need_update_list = []
def need_update_submodule(ud, url, module, modpath, workdir, d):
url += ";bareclone=1;nobranch=1"
@@ -172,13 +173,8 @@ class GitSM(Git):
shutil.rmtree(tmpdir)
else:
self.process_submodules(ud, ud.clonedir, need_update_submodule, d)
- if len(need_update_list) == 0:
- # We already have the required commits of all submodules. Drop
- # a nugget so we don't need to check again.
- runfetchcmd("%s config --add bitbake.srcrev %s" % \
- (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=ud.clonedir)
- if len(need_update_list) > 0:
+ if need_update_list:
logger.debug('gitsm: Submodules requiring update: %s' % (' '.join(need_update_list)))
return True
@@ -209,9 +205,6 @@ class GitSM(Git):
shutil.rmtree(tmpdir)
else:
self.process_submodules(ud, ud.clonedir, download_submodule, d)
- # Drop a nugget for the srcrev we've fetched (used by need_update)
- runfetchcmd("%s config --add bitbake.srcrev %s" % \
- (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=ud.clonedir)
def unpack(self, ud, destdir, d):
def unpack_submodules(ud, url, module, modpath, workdir, d):
@@ -225,6 +218,10 @@ class GitSM(Git):
try:
newfetch = Fetch([url], d, cache=False)
+ # modpath is needed by unpack tracer to calculate submodule
+ # checkout dir
+ new_ud = newfetch.ud[url]
+ new_ud.modpath = modpath
newfetch.unpack(root=os.path.dirname(os.path.join(repo_conf, 'modules', module)))
except Exception as e:
logger.error('gitsm: submodule unpack failed: %s %s' % (type(e).__name__, str(e)))
@@ -250,10 +247,12 @@ class GitSM(Git):
ret = self.process_submodules(ud, ud.destdir, unpack_submodules, d)
if not ud.bareclone and ret:
- # All submodules should already be downloaded and configured in the tree. This simply sets
- # up the configuration and checks out the files. The main project config should remain
- # unmodified, and no download from the internet should occur.
- runfetchcmd("%s submodule update --recursive --no-fetch" % (ud.basecmd), d, quiet=True, workdir=ud.destdir)
+ # All submodules should already be downloaded and configured in the tree. This simply
+ # sets up the configuration and checks out the files. The main project config should
+ # remain unmodified, and no download from the internet should occur. As such, lfs smudge
+ # should also be skipped as these files were already smudged in the fetch stage if lfs
+ # was enabled.
+ runfetchcmd("GIT_LFS_SKIP_SMUDGE=1 %s submodule update --recursive --no-fetch" % (ud.basecmd), d, quiet=True, workdir=ud.destdir)
def implicit_urldata(self, ud, d):
import shutil, subprocess, tempfile
diff --git a/lib/bb/fetch2/hg.py b/lib/bb/fetch2/hg.py
index 063e13008..cbff8c490 100644
--- a/lib/bb/fetch2/hg.py
+++ b/lib/bb/fetch2/hg.py
@@ -242,6 +242,7 @@ class Hg(FetchMethod):
revflag = "-r %s" % ud.revision
subdir = ud.parm.get("destsuffix", ud.module)
codir = "%s/%s" % (destdir, subdir)
+ ud.unpack_tracer.unpack("hg", codir)
scmdata = ud.parm.get("scmdata", "")
if scmdata != "nokeep":
diff --git a/lib/bb/fetch2/local.py b/lib/bb/fetch2/local.py
index e7d1c8c58..7d7668110 100644
--- a/lib/bb/fetch2/local.py
+++ b/lib/bb/fetch2/local.py
@@ -41,9 +41,9 @@ class Local(FetchMethod):
"""
Return the local filename of a given url assuming a successful fetch.
"""
- return self.localpaths(urldata, d)[-1]
+ return self.localfile_searchpaths(urldata, d)[-1]
- def localpaths(self, urldata, d):
+ def localfile_searchpaths(self, urldata, d):
"""
Return the local filename of a given url assuming a successful fetch.
"""
@@ -51,18 +51,14 @@ class Local(FetchMethod):
path = urldata.decodedurl
newpath = path
if path[0] == "/":
+ logger.debug2("Using absolute %s" % (path))
return [path]
filespath = d.getVar('FILESPATH')
if filespath:
logger.debug2("Searching for %s in paths:\n %s" % (path, "\n ".join(filespath.split(":"))))
newpath, hist = bb.utils.which(filespath, path, history=True)
+ logger.debug2("Using %s for %s" % (newpath, path))
searched.extend(hist)
- if not os.path.exists(newpath):
- dldirfile = os.path.join(d.getVar("DL_DIR"), path)
- logger.debug2("Defaulting to %s for %s" % (dldirfile, path))
- bb.utils.mkdirhier(os.path.dirname(dldirfile))
- searched.append(dldirfile)
- return searched
return searched
def need_update(self, ud, d):
@@ -78,9 +74,7 @@ class Local(FetchMethod):
filespath = d.getVar('FILESPATH')
if filespath:
locations = filespath.split(":")
- locations.append(d.getVar("DL_DIR"))
-
- msg = "Unable to find file " + urldata.url + " anywhere. The paths that were searched were:\n " + "\n ".join(locations)
+ msg = "Unable to find file " + urldata.url + " anywhere to download to " + urldata.localpath + ". The paths that were searched were:\n " + "\n ".join(locations)
raise FetchError(msg)
return True
diff --git a/lib/bb/fetch2/npm.py b/lib/bb/fetch2/npm.py
index 47898509f..15f3f19bc 100644
--- a/lib/bb/fetch2/npm.py
+++ b/lib/bb/fetch2/npm.py
@@ -44,17 +44,24 @@ def npm_package(package):
"""Convert the npm package name to remove unsupported character"""
# Scoped package names (with the @) use the same naming convention
# as the 'npm pack' command.
- if package.startswith("@"):
- return re.sub("/", "-", package[1:])
- return package
+ name = re.sub("/", "-", package)
+ name = name.lower()
+ name = re.sub(r"[^\-a-z0-9]", "", name)
+ name = name.strip("-")
+ return name
+
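A worked example of the sanitization steps above (package name chosen for illustration):

    # npm_package("@Scope/Example_Pkg")
    #   "/" -> "-"               : "@Scope-Example_Pkg"
    #   lowercase                : "@scope-example_pkg"
    #   drop [^-a-z0-9]          : "scope-examplepkg"
    #   strip leading/trailing - : "scope-examplepkg"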
def npm_filename(package, version):
"""Get the filename of a npm package"""
return npm_package(package) + "-" + version + ".tgz"
-def npm_localfile(package, version):
+def npm_localfile(package, version=None):
"""Get the local filename of a npm package"""
- return os.path.join("npm2", npm_filename(package, version))
+ if version is not None:
+ filename = npm_filename(package, version)
+ else:
+ filename = package
+ return os.path.join("npm2", filename)
def npm_integrity(integrity):
"""
@@ -69,41 +76,52 @@ def npm_unpack(tarball, destdir, d):
bb.utils.mkdirhier(destdir)
cmd = "tar --extract --gzip --file=%s" % shlex.quote(tarball)
cmd += " --no-same-owner"
+ cmd += " --delay-directory-restore"
cmd += " --strip-components=1"
runfetchcmd(cmd, d, workdir=destdir)
+ runfetchcmd("chmod -R +X '%s'" % (destdir), d, quiet=True, workdir=destdir)
class NpmEnvironment(object):
"""
Using a npm config file seems more reliable than using cli arguments.
This class allows to create a controlled environment for npm commands.
"""
- def __init__(self, d, configs=None):
+ def __init__(self, d, configs=[], npmrc=None):
self.d = d
- self.configs = configs
+
+ self.user_config = tempfile.NamedTemporaryFile(mode="w", buffering=1)
+ for key, value in configs:
+ self.user_config.write("%s=%s\n" % (key, value))
+
+ if npmrc:
+ self.global_config_name = npmrc
+ else:
+ self.global_config_name = "/dev/null"
+
+ def __del__(self):
+ if self.user_config:
+ self.user_config.close()
def run(self, cmd, args=None, configs=None, workdir=None):
"""Run npm command in a controlled environment"""
with tempfile.TemporaryDirectory() as tmpdir:
d = bb.data.createCopy(self.d)
+ d.setVar("PATH", d.getVar("PATH")) # PATH might contain $HOME - evaluate it before patching
d.setVar("HOME", tmpdir)
- cfgfile = os.path.join(tmpdir, "npmrc")
-
if not workdir:
workdir = tmpdir
def _run(cmd):
- cmd = "NPM_CONFIG_USERCONFIG=%s " % cfgfile + cmd
- cmd = "NPM_CONFIG_GLOBALCONFIG=%s " % cfgfile + cmd
+ cmd = "NPM_CONFIG_USERCONFIG=%s " % (self.user_config.name) + cmd
+ cmd = "NPM_CONFIG_GLOBALCONFIG=%s " % (self.global_config_name) + cmd
return runfetchcmd(cmd, d, workdir=workdir)
- if self.configs:
- for key, value in self.configs:
- _run("npm config set %s %s" % (key, shlex.quote(value)))
-
if configs:
+ bb.warn("Use of configs argument of NpmEnvironment.run() function"
+ " is deprecated. Please use args argument instead.")
for key, value in configs:
- _run("npm config set %s %s" % (key, shlex.quote(value)))
+ cmd += " --%s=%s" % (key, shlex.quote(value))
if args:
for key, value in args:
@@ -142,12 +160,12 @@ class Npm(FetchMethod):
raise ParameterError("Invalid 'version' parameter", ud.url)
# Extract the 'registry' part of the url
- ud.registry = re.sub(r"^npm://", "http://", ud.url.split(";")[0])
+ ud.registry = re.sub(r"^npm://", "https://", ud.url.split(";")[0])
# Using the 'downloadfilename' parameter as local filename
# or the npm package name.
if "downloadfilename" in ud.parm:
- ud.localfile = d.expand(ud.parm["downloadfilename"])
+ ud.localfile = npm_localfile(d.expand(ud.parm["downloadfilename"]))
else:
ud.localfile = npm_localfile(ud.package, ud.version)
@@ -165,14 +183,14 @@ class Npm(FetchMethod):
def _resolve_proxy_url(self, ud, d):
def _npm_view():
- configs = []
- configs.append(("json", "true"))
- configs.append(("registry", ud.registry))
+ args = []
+ args.append(("json", "true"))
+ args.append(("registry", ud.registry))
pkgver = shlex.quote(ud.package + "@" + ud.version)
cmd = ud.basecmd + " view %s" % pkgver
env = NpmEnvironment(d)
check_network_access(d, cmd, ud.registry)
- view_string = env.run(cmd, configs=configs)
+ view_string = env.run(cmd, args=args)
if not view_string:
raise FetchError("Unavailable package %s" % pkgver, ud.url)
@@ -280,6 +298,7 @@ class Npm(FetchMethod):
destsuffix = ud.parm.get("destsuffix", "npm")
destdir = os.path.join(rootdir, destsuffix)
npm_unpack(ud.localpath, destdir, d)
+ ud.unpack_tracer.unpack("npm", destdir)
def clean(self, ud, d):
"""Clean any existing full or partial download"""
diff --git a/lib/bb/fetch2/npmsw.py b/lib/bb/fetch2/npmsw.py
index 0c3511d8a..cb7edd425 100644
--- a/lib/bb/fetch2/npmsw.py
+++ b/lib/bb/fetch2/npmsw.py
@@ -24,11 +24,14 @@ import bb
from bb.fetch2 import Fetch
from bb.fetch2 import FetchMethod
from bb.fetch2 import ParameterError
+from bb.fetch2 import runfetchcmd
from bb.fetch2 import URI
from bb.fetch2.npm import npm_integrity
from bb.fetch2.npm import npm_localfile
from bb.fetch2.npm import npm_unpack
from bb.utils import is_semver
+from bb.utils import lockfile
+from bb.utils import unlockfile
def foreach_dependencies(shrinkwrap, callback=None, dev=False):
"""
@@ -38,8 +41,9 @@ def foreach_dependencies(shrinkwrap, callback=None, dev=False):
with:
name = the package name (string)
params = the package parameters (dictionary)
- deptree = the package dependency tree (array of strings)
+ destdir = the destination of the package (string)
"""
+    # For handling old style dependencies entries in shrinkwrap files
def _walk_deps(deps, deptree):
for name in deps:
subtree = [*deptree, name]
@@ -49,9 +53,22 @@ def foreach_dependencies(shrinkwrap, callback=None, dev=False):
continue
elif deps[name].get("bundled", False):
continue
- callback(name, deps[name], subtree)
-
- _walk_deps(shrinkwrap.get("dependencies", {}), [])
+ destsubdirs = [os.path.join("node_modules", dep) for dep in subtree]
+ destsuffix = os.path.join(*destsubdirs)
+ callback(name, deps[name], destsuffix)
+
+ # packages entry means new style shrinkwrap file, else use dependencies
+ packages = shrinkwrap.get("packages", None)
+ if packages is not None:
+ for package in packages:
+ if package.startswith('node_modules/'):
+ name = package.split('node_modules/')[-1]
+ package_infos = packages.get(package, {})
+ if dev == False and package_infos.get("dev", False):
+            if not dev and package_infos.get("dev", False):
+ callback(name, package_infos, package)
+ else:
+ _walk_deps(shrinkwrap.get("dependencies", {}), [])
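A hedged sketch of the two shrinkwrap shapes this now handles (abbreviated, illustrative data):

    # new style (lockfileVersion >= 2): "packages" keys are filesystem paths
    shrinkwrap = {"packages": {
        "": {"name": "app"},                       # the project itself: no node_modules/ prefix, skipped
        "node_modules/foo": {"version": "1.0.0"},  # -> callback("foo", {...}, "node_modules/foo")
        "node_modules/bar": {"dev": True},         # skipped unless dev=True
    }}
    # old style: nested "dependencies" dicts, walked by _walk_deps() instead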
class NpmShrinkWrap(FetchMethod):
"""Class to fetch all package from a shrinkwrap file"""
@@ -72,19 +89,31 @@ class NpmShrinkWrap(FetchMethod):
# Resolve the dependencies
ud.deps = []
- def _resolve_dependency(name, params, deptree):
+ def _resolve_dependency(name, params, destsuffix):
url = None
localpath = None
extrapaths = []
- destsubdirs = [os.path.join("node_modules", dep) for dep in deptree]
- destsuffix = os.path.join(*destsubdirs)
+ unpack = True
integrity = params.get("integrity", None)
resolved = params.get("resolved", None)
version = params.get("version", None)
+ depname = params.get("name", None)
+ link = params.get("link", None)
+
+ # Handle local link sources without version
+ if not version and resolved and link:
+ localpath = resolved
+ if not localpath.endswith(".tgz"):
+ unpack = False
# Handle registry sources
- if is_semver(version) and resolved and integrity:
+ elif is_semver(version) and integrity:
+
+ # Handle duplicate dependencies without url
+ if not resolved:
+ return
+
localfile = npm_localfile(name, version)
uri = URI(resolved)
@@ -109,7 +138,7 @@ class NpmShrinkWrap(FetchMethod):
# Handle http tarball sources
elif version.startswith("http") and integrity:
- localfile = os.path.join("npm2", os.path.basename(version))
+ localfile = npm_localfile(os.path.basename(version))
uri = URI(version)
uri.params["downloadfilename"] = localfile
@@ -121,8 +150,29 @@ class NpmShrinkWrap(FetchMethod):
localpath = os.path.join(d.getVar("DL_DIR"), localfile)
+ # Handle local tarball and link sources
+ # TODO: properly resolve file:, file:// and missing file: prefix
+ elif version.startswith("file:"):
+ localpath = version[5:]
+ if not version.endswith(".tgz"):
+ unpack = False
+
# Handle git sources
- elif version.startswith("git"):
+ elif version.startswith(("git", "bitbucket","gist")) or (
+ not version.endswith((".tgz", ".tar", ".tar.gz"))
+ and not version.startswith((".", "@", "/"))
+ and "/" in version
+ ):
+ if version.startswith("github:"):
+ version = "git+https://github.com/" + version[len("github:"):]
+ elif version.startswith("gist:"):
+ version = "git+https://gist.github.com/" + version[len("gist:"):]
+ elif version.startswith("bitbucket:"):
+ version = "git+https://bitbucket.org/" + version[len("bitbucket:"):]
+ elif version.startswith("gitlab:"):
+ version = "git+https://gitlab.com/" + version[len("gitlab:"):]
+ elif not version.startswith(("git+","git:")):
+ version = "git+https://github.com/" + version
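The shorthand expansion mirrors npm's own alias rules; for example:

    # "github:user/repo#v1.0" -> "git+https://github.com/user/repo#v1.0"
    # "gist:123abc"           -> "git+https://gist.github.com/123abc"
    # "bitbucket:user/repo"   -> "git+https://bitbucket.org/user/repo"
    # "gitlab:user/repo"      -> "git+https://gitlab.com/user/repo"
    # bare "user/repo"        -> "git+https://github.com/user/repo"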
regex = re.compile(r"""
^
git\+
@@ -148,15 +198,17 @@ class NpmShrinkWrap(FetchMethod):
url = str(uri)
- # local tarball sources and local link sources are unsupported
else:
- raise ParameterError("Unsupported dependency: %s" % name, ud.url)
+ raise ParameterError("Unsupported dependency: name '%s', depname '%s', version '%s', resolved '%s'" % (name, depname, version, resolved), ud.url)
+ # name is needed by unpack tracer for module mapping
ud.deps.append({
+ "name": name,
"url": url,
"localpath": localpath,
"extrapaths": extrapaths,
"destsuffix": destsuffix,
+ "unpack": unpack,
})
try:
@@ -177,17 +229,24 @@ class NpmShrinkWrap(FetchMethod):
# This fetcher resolves multiple URIs from a shrinkwrap file and then
# forwards it to a proxy fetcher. The management of the donestamp file,
# the lockfile and the checksums are forwarded to the proxy fetcher.
- ud.proxy = Fetch([dep["url"] for dep in ud.deps], data)
+ shrinkwrap_urls = [dep["url"] for dep in ud.deps if dep["url"]]
+ if shrinkwrap_urls:
+ ud.proxy = Fetch(shrinkwrap_urls, data)
ud.needdonestamp = False
@staticmethod
def _foreach_proxy_method(ud, handle):
returns = []
- for proxy_url in ud.proxy.urls:
- proxy_ud = ud.proxy.ud[proxy_url]
- proxy_d = ud.proxy.d
- proxy_ud.setup_localpath(proxy_d)
- returns.append(handle(proxy_ud.method, proxy_ud, proxy_d))
+        # Check if there are dependencies before trying to fetch them
+        if ud.deps:
+ if hasattr(ud, 'proxy'):
+ for proxy_url in ud.proxy.urls:
+ proxy_ud = ud.proxy.ud[proxy_url]
+ proxy_d = ud.proxy.d
+ proxy_ud.setup_localpath(proxy_d)
+ lf = lockfile(proxy_ud.lockfile)
+ returns.append(handle(proxy_ud.method, proxy_ud, proxy_d))
+ unlockfile(lf)
return returns
def verify_donestamp(self, ud, d):
@@ -224,6 +283,7 @@ class NpmShrinkWrap(FetchMethod):
destsuffix = ud.parm.get("destsuffix")
if destsuffix:
destdir = os.path.join(rootdir, destsuffix)
+ ud.unpack_tracer.unpack("npm-shrinkwrap", destdir)
bb.utils.mkdirhier(destdir)
bb.utils.copyfile(ud.shrinkwrap_file,
@@ -237,7 +297,22 @@ class NpmShrinkWrap(FetchMethod):
for dep in manual:
depdestdir = os.path.join(destdir, dep["destsuffix"])
- npm_unpack(dep["localpath"], depdestdir, d)
+ if dep["url"]:
+ npm_unpack(dep["localpath"], depdestdir, d)
+ else:
+                    depsrcdir = os.path.join(destdir, dep["localpath"])
+ if dep["unpack"]:
+ npm_unpack(depsrcdir, depdestdir, d)
+ else:
+ if os.path.commonpath([os.path.abspath(depsrcdir)]) == os.path.commonpath([os.path.abspath(depsrcdir), os.path.abspath(depdestdir)]):
+                            # If depsrcdir is a parent directory of depdestdir,
+                            # symlink instead to avoid the copy descending into itself
+ bb.utils.mkdirhier(os.path.dirname(depdestdir))
+ cmd = 'ln -snf "%s" "%s"' % (depsrcdir, depdestdir)
+ runfetchcmd(cmd, d)
+ else:
+ bb.utils.mkdirhier(depdestdir)
+ cmd = 'cp -fpPRH "%s/." .' % (depsrcdir)
+ runfetchcmd(cmd, d, workdir=depdestdir)
def clean(self, ud, d):
"""Clean any existing full or partial download"""
diff --git a/lib/bb/fetch2/osc.py b/lib/bb/fetch2/osc.py
index d9ce44390..495ac8a30 100644
--- a/lib/bb/fetch2/osc.py
+++ b/lib/bb/fetch2/osc.py
@@ -1,4 +1,6 @@
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
"""
@@ -9,6 +11,7 @@ Based on the svn "Fetch" implementation.
import logging
import os
+import re
import bb
from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
@@ -36,6 +39,7 @@ class Osc(FetchMethod):
# Create paths to osc checkouts
oscdir = d.getVar("OSCDIR") or (d.getVar("DL_DIR") + "/osc")
relpath = self._strip_leading_slashes(ud.path)
+ ud.oscdir = oscdir
ud.pkgdir = os.path.join(oscdir, ud.host)
ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module)
@@ -43,13 +47,13 @@ class Osc(FetchMethod):
ud.revision = ud.parm['rev']
else:
pv = d.getVar("PV", False)
- rev = bb.fetch2.srcrev_internal_helper(ud, d)
+ rev = bb.fetch2.srcrev_internal_helper(ud, d, '')
if rev:
ud.revision = rev
else:
ud.revision = ""
- ud.localfile = d.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.path.replace('/', '.'), ud.revision))
+ ud.localfile = d.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), relpath.replace('/', '.'), ud.revision))
def _buildosccommand(self, ud, d, command):
"""
@@ -59,26 +63,49 @@ class Osc(FetchMethod):
basecmd = d.getVar("FETCHCMD_osc") or "/usr/bin/env osc"
- proto = ud.parm.get('protocol', 'ocs')
+ proto = ud.parm.get('protocol', 'https')
options = []
config = "-c %s" % self.generate_config(ud, d)
- if ud.revision:
+ if getattr(ud, 'revision', ''):
options.append("-r %s" % ud.revision)
coroot = self._strip_leading_slashes(ud.path)
if command == "fetch":
- osccmd = "%s %s co %s/%s %s" % (basecmd, config, coroot, ud.module, " ".join(options))
+ osccmd = "%s %s -A %s://%s co %s/%s %s" % (basecmd, config, proto, ud.host, coroot, ud.module, " ".join(options))
elif command == "update":
- osccmd = "%s %s up %s" % (basecmd, config, " ".join(options))
+ osccmd = "%s %s -A %s://%s up %s" % (basecmd, config, proto, ud.host, " ".join(options))
+ elif command == "api_source":
+ osccmd = "%s %s -A %s://%s api source/%s/%s" % (basecmd, config, proto, ud.host, coroot, ud.module)
else:
raise FetchError("Invalid osc command %s" % command, ud.url)
return osccmd
+ def _latest_revision(self, ud, d, name):
+ """
+ Fetch latest revision for the given package
+ """
+ api_source_cmd = self._buildosccommand(ud, d, "api_source")
+
+ output = runfetchcmd(api_source_cmd, d)
+ match = re.match(r'<directory ?.* rev="(\d+)".*>', output)
+ if match is None:
+ raise FetchError("Unable to parse osc response", ud.url)
+ return match.groups()[0]
+
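The api source endpoint returns an XML directory listing whose root element carries the revision; an illustrative (not verbatim) response:

    # <directory name="pkg" rev="42" vrev="7" srcmd5="..."> ... </directory>
    # the regex above extracts "42" as the latest revision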
+ def _revision_key(self, ud, d, name):
+ """
+ Return a unique key for the url
+ """
+ # Collapse adjacent slashes
+ slash_re = re.compile(r"/+")
+ rev = getattr(ud, 'revision', "latest")
+ return "osc:%s%s.%s.%s" % (ud.host, slash_re.sub(".", ud.path), name, rev)
+
def download(self, ud, d):
"""
Fetch url
@@ -86,7 +113,7 @@ class Osc(FetchMethod):
logger.debug2("Fetch: checking for module directory '" + ud.moddir + "'")
- if os.access(os.path.join(d.getVar('OSCDIR'), ud.path, ud.module), os.R_OK):
+ if os.access(ud.moddir, os.R_OK):
oscupdatecmd = self._buildosccommand(ud, d, "update")
logger.info("Update "+ ud.url)
# update sources there
@@ -114,20 +141,23 @@ class Osc(FetchMethod):
Generate a .oscrc to be used for this run.
"""
- config_path = os.path.join(d.getVar('OSCDIR'), "oscrc")
+ config_path = os.path.join(ud.oscdir, "oscrc")
+ if not os.path.exists(ud.oscdir):
+ bb.utils.mkdirhier(ud.oscdir)
+
if (os.path.exists(config_path)):
os.remove(config_path)
f = open(config_path, 'w')
+ proto = ud.parm.get('protocol', 'https')
f.write("[general]\n")
- f.write("apisrv = %s\n" % ud.host)
- f.write("scheme = http\n")
+ f.write("apiurl = %s://%s\n" % (proto, ud.host))
f.write("su-wrapper = su -c\n")
f.write("build-root = %s\n" % d.getVar('WORKDIR'))
f.write("urllist = %s\n" % d.getVar("OSCURLLIST"))
f.write("extra-pkgs = gzip\n")
f.write("\n")
- f.write("[%s]\n" % ud.host)
+ f.write("[%s://%s]\n" % (proto, ud.host))
f.write("user = %s\n" % ud.parm["user"])
f.write("pass = %s\n" % ud.parm["pswd"])
f.close()
diff --git a/lib/bb/fetch2/perforce.py b/lib/bb/fetch2/perforce.py
index e2a41a4a1..3b6fa4b1e 100644
--- a/lib/bb/fetch2/perforce.py
+++ b/lib/bb/fetch2/perforce.py
@@ -134,7 +134,7 @@ class Perforce(FetchMethod):
ud.setup_revisions(d)
- ud.localfile = d.expand('%s_%s_%s_%s.tar.gz' % (cleanedhost, cleanedpath, cleandedmodule, ud.revision))
+ ud.localfile = d.expand('%s_%s_%s_%s.tar.gz' % (cleanedhost, cleanedpath, cleanedmodule, ud.revision))
def _buildp4command(self, ud, d, command, depot_filename=None):
"""
diff --git a/lib/bb/fetch2/sftp.py b/lib/bb/fetch2/sftp.py
index f87f292e5..7884cce94 100644
--- a/lib/bb/fetch2/sftp.py
+++ b/lib/bb/fetch2/sftp.py
@@ -103,7 +103,7 @@ class SFTP(FetchMethod):
if path[:3] == '/~/':
path = path[3:]
- remote = '%s%s:%s' % (user, urlo.hostname, path)
+ remote = '"%s%s:%s"' % (user, urlo.hostname, path)
cmd = '%s %s %s %s' % (basecmd, port, remote, lpath)
diff --git a/lib/bb/fetch2/ssh.py b/lib/bb/fetch2/ssh.py
index 2c8557e1f..0cbb2a6f2 100644
--- a/lib/bb/fetch2/ssh.py
+++ b/lib/bb/fetch2/ssh.py
@@ -32,6 +32,7 @@ IETF secsh internet draft:
import re, os
from bb.fetch2 import check_network_access, FetchMethod, ParameterError, runfetchcmd
+import urllib
__pattern__ = re.compile(r'''
@@ -40,9 +41,9 @@ __pattern__ = re.compile(r'''
( # Optional username/password block
(?P<user>\S+) # username
(:(?P<pass>\S+))? # colon followed by the password (optional)
- )?
(?P<cparam>(;[^;]+)*)? # connection parameters block (optional)
@
+ )?
(?P<host>\S+?) # non-greedy match of the host
(:(?P<port>[0-9]+))? # colon followed by the port (optional)
/
@@ -70,6 +71,7 @@ class SSH(FetchMethod):
"git:// prefix with protocol=ssh", urldata.url)
m = __pattern__.match(urldata.url)
path = m.group('path')
+ path = urllib.parse.unquote(path)
host = m.group('host')
urldata.localpath = os.path.join(d.getVar('DL_DIR'),
os.path.basename(os.path.normpath(path)))
@@ -96,6 +98,11 @@ class SSH(FetchMethod):
fr += '@%s' % host
else:
fr = host
+
+ if path[0] != '~':
+ path = '/%s' % path
+ path = urllib.parse.unquote(path)
+
fr += ':%s' % path
cmd = 'scp -B -r %s %s %s/' % (
@@ -108,3 +115,41 @@ class SSH(FetchMethod):
runfetchcmd(cmd, d)
+ def checkstatus(self, fetch, urldata, d):
+ """
+ Check the status of the url
+ """
+ m = __pattern__.match(urldata.url)
+ path = m.group('path')
+ host = m.group('host')
+ port = m.group('port')
+ user = m.group('user')
+ password = m.group('pass')
+
+ if port:
+ portarg = '-P %s' % port
+ else:
+ portarg = ''
+
+ if user:
+ fr = user
+ if password:
+ fr += ':%s' % password
+ fr += '@%s' % host
+ else:
+ fr = host
+
+ if path[0] != '~':
+ path = '/%s' % path
+ path = urllib.parse.unquote(path)
+
+ cmd = 'ssh -o BatchMode=true %s %s [ -f %s ]' % (
+ portarg,
+ fr,
+ path
+ )
+
+ check_network_access(d, cmd, urldata.url)
+ runfetchcmd(cmd, d)
+
+ return True
diff --git a/lib/bb/fetch2/svn.py b/lib/bb/fetch2/svn.py
index 80102b44f..d40e4d290 100644
--- a/lib/bb/fetch2/svn.py
+++ b/lib/bb/fetch2/svn.py
@@ -57,7 +57,12 @@ class Svn(FetchMethod):
if 'rev' in ud.parm:
ud.revision = ud.parm['rev']
- ud.localfile = d.expand('%s_%s_%s_%s_.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision))
+ # Whether to use the @REV peg-revision syntax in the svn command or not
+ ud.pegrevision = True
+ if 'nopegrevision' in ud.parm:
+ ud.pegrevision = False
+
+ ud.localfile = d.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ["0", "1"][ud.pegrevision]))
def _buildsvncommand(self, ud, d, command):
"""
@@ -98,7 +103,8 @@ class Svn(FetchMethod):
if ud.revision:
options.append("-r %s" % ud.revision)
- suffix = "@%s" % (ud.revision)
+ if ud.pegrevision:
+ suffix = "@%s" % (ud.revision)
if command == "fetch":
transportuser = ud.parm.get("transportuser", "")
diff --git a/lib/bb/fetch2/wget.py b/lib/bb/fetch2/wget.py
index 6d82f3af0..fbfa6938a 100644
--- a/lib/bb/fetch2/wget.py
+++ b/lib/bb/fetch2/wget.py
@@ -26,7 +26,6 @@ from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
from bb.fetch2 import logger
from bb.fetch2 import runfetchcmd
-from bb.utils import export_proxies
from bs4 import BeautifulSoup
from bs4 import SoupStrainer
@@ -52,18 +51,24 @@ class WgetProgressHandler(bb.progress.LineFilterProgressHandler):
class Wget(FetchMethod):
+ """Class to fetch urls via 'wget'"""
# CDNs like CloudFlare may do a 'browser integrity test' which can fail
# with the standard wget/urllib User-Agent, so pretend to be a modern
# browser.
user_agent = "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:84.0) Gecko/20100101 Firefox/84.0"
- """Class to fetch urls via 'wget'"""
+ def check_certs(self, d):
+ """
+ Should certificates be checked?
+ """
+ return (d.getVar("BB_CHECK_SSL_CERTS") or "1") != "0"
+
def supports(self, ud, d):
"""
Check to see if a given url can be fetched with wget.
"""
- return ud.type in ['http', 'https', 'ftp']
+ return ud.type in ['http', 'https', 'ftp', 'ftps']
def recommends_checksum(self, urldata):
return True
@@ -82,7 +87,13 @@ class Wget(FetchMethod):
if not ud.localfile:
ud.localfile = d.expand(urllib.parse.unquote(ud.host + ud.path).replace("/", "."))
- self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -t 2 -T 30 --passive-ftp --no-check-certificate"
+ self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -t 2 -T 30"
+
+ if ud.type == 'ftp' or ud.type == 'ftps':
+ self.basecmd += " --passive-ftp"
+
+ if not self.check_certs(d):
+ self.basecmd += " --no-check-certificate"
def _runwget(self, ud, d, command, quiet, workdir=None):
@@ -97,13 +108,22 @@ class Wget(FetchMethod):
fetchcmd = self.basecmd
- if 'downloadfilename' in ud.parm:
- localpath = os.path.join(d.getVar("DL_DIR"), ud.localfile)
- bb.utils.mkdirhier(os.path.dirname(localpath))
- fetchcmd += " -O %s" % shlex.quote(localpath)
+ localpath = os.path.join(d.getVar("DL_DIR"), ud.localfile) + ".tmp"
+ bb.utils.mkdirhier(os.path.dirname(localpath))
+ fetchcmd += " -O %s" % shlex.quote(localpath)
if ud.user and ud.pswd:
- fetchcmd += " --user=%s --password=%s --auth-no-challenge" % (ud.user, ud.pswd)
+ fetchcmd += " --auth-no-challenge"
+ if ud.parm.get("redirectauth", "1") == "1":
+ # An undocumented feature of wget is that if the
+ # username/password are specified on the URI, wget will only
+ # send the Authorization header to the first host and not to
+ # any hosts that it is redirected to. With the increasing
+ # usage of temporary AWS URLs, this difference now matters as
+ # AWS will reject any request that has authentication both in
+ # the query parameters (from the redirect) and in the
+ # Authorization header.
+ fetchcmd += " --user=%s --password=%s" % (ud.user, ud.pswd)
uri = ud.url.split(";")[0]
if os.path.exists(ud.localpath):
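A hypothetical URL opting out of command-line credentials would look like:

    SRC_URI = "https://downloads.example.com/pkg-1.0.tar.gz;user=myuser;pswd=mypass;redirectauth=0"

With redirectauth=0, only --auth-no-challenge is appended and the explicit --user/--password switches are omitted.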
@@ -114,6 +134,15 @@ class Wget(FetchMethod):
self._runwget(ud, d, fetchcmd, False)
+ # Try and verify any checksum now, meaning if it isn't correct, we don't remove the
+ # original file, which might be a race (imagine two recipes referencing the same
+ # source, one with an incorrect checksum)
+ bb.fetch2.verify_checksum(ud, d, localpath=localpath, fatal_nochecksum=False)
+
+ # Remove the ".tmp" and move the file into position atomically
+ # Our lock prevents multiple writers but mirroring code may grab incomplete files
+ os.rename(localpath, localpath[:-4])
+
# Sanity check since wget can pretend it succeed when it didn't
# Also, this used to happen if sourceforge sent us to the mirror page
if not os.path.exists(ud.localpath):
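The download-verify-rename sequence above is a general pattern; a minimal sketch of the idea (not the BitBake API itself):

    import os

    def fetch_atomically(download, verify, localpath):
        # Download to a temporary name so a failed checksum never
        # clobbers a good file that another consumer may be using.
        tmppath = localpath + ".tmp"
        download(tmppath)              # e.g. wget -O <tmppath>
        verify(tmppath)                # raises before the final file changes
        os.rename(tmppath, localpath)  # atomic within one filesystem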
@@ -209,7 +238,7 @@ class Wget(FetchMethod):
# We let the request fail and expect it to be
# tried once more ("try_again" in check_status()),
# with the dead connection removed from the cache.
- # If it still fails, we give up, which can happend for bad
+ # If it still fails, we give up, which can happen for bad
# HTTP proxy settings.
fetch.connection_cache.remove_connection(h.host, h.port)
raise urllib.error.URLError(err)
@@ -282,64 +311,76 @@ class Wget(FetchMethod):
newreq = urllib.request.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl)
newreq.get_method = req.get_method
return newreq
- exported_proxies = export_proxies(d)
-
- handlers = [FixedHTTPRedirectHandler, HTTPMethodFallback]
- if exported_proxies:
- handlers.append(urllib.request.ProxyHandler())
- handlers.append(CacheHTTPHandler())
- # Since Python 2.7.9 ssl cert validation is enabled by default
- # see PEP-0476, this causes verification errors on some https servers
- # so disable by default.
- import ssl
- if hasattr(ssl, '_create_unverified_context'):
- handlers.append(urllib.request.HTTPSHandler(context=ssl._create_unverified_context()))
- opener = urllib.request.build_opener(*handlers)
-
- try:
- uri = ud.url.split(";")[0]
- r = urllib.request.Request(uri)
- r.get_method = lambda: "HEAD"
- # Some servers (FusionForge, as used on Alioth) require that the
- # optional Accept header is set.
- r.add_header("Accept", "*/*")
- r.add_header("User-Agent", self.user_agent)
- def add_basic_auth(login_str, request):
- '''Adds Basic auth to http request, pass in login:password as string'''
- import base64
- encodeuser = base64.b64encode(login_str.encode('utf-8')).decode("utf-8")
- authheader = "Basic %s" % encodeuser
- r.add_header("Authorization", authheader)
-
- if ud.user and ud.pswd:
- add_basic_auth(ud.user + ':' + ud.pswd, r)
- try:
- import netrc
- n = netrc.netrc()
- login, unused, password = n.authenticators(urllib.parse.urlparse(uri).hostname)
- add_basic_auth("%s:%s" % (login, password), r)
- except (TypeError, ImportError, IOError, netrc.NetrcParseError):
- pass
-
- with opener.open(r) as response:
- pass
- except urllib.error.URLError as e:
- if try_again:
- logger.debug2("checkstatus: trying again")
- return self.checkstatus(fetch, ud, d, False)
+ # We need to update the environment here as both the proxy and HTTPS
+ # handlers need variables set. The proxy needs http_proxy and friends to
+ # be set, and HTTPSHandler ends up calling into openssl to load the
+ # certificates. In buildtools configurations this will look in the
+ # wrong place for certificates by default: we set SSL_CERT_FILE to the
+ # right location in the buildtools environment script but as BitBake
+ # prunes the environment this is lost. When binaries are executed
+ # runfetchcmd ensures these values are in the environment, but this is
+ # pure Python so we need to update the environment.
+ #
+ # Avoid trampling the environment too much by using bb.utils.environment
+ # to scope the changes to the build_opener request, which is when the
+ # environment lookups happen.
+ newenv = bb.fetch2.get_fetcher_environment(d)
+
+ with bb.utils.environment(**newenv):
+ import ssl
+
+ if self.check_certs(d):
+ context = ssl.create_default_context()
else:
- # debug for now to avoid spamming the logs in e.g. remote sstate searches
- logger.debug2("checkstatus() urlopen failed: %s" % e)
- return False
- except ConnectionResetError as e:
- if try_again:
- logger.debug2("checkstatus: trying again")
- return self.checkstatus(fetch, ud, d, False)
- else:
- # debug for now to avoid spamming the logs in e.g. remote sstate searches
- logger.debug2("checkstatus() urlopen failed: %s" % e)
- return False
+ context = ssl._create_unverified_context()
+
+ handlers = [FixedHTTPRedirectHandler,
+ HTTPMethodFallback,
+ urllib.request.ProxyHandler(),
+ CacheHTTPHandler(),
+ urllib.request.HTTPSHandler(context=context)]
+ opener = urllib.request.build_opener(*handlers)
+
+ try:
+ uri_base = ud.url.split(";")[0]
+ uri = "{}://{}{}".format(urllib.parse.urlparse(uri_base).scheme, ud.host, ud.path)
+ r = urllib.request.Request(uri)
+ r.get_method = lambda: "HEAD"
+ # Some servers (FusionForge, as used on Alioth) require that the
+ # optional Accept header is set.
+ r.add_header("Accept", "*/*")
+ r.add_header("User-Agent", self.user_agent)
+ def add_basic_auth(login_str, request):
+ '''Adds Basic auth to http request, pass in login:password as string'''
+ import base64
+ encodeuser = base64.b64encode(login_str.encode('utf-8')).decode("utf-8")
+ authheader = "Basic %s" % encodeuser
+ r.add_header("Authorization", authheader)
+
+ if ud.user and ud.pswd:
+ add_basic_auth(ud.user + ':' + ud.pswd, r)
+
+ try:
+ import netrc
+ auth_data = netrc.netrc().authenticators(urllib.parse.urlparse(uri).hostname)
+ if auth_data:
+ login, _, password = auth_data
+ add_basic_auth("%s:%s" % (login, password), r)
+ except (FileNotFoundError, netrc.NetrcParseError):
+ pass
+
+ with opener.open(r, timeout=30) as response:
+ pass
+ except (urllib.error.URLError, ConnectionResetError, TimeoutError) as e:
+ if try_again:
+ logger.debug2("checkstatus: trying again")
+ return self.checkstatus(fetch, ud, d, False)
+ else:
+ # debug for now to avoid spamming the logs in e.g. remote sstate searches
+ logger.debug2("checkstatus() urlopen failed for %s: %s" % (uri, e))
+ return False
+
return True
def _parse_path(self, regex, s):
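The scoping that bb.utils.environment provides can be pictured as a conventional save/apply/restore context manager; a minimal sketch under that assumption (the real helper may differ in detail):

    import contextlib
    import os

    @contextlib.contextmanager
    def scoped_environment(**updates):
        # Apply the fetcher variables (http_proxy, SSL_CERT_FILE, ...) only
        # for the duration of the block, then restore the previous values.
        saved = {key: os.environ.get(key) for key in updates}
        os.environ.update(updates)
        try:
            yield
        finally:
            for key, old in saved.items():
                if old is None:
                    del os.environ[key]
                else:
                    os.environ[key] = old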
@@ -472,7 +513,7 @@ class Wget(FetchMethod):
version_dir = ['', '', '']
version = ['', '', '']
- dirver_regex = re.compile(r"(?P<pfx>\D*)(?P<ver>(\d+[\.\-_])+(\d+))")
+ dirver_regex = re.compile(r"(?P<pfx>\D*)(?P<ver>(\d+[\.\-_])*(\d+))")
s = dirver_regex.search(dirver)
if s:
version_dir[1] = s.group('ver')
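The quantifier change from + to * lets single-component directory versions match as well; a quick check with the new expression:

    import re

    dirver_regex = re.compile(r"(?P<pfx>\D*)(?P<ver>(\d+[\.\-_])*(\d+))")
    print(dirver_regex.search("v2.0").group("ver"))  # "2.0" (matched before and after)
    print(dirver_regex.search("v2").group("ver"))    # "2" (no match at all with the old '+' form)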
@@ -548,7 +589,7 @@ class Wget(FetchMethod):
# src.rpm extension was added only for rpm package. Can be removed if the rpm
# packaged will always be considered as having to be manually upgraded
- psuffix_regex = r"(tar\.gz|tgz|tar\.bz2|zip|xz|tar\.lz|rpm|bz2|orig\.tar\.gz|tar\.xz|src\.tar\.gz|src\.tgz|svnr\d+\.tar\.bz2|stable\.tar\.gz|src\.rpm)"
+ psuffix_regex = r"(tar\.\w+|tgz|zip|xz|rpm|bz2|orig\.tar\.\w+|src\.tar\.\w+|src\.tgz|svnr\d+\.tar\.\w+|stable\.tar\.\w+|src\.rpm)"
# match name, version and archive type of a package
package_regex_comp = re.compile(r"(?P<name>%s?\.?v?)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s$)"
@@ -599,10 +640,10 @@ class Wget(FetchMethod):
# search for version matches on folders inside the path, like:
# "5.7" in http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz
dirver_regex = re.compile(r"(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/")
- m = dirver_regex.search(path)
+ m = dirver_regex.findall(path)
if m:
pn = d.getVar('PN')
- dirver = m.group('dirver')
+ dirver = m[-1][0]
dirver_pn_regex = re.compile(r"%s\d?" % (re.escape(pn)))
if not dirver_pn_regex.search(dirver):
diff --git a/lib/bb/main.py b/lib/bb/main.py
index 06bad495a..bca8ebfa0 100755
--- a/lib/bb/main.py
+++ b/lib/bb/main.py
@@ -12,11 +12,12 @@
import os
import sys
import logging
-import optparse
+import argparse
import warnings
import fcntl
import time
import traceback
+import datetime
import bb
from bb import event
@@ -43,18 +44,18 @@ def present_options(optionlist):
else:
return optionlist[0]
-class BitbakeHelpFormatter(optparse.IndentedHelpFormatter):
- def format_option(self, option):
+class BitbakeHelpFormatter(argparse.HelpFormatter):
+ def _get_help_string(self, action):
# We need to do this here rather than in the text we supply to
# add_option() because we don't want to call list_extension_modules()
# on every execution (since it imports all of the modules)
# Note also that we modify option.help rather than the returned text
# - this is so that we don't have to re-format the text ourselves
- if option.dest == 'ui':
+ if action.dest == 'ui':
valid_uis = list_extension_modules(bb.ui, 'main')
- option.help = option.help.replace('@CHOICES@', present_options(valid_uis))
+ return action.help.replace('@CHOICES@', present_options(valid_uis))
- return optparse.IndentedHelpFormatter.format_option(self, option)
+ return action.help
def list_extension_modules(pkg, checkattr):
"""
@@ -112,189 +113,209 @@ def _showwarning(message, category, filename, lineno, file=None, line=None):
warnlog.warning(s)
warnings.showwarning = _showwarning
-warnings.filterwarnings("ignore")
-warnings.filterwarnings("default", module="(<string>$|(oe|bb)\.)")
-warnings.filterwarnings("ignore", category=PendingDeprecationWarning)
-warnings.filterwarnings("ignore", category=ImportWarning)
-warnings.filterwarnings("ignore", category=DeprecationWarning, module="<string>$")
-warnings.filterwarnings("ignore", message="With-statements now directly support multiple context managers")
-
def create_bitbake_parser():
- parser = optparse.OptionParser(
- formatter=BitbakeHelpFormatter(),
- version="BitBake Build Tool Core version %s" % bb.__version__,
- usage="""%prog [options] [recipename/target recipe:do_task ...]
-
- Executes the specified task (default is 'build') for a given set of target recipes (.bb files).
- It is assumed there is a conf/bblayers.conf available in cwd or in BBPATH which
- will provide the layer, BBFILES and other configuration information.""")
-
- parser.add_option("-b", "--buildfile", action="store", dest="buildfile", default=None,
- help="Execute tasks from a specific .bb recipe directly. WARNING: Does "
- "not handle any dependencies from other recipes.")
-
- parser.add_option("-k", "--continue", action="store_false", dest="abort", default=True,
- help="Continue as much as possible after an error. While the target that "
- "failed and anything depending on it cannot be built, as much as "
- "possible will be built before stopping.")
-
- parser.add_option("-f", "--force", action="store_true", dest="force", default=False,
- help="Force the specified targets/task to run (invalidating any "
- "existing stamp file).")
-
- parser.add_option("-c", "--cmd", action="store", dest="cmd",
- help="Specify the task to execute. The exact options available "
- "depend on the metadata. Some examples might be 'compile'"
- " or 'populate_sysroot' or 'listtasks' may give a list of "
- "the tasks available.")
-
- parser.add_option("-C", "--clear-stamp", action="store", dest="invalidate_stamp",
- help="Invalidate the stamp for the specified task such as 'compile' "
- "and then run the default task for the specified target(s).")
-
- parser.add_option("-r", "--read", action="append", dest="prefile", default=[],
- help="Read the specified file before bitbake.conf.")
-
- parser.add_option("-R", "--postread", action="append", dest="postfile", default=[],
- help="Read the specified file after bitbake.conf.")
-
- parser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False,
- help="Enable tracing of shell tasks (with 'set -x'). "
- "Also print bb.note(...) messages to stdout (in "
- "addition to writing them to ${T}/log.do_<task>).")
-
- parser.add_option("-D", "--debug", action="count", dest="debug", default=0,
- help="Increase the debug level. You can specify this "
- "more than once. -D sets the debug level to 1, "
- "where only bb.debug(1, ...) messages are printed "
- "to stdout; -DD sets the debug level to 2, where "
- "both bb.debug(1, ...) and bb.debug(2, ...) "
- "messages are printed; etc. Without -D, no debug "
- "messages are printed. Note that -D only affects "
- "output to stdout. All debug messages are written "
- "to ${T}/log.do_taskname, regardless of the debug "
- "level.")
-
- parser.add_option("-q", "--quiet", action="count", dest="quiet", default=0,
- help="Output less log message data to the terminal. You can specify this more than once.")
-
- parser.add_option("-n", "--dry-run", action="store_true", dest="dry_run", default=False,
- help="Don't execute, just go through the motions.")
-
- parser.add_option("-S", "--dump-signatures", action="append", dest="dump_signatures",
- default=[], metavar="SIGNATURE_HANDLER",
- help="Dump out the signature construction information, with no task "
- "execution. The SIGNATURE_HANDLER parameter is passed to the "
- "handler. Two common values are none and printdiff but the handler "
- "may define more/less. none means only dump the signature, printdiff"
- " means compare the dumped signature with the cached one.")
-
- parser.add_option("-p", "--parse-only", action="store_true",
- dest="parse_only", default=False,
- help="Quit after parsing the BB recipes.")
-
- parser.add_option("-s", "--show-versions", action="store_true",
- dest="show_versions", default=False,
- help="Show current and preferred versions of all recipes.")
-
- parser.add_option("-e", "--environment", action="store_true",
- dest="show_environment", default=False,
- help="Show the global or per-recipe environment complete with information"
- " about where variables were set/changed.")
-
- parser.add_option("-g", "--graphviz", action="store_true", dest="dot_graph", default=False,
- help="Save dependency tree information for the specified "
- "targets in the dot syntax.")
-
- parser.add_option("-I", "--ignore-deps", action="append",
- dest="extra_assume_provided", default=[],
- help="Assume these dependencies don't exist and are already provided "
- "(equivalent to ASSUME_PROVIDED). Useful to make dependency "
- "graphs more appealing")
-
- parser.add_option("-l", "--log-domains", action="append", dest="debug_domains", default=[],
- help="Show debug logging for the specified logging domains")
-
- parser.add_option("-P", "--profile", action="store_true", dest="profile", default=False,
- help="Profile the command and save reports.")
+ parser = argparse.ArgumentParser(
+ description="""\
+ It is assumed there is a conf/bblayers.conf available in cwd or in BBPATH which
+ will provide the layer, BBFILES and other configuration information.
+ """,
+ formatter_class=BitbakeHelpFormatter,
+ allow_abbrev=False,
+ add_help=False, # help is manually added below in a specific argument group
+ )
+
+ general_group = parser.add_argument_group('General options')
+ task_group = parser.add_argument_group('Task control options')
+ exec_group = parser.add_argument_group('Execution control options')
+ logging_group = parser.add_argument_group('Logging/output control options')
+ server_group = parser.add_argument_group('Server options')
+ config_group = parser.add_argument_group('Configuration options')
+
+ general_group.add_argument("targets", nargs="*", metavar="recipename/target",
+ help="Execute the specified task (default is 'build') for these target "
+ "recipes (.bb files).")
+
+ general_group.add_argument("-s", "--show-versions", action="store_true",
+ help="Show current and preferred versions of all recipes.")
+
+ general_group.add_argument("-e", "--environment", action="store_true",
+ dest="show_environment",
+ help="Show the global or per-recipe environment complete with information"
+ " about where variables were set/changed.")
+
+ general_group.add_argument("-g", "--graphviz", action="store_true", dest="dot_graph",
+ help="Save dependency tree information for the specified "
+ "targets in the dot syntax.")
# @CHOICES@ is substituted out by BitbakeHelpFormatter above
- parser.add_option("-u", "--ui", action="store", dest="ui",
- default=os.environ.get('BITBAKE_UI', 'knotty'),
- help="The user interface to use (@CHOICES@ - default %default).")
-
- parser.add_option("", "--token", action="store", dest="xmlrpctoken",
- default=os.environ.get("BBTOKEN"),
- help="Specify the connection token to be used when connecting "
- "to a remote server.")
-
- parser.add_option("", "--revisions-changed", action="store_true",
- dest="revisions_changed", default=False,
- help="Set the exit code depending on whether upstream floating "
- "revisions have changed or not.")
-
- parser.add_option("", "--server-only", action="store_true",
- dest="server_only", default=False,
- help="Run bitbake without a UI, only starting a server "
- "(cooker) process.")
-
- parser.add_option("-B", "--bind", action="store", dest="bind", default=False,
- help="The name/address for the bitbake xmlrpc server to bind to.")
-
- parser.add_option("-T", "--idle-timeout", type=float, dest="server_timeout",
- default=os.getenv("BB_SERVER_TIMEOUT"),
- help="Set timeout to unload bitbake server due to inactivity, "
- "set to -1 means no unload, "
- "default: Environment variable BB_SERVER_TIMEOUT.")
-
- parser.add_option("", "--no-setscene", action="store_true",
- dest="nosetscene", default=False,
- help="Do not run any setscene tasks. sstate will be ignored and "
- "everything needed, built.")
-
- parser.add_option("", "--skip-setscene", action="store_true",
- dest="skipsetscene", default=False,
- help="Skip setscene tasks if they would be executed. Tasks previously "
- "restored from sstate will be kept, unlike --no-setscene")
-
- parser.add_option("", "--setscene-only", action="store_true",
- dest="setsceneonly", default=False,
- help="Only run setscene tasks, don't run any real tasks.")
-
- parser.add_option("", "--remote-server", action="store", dest="remote_server",
- default=os.environ.get("BBSERVER"),
- help="Connect to the specified server.")
-
- parser.add_option("-m", "--kill-server", action="store_true",
- dest="kill_server", default=False,
- help="Terminate any running bitbake server.")
-
- parser.add_option("", "--observe-only", action="store_true",
- dest="observe_only", default=False,
- help="Connect to a server as an observing-only client.")
-
- parser.add_option("", "--status-only", action="store_true",
- dest="status_only", default=False,
- help="Check the status of the remote bitbake server.")
-
- parser.add_option("-w", "--write-log", action="store", dest="writeeventlog",
- default=os.environ.get("BBEVENTLOG"),
- help="Writes the event log of the build to a bitbake event json file. "
- "Use '' (empty string) to assign the name automatically.")
-
- parser.add_option("", "--runall", action="append", dest="runall",
- help="Run the specified task for any recipe in the taskgraph of the specified target (even if it wouldn't otherwise have run).")
-
- parser.add_option("", "--runonly", action="append", dest="runonly",
- help="Run only the specified task within the taskgraph of the specified targets (and any task dependencies those tasks may have).")
+ general_group.add_argument("-u", "--ui",
+ default=os.environ.get('BITBAKE_UI', 'knotty'),
+ help="The user interface to use (@CHOICES@ - default %(default)s).")
+
+ general_group.add_argument("--version", action="store_true",
+ help="Show programs version and exit.")
+
+ general_group.add_argument('-h', '--help', action='help',
+ help='Show this help message and exit.')
+
+
+ task_group.add_argument("-f", "--force", action="store_true",
+ help="Force the specified targets/task to run (invalidating any "
+ "existing stamp file).")
+
+ task_group.add_argument("-c", "--cmd",
+ help="Specify the task to execute. The exact options available "
+ "depend on the metadata. Some examples might be 'compile'"
+ " or 'populate_sysroot' or 'listtasks' may give a list of "
+ "the tasks available.")
+
+ task_group.add_argument("-C", "--clear-stamp", dest="invalidate_stamp",
+ help="Invalidate the stamp for the specified task such as 'compile' "
+ "and then run the default task for the specified target(s).")
+
+ task_group.add_argument("--runall", action="append", default=[],
+ help="Run the specified task for any recipe in the taskgraph of the "
+ "specified target (even if it wouldn't otherwise have run).")
+
+ task_group.add_argument("--runonly", action="append",
+ help="Run only the specified task within the taskgraph of the "
+ "specified targets (and any task dependencies those tasks may have).")
+
+ task_group.add_argument("--no-setscene", action="store_true",
+ dest="nosetscene",
+ help="Do not run any setscene tasks. sstate will be ignored and "
+ "everything needed, built.")
+
+ task_group.add_argument("--skip-setscene", action="store_true",
+ dest="skipsetscene",
+ help="Skip setscene tasks if they would be executed. Tasks previously "
+ "restored from sstate will be kept, unlike --no-setscene.")
+
+ task_group.add_argument("--setscene-only", action="store_true",
+ dest="setsceneonly",
+ help="Only run setscene tasks, don't run any real tasks.")
+
+
+ exec_group.add_argument("-n", "--dry-run", action="store_true",
+ help="Don't execute, just go through the motions.")
+
+ exec_group.add_argument("-p", "--parse-only", action="store_true",
+ help="Quit after parsing the BB recipes.")
+
+ exec_group.add_argument("-k", "--continue", action="store_false", dest="halt",
+ help="Continue as much as possible after an error. While the target that "
+ "failed and anything depending on it cannot be built, as much as "
+ "possible will be built before stopping.")
+
+ exec_group.add_argument("-P", "--profile", action="store_true",
+ help="Profile the command and save reports.")
+
+ exec_group.add_argument("-S", "--dump-signatures", action="append",
+ default=[], metavar="SIGNATURE_HANDLER",
+ help="Dump out the signature construction information, with no task "
+ "execution. The SIGNATURE_HANDLER parameter is passed to the "
+ "handler. Two common values are none and printdiff but the handler "
+ "may define more/less. none means only dump the signature, printdiff"
+ " means recursively compare the dumped signature with the most recent"
+ " one in a local build or sstate cache (can be used to find out why tasks re-run"
+ " when that is not expected)")
+
+ exec_group.add_argument("--revisions-changed", action="store_true",
+ help="Set the exit code depending on whether upstream floating "
+ "revisions have changed or not.")
+
+ exec_group.add_argument("-b", "--buildfile",
+ help="Execute tasks from a specific .bb recipe directly. WARNING: Does "
+ "not handle any dependencies from other recipes.")
+
+ logging_group.add_argument("-D", "--debug", action="count", default=0,
+ help="Increase the debug level. You can specify this "
+ "more than once. -D sets the debug level to 1, "
+ "where only bb.debug(1, ...) messages are printed "
+ "to stdout; -DD sets the debug level to 2, where "
+ "both bb.debug(1, ...) and bb.debug(2, ...) "
+ "messages are printed; etc. Without -D, no debug "
+ "messages are printed. Note that -D only affects "
+ "output to stdout. All debug messages are written "
+ "to ${T}/log.do_taskname, regardless of the debug "
+ "level.")
+
+ logging_group.add_argument("-l", "--log-domains", action="append", dest="debug_domains",
+ default=[],
+ help="Show debug logging for the specified logging domains.")
+
+ logging_group.add_argument("-v", "--verbose", action="store_true",
+ help="Enable tracing of shell tasks (with 'set -x'). "
+ "Also print bb.note(...) messages to stdout (in "
+ "addition to writing them to ${T}/log.do_<task>).")
+
+ logging_group.add_argument("-q", "--quiet", action="count", default=0,
+ help="Output less log message data to the terminal. You can specify this "
+ "more than once.")
+
+ logging_group.add_argument("-w", "--write-log", dest="writeeventlog",
+ default=os.environ.get("BBEVENTLOG"),
+ help="Writes the event log of the build to a bitbake event json file. "
+ "Use '' (empty string) to assign the name automatically.")
+
+
+ server_group.add_argument("-B", "--bind", default=False,
+ help="The name/address for the bitbake xmlrpc server to bind to.")
+
+ server_group.add_argument("-T", "--idle-timeout", type=float, dest="server_timeout",
+ default=os.getenv("BB_SERVER_TIMEOUT"),
+ help="Set timeout to unload bitbake server due to inactivity, "
+ "set to -1 means no unload, "
+ "default: Environment variable BB_SERVER_TIMEOUT.")
+
+ server_group.add_argument("--remote-server",
+ default=os.environ.get("BBSERVER"),
+ help="Connect to the specified server.")
+
+ server_group.add_argument("-m", "--kill-server", action="store_true",
+ help="Terminate any running bitbake server.")
+
+ server_group.add_argument("--token", dest="xmlrpctoken",
+ default=os.environ.get("BBTOKEN"),
+ help="Specify the connection token to be used when connecting "
+ "to a remote server.")
+
+ server_group.add_argument("--observe-only", action="store_true",
+ help="Connect to a server as an observing-only client.")
+
+ server_group.add_argument("--status-only", action="store_true",
+ help="Check the status of the remote bitbake server.")
+
+ server_group.add_argument("--server-only", action="store_true",
+ help="Run bitbake without a UI, only starting a server "
+ "(cooker) process.")
+
+
+ config_group.add_argument("-r", "--read", action="append", dest="prefile", default=[],
+ help="Read the specified file before bitbake.conf.")
+
+ config_group.add_argument("-R", "--postread", action="append", dest="postfile", default=[],
+ help="Read the specified file after bitbake.conf.")
+
+
+ config_group.add_argument("-I", "--ignore-deps", action="append",
+ dest="extra_assume_provided", default=[],
+ help="Assume these dependencies don't exist and are already provided "
+ "(equivalent to ASSUME_PROVIDED). Useful to make dependency "
+ "graphs more appealing.")
+
return parser
class BitBakeConfigParameters(cookerdata.ConfigParameters):
def parseCommandLine(self, argv=sys.argv):
parser = create_bitbake_parser()
- options, targets = parser.parse_args(argv)
+ options = parser.parse_intermixed_args(argv[1:])
+
+ if options.version:
+ print("BitBake Build Tool Core version %s" % bb.__version__)
+ sys.exit(0)
if options.quiet and options.verbose:
parser.error("options --quiet and --verbose are mutually exclusive")
@@ -326,7 +347,7 @@ class BitBakeConfigParameters(cookerdata.ConfigParameters):
else:
options.xmlrpcinterface = (None, 0)
- return options, targets[1:]
+ return options, options.targets
def bitbake_main(configParams, configuration):
@@ -391,6 +412,9 @@ def bitbake_main(configParams, configuration):
return 1
+def timestamp():
+ return datetime.datetime.now().strftime('%H:%M:%S.%f')
+
def setup_bitbake(configParams, extrafeatures=None):
# Ensure logging messages get sent to the UI as events
handler = bb.event.LogHandler()
@@ -398,6 +422,11 @@ def setup_bitbake(configParams, extrafeatures=None):
# In status only mode there are no logs and no UI
logger.addHandler(handler)
+ if configParams.dump_signatures:
+ if extrafeatures is None:
+ extrafeatures = []
+ extrafeatures.append(bb.cooker.CookerFeatures.RECIPE_SIGGEN_INFO)
+
if configParams.server_only:
featureset = []
ui_module = None
@@ -425,7 +454,7 @@ def setup_bitbake(configParams, extrafeatures=None):
retries = 8
while retries:
try:
- topdir, lock = lockBitbake()
+ topdir, lock, lockfile = lockBitbake()
sockname = topdir + "/bitbake.sock"
if lock:
if configParams.status_only or configParams.kill_server:
@@ -436,18 +465,22 @@ def setup_bitbake(configParams, extrafeatures=None):
logger.info("Starting bitbake server...")
# Clear the event queue since we already displayed messages
bb.event.ui_queue = []
- server = bb.server.process.BitBakeServer(lock, sockname, featureset, configParams.server_timeout, configParams.xmlrpcinterface)
+ server = bb.server.process.BitBakeServer(lock, sockname, featureset, configParams.server_timeout, configParams.xmlrpcinterface, configParams.profile)
else:
logger.info("Reconnecting to bitbake server...")
if not os.path.exists(sockname):
- logger.info("Previous bitbake instance shutting down?, waiting to retry...")
+ logger.info("Previous bitbake instance shutting down?, waiting to retry... (%s)" % timestamp())
+ procs = bb.server.process.get_lockfile_process_msg(lockfile)
+ if procs:
+ logger.info("Processes holding bitbake.lock (missing socket %s):\n%s" % (sockname, procs))
+ logger.info("Directory listing: %s" % (str(os.listdir(topdir))))
i = 0
lock = None
# Wait for 5s or until we can get the lock
while not lock and i < 50:
time.sleep(0.1)
- _, lock = lockBitbake()
+ _, lock, _ = lockBitbake()
i += 1
if lock:
bb.utils.unlockfile(lock)
@@ -466,9 +499,9 @@ def setup_bitbake(configParams, extrafeatures=None):
retries -= 1
tryno = 8 - retries
if isinstance(e, (bb.server.process.ProcessTimeout, BrokenPipeError, EOFError, SystemExit)):
- logger.info("Retrying server connection (#%d)..." % tryno)
+ logger.info("Retrying server connection (#%d)... (%s)" % (tryno, timestamp()))
else:
- logger.info("Retrying server connection (#%d)... (%s)" % (tryno, traceback.format_exc()))
+ logger.info("Retrying server connection (#%d)... (%s, %s)" % (tryno, traceback.format_exc(), timestamp()))
if not retries:
bb.fatal("Unable to connect to bitbake server, or start one (server startup failures would be in bitbake-cookerdaemon.log).")
@@ -497,5 +530,5 @@ def lockBitbake():
bb.error("Unable to find conf/bblayers.conf or conf/bitbake.conf. BBPATH is unset and/or not in a build directory?")
raise BBMainFatal
lockfile = topdir + "/bitbake.lock"
- return topdir, bb.utils.lockfile(lockfile, False, False)
+ return topdir, bb.utils.lockfile(lockfile, False, False), lockfile
diff --git a/lib/bb/monitordisk.py b/lib/bb/monitordisk.py
index 98f2109ed..f92821035 100644
--- a/lib/bb/monitordisk.py
+++ b/lib/bb/monitordisk.py
@@ -76,7 +76,12 @@ def getDiskData(BBDirs):
return None
action = pathSpaceInodeRe.group(1)
- if action not in ("ABORT", "STOPTASKS", "WARN"):
+ if action == "ABORT":
+ # Emit a deprecation warning
+ logger.warnonce("The BB_DISKMON_DIRS \"ABORT\" action has been renamed to \"HALT\", update configuration")
+ action = "HALT"
+
+ if action not in ("HALT", "STOPTASKS", "WARN"):
printErr("Unknown disk space monitor action: %s" % action)
return None
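Existing configurations using ABORT keep working but warn once; the preferred spelling after this change is (thresholds illustrative):

    BB_DISKMON_DIRS = "HALT,${TMPDIR},1G,100K STOPTASKS,${DL_DIR},1G,100K"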
@@ -177,7 +182,7 @@ class diskMonitor:
# use them to avoid printing too many warning messages
self.preFreeS = {}
self.preFreeI = {}
- # This is for STOPTASKS and ABORT, to avoid printing the message
+ # This is for STOPTASKS and HALT, to avoid printing the message
# repeatedly while waiting for the tasks to finish
self.checked = {}
for k in self.devDict:
@@ -219,8 +224,8 @@ class diskMonitor:
self.checked[k] = True
rq.finish_runqueue(False)
bb.event.fire(bb.event.DiskFull(dev, 'disk', freeSpace, path), self.configuration)
- elif action == "ABORT" and not self.checked[k]:
- logger.error("Immediately abort since the disk space monitor action is \"ABORT\"!")
+ elif action == "HALT" and not self.checked[k]:
+ logger.error("Immediately halt since the disk space monitor action is \"HALT\"!")
self.checked[k] = True
rq.finish_runqueue(True)
bb.event.fire(bb.event.DiskFull(dev, 'disk', freeSpace, path), self.configuration)
@@ -229,9 +234,10 @@ class diskMonitor:
freeInode = st.f_favail
if minInode and freeInode < minInode:
- # Some filesystems use dynamic inodes so can't run out
- # (e.g. btrfs). This is reported by the inode count being 0.
- if st.f_files == 0:
+ # Some filesystems use dynamic inodes so can't run out.
+ # This is reported by the inode count being 0 (btrfs) or the free
+ # inode count being -1 (cephfs).
+ if st.f_files == 0 or st.f_favail == -1:
self.devDict[k][2] = None
continue
# Always show warning, the self.checked would always be False if the action is WARN
@@ -245,8 +251,8 @@ class diskMonitor:
self.checked[k] = True
rq.finish_runqueue(False)
bb.event.fire(bb.event.DiskFull(dev, 'inode', freeInode, path), self.configuration)
- elif action == "ABORT" and not self.checked[k]:
- logger.error("Immediately abort since the disk space monitor action is \"ABORT\"!")
+ elif action == "HALT" and not self.checked[k]:
+ logger.error("Immediately halt since the disk space monitor action is \"HALT\"!")
self.checked[k] = True
rq.finish_runqueue(True)
bb.event.fire(bb.event.DiskFull(dev, 'inode', freeInode, path), self.configuration)
diff --git a/lib/bb/msg.py b/lib/bb/msg.py
index 291b38ff7..3e18596fa 100644
--- a/lib/bb/msg.py
+++ b/lib/bb/msg.py
@@ -30,7 +30,9 @@ class BBLogFormatter(logging.Formatter):
PLAIN = logging.INFO + 1
VERBNOTE = logging.INFO + 2
ERROR = logging.ERROR
+ ERRORONCE = logging.ERROR - 1
WARNING = logging.WARNING
+ WARNONCE = logging.WARNING - 1
CRITICAL = logging.CRITICAL
levelnames = {
@@ -42,7 +44,9 @@ class BBLogFormatter(logging.Formatter):
PLAIN : '',
VERBNOTE: 'NOTE',
WARNING : 'WARNING',
+ WARNONCE : 'WARNING',
ERROR : 'ERROR',
+ ERRORONCE : 'ERROR',
CRITICAL: 'ERROR',
}
@@ -58,7 +62,9 @@ class BBLogFormatter(logging.Formatter):
PLAIN : BASECOLOR,
VERBNOTE: BASECOLOR,
WARNING : YELLOW,
+ WARNONCE : YELLOW,
ERROR : RED,
+ ERRORONCE : RED,
CRITICAL: RED,
}
@@ -121,6 +127,22 @@ class BBLogFilter(object):
return True
return False
+class LogFilterShowOnce(logging.Filter):
+ def __init__(self):
+ self.seen_warnings = set()
+ self.seen_errors = set()
+
+ def filter(self, record):
+ if record.levelno == bb.msg.BBLogFormatter.WARNONCE:
+ if record.msg in self.seen_warnings:
+ return False
+ self.seen_warnings.add(record.msg)
+ if record.levelno == bb.msg.BBLogFormatter.ERRORONCE:
+ if record.msg in self.seen_errors:
+ return False
+ self.seen_errors.add(record.msg)
+ return True
+
class LogFilterGEQLevel(logging.Filter):
def __init__(self, level):
self.strlevel = str(level)
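The new filter keys off the WARNONCE/ERRORONCE levels added above; a minimal usage sketch (logger name arbitrary):

    import logging

    import bb.msg

    log = logging.getLogger("BitBake.Example")
    log.setLevel(logging.DEBUG)
    handler = logging.StreamHandler()
    handler.addFilter(bb.msg.LogFilterShowOnce())
    log.addHandler(handler)

    # The second identical record is suppressed by the filter:
    log.log(bb.msg.BBLogFormatter.WARNONCE, "duplicate warning")
    log.log(bb.msg.BBLogFormatter.WARNONCE, "duplicate warning")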
@@ -206,8 +228,9 @@ def logger_create(name, output=sys.stderr, level=logging.INFO, preserve_handlers
"""Standalone logger creation function"""
logger = logging.getLogger(name)
console = logging.StreamHandler(output)
+ console.addFilter(bb.msg.LogFilterShowOnce())
format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
- if color == 'always' or (color == 'auto' and output.isatty()):
+ if color == 'always' or (color == 'auto' and output.isatty() and os.environ.get('NO_COLOR', '') == ''):
format.enable_color()
console.setFormatter(format)
if preserve_handlers:
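This honours the informal NO_COLOR convention (https://no-color.org/); for example (recipe name hypothetical):

    $ NO_COLOR=1 bitbake somerecipe    # plain output even on a tty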
@@ -293,10 +316,17 @@ def setLoggingConfig(defaultconfig, userconfigfile=None):
# Convert all level parameters to integers in case users want to use the
# bitbake defined level names
- for h in logconfig["handlers"].values():
+ for name, h in logconfig["handlers"].items():
if "level" in h:
h["level"] = bb.msg.stringToLevel(h["level"])
+ # Every handler needs its own instance of the once filter.
+ once_filter_name = name + ".showonceFilter"
+ logconfig.setdefault("filters", {})[once_filter_name] = {
+ "()": "bb.msg.LogFilterShowOnce",
+ }
+ h.setdefault("filters", []).append(once_filter_name)
+
for l in logconfig["loggers"].values():
if "level" in l:
l["level"] = bb.msg.stringToLevel(l["level"])
diff --git a/lib/bb/parse/__init__.py b/lib/bb/parse/__init__.py
index c01807ba8..a4358f137 100644
--- a/lib/bb/parse/__init__.py
+++ b/lib/bb/parse/__init__.py
@@ -60,6 +60,14 @@ def cached_mtime_noerror(f):
return 0
return __mtime_cache[f]
+def check_mtime(f, mtime):
+ try:
+ current_mtime = os.stat(f)[stat.ST_MTIME]
+ __mtime_cache[f] = current_mtime
+ except OSError:
+ current_mtime = 0
+ return current_mtime == mtime
+
def update_mtime(f):
try:
__mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
@@ -99,12 +107,12 @@ def supports(fn, data):
return 1
return 0
-def handle(fn, data, include = 0):
+def handle(fn, data, include=0, baseconfig=False):
"""Call the handler that is appropriate for this file"""
for h in handlers:
if h['supports'](fn, data):
with data.inchistory.include(fn):
- return h['handle'](fn, data, include)
+ return h['handle'](fn, data, include, baseconfig)
raise ParseError("not a BitBake file", fn)
def init(fn, data):
@@ -113,6 +121,8 @@ def init(fn, data):
return h['init'](data)
def init_parser(d):
+ if hasattr(bb.parse, "siggen"):
+ bb.parse.siggen.exit()
bb.parse.siggen = bb.siggen.init(d)
def resolve_file(fn, d):
diff --git a/lib/bb/parse/ast.py b/lib/bb/parse/ast.py
index 50a88f7da..7581d003f 100644
--- a/lib/bb/parse/ast.py
+++ b/lib/bb/parse/ast.py
@@ -9,6 +9,7 @@
# SPDX-License-Identifier: GPL-2.0-only
#
+import sys
import bb
from bb import methodpool
from bb.parse import logger
@@ -130,6 +131,10 @@ class DataNode(AstNode):
else:
val = groupd["value"]
+ if ":append" in key or ":remove" in key or ":prepend" in key:
+ if op in ["append", "prepend", "postdot", "predot", "ques"]:
+ bb.warn(key + " " + groupd[op] + " is not a recommended operator combination, please replace it.")
+
flag = None
if 'flag' in groupd and groupd['flag'] is not None:
flag = groupd['flag']
@@ -145,7 +150,7 @@ class DataNode(AstNode):
data.setVar(key, val, parsing=True, **loginfo)
class MethodNode(AstNode):
- tr_tbl = str.maketrans('/.+-@%&', '_______')
+ tr_tbl = str.maketrans('/.+-@%&~', '________')
def __init__(self, filename, lineno, func_name, body, python, fakeroot):
AstNode.__init__(self, filename, lineno)
@@ -206,10 +211,12 @@ class ExportFuncsNode(AstNode):
def eval(self, data):
+ sentinel = " # Export function set\n"
for func in self.n:
calledfunc = self.classname + "_" + func
- if data.getVar(func, False) and not data.getVarFlag(func, 'export_func', False):
+ basevar = data.getVar(func, False)
+ if basevar and sentinel not in basevar:
continue
if data.getVar(func, False):
@@ -219,19 +226,18 @@ class ExportFuncsNode(AstNode):
for flag in [ "func", "python" ]:
if data.getVarFlag(calledfunc, flag, False):
data.setVarFlag(func, flag, data.getVarFlag(calledfunc, flag, False))
- for flag in [ "dirs" ]:
+ for flag in ["dirs", "cleandirs", "fakeroot"]:
if data.getVarFlag(func, flag, False):
data.setVarFlag(calledfunc, flag, data.getVarFlag(func, flag, False))
data.setVarFlag(func, "filename", "autogenerated")
data.setVarFlag(func, "lineno", 1)
if data.getVarFlag(calledfunc, "python", False):
- data.setVar(func, " bb.build.exec_func('" + calledfunc + "', d)\n", parsing=True)
+ data.setVar(func, sentinel + " bb.build.exec_func('" + calledfunc + "', d)\n", parsing=True)
else:
if "-" in self.classname:
bb.fatal("The classname %s contains a dash character and is calling an sh function %s using EXPORT_FUNCTIONS. Since a dash is illegal in sh function names, this cannot work, please rename the class or don't use EXPORT_FUNCTIONS." % (self.classname, calledfunc))
- data.setVar(func, " " + calledfunc + "\n", parsing=True)
- data.setVarFlag(func, 'export_func', '1')
+ data.setVar(func, sentinel + " " + calledfunc + "\n", parsing=True)
class AddTaskNode(AstNode):
def __init__(self, filename, lineno, func, before, after):
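For context on the sentinel: EXPORT_FUNCTIONS generates a wrapper function whose body begins with the " # Export function set" comment, so re-parsing can distinguish a generated wrapper from a user override. With a hypothetical class:

    # classes/myclass.bbclass
    EXPORT_FUNCTIONS do_deploy

    myclass_do_deploy() {
        :
    }

the generated do_deploy body is " # Export function set\n myclass_do_deploy\n"; a recipe-defined do_deploy lacks the sentinel and is left untouched.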
@@ -265,6 +271,41 @@ class BBHandlerNode(AstNode):
data.setVarFlag(h, "handler", 1)
data.setVar('__BBHANDLERS', bbhands)
+class PyLibNode(AstNode):
+ def __init__(self, filename, lineno, libdir, namespace):
+ AstNode.__init__(self, filename, lineno)
+ self.libdir = libdir
+ self.namespace = namespace
+
+ def eval(self, data):
+ global_mods = (data.getVar("BB_GLOBAL_PYMODULES") or "").split()
+ for m in global_mods:
+ if m not in bb.utils._context:
+ bb.utils._context[m] = __import__(m)
+
+ libdir = data.expand(self.libdir)
+ if libdir not in sys.path:
+ sys.path.append(libdir)
+ try:
+ bb.utils._context[self.namespace] = __import__(self.namespace)
+ toimport = getattr(bb.utils._context[self.namespace], "BBIMPORTS", [])
+ for i in toimport:
+ bb.utils._context[self.namespace] = __import__(self.namespace + "." + i)
+ mod = getattr(bb.utils._context[self.namespace], i)
+ fn = getattr(mod, "__file__")
+ funcs = {}
+ for f in dir(mod):
+ if f.startswith("_"):
+ continue
+ fcall = getattr(mod, f)
+ if not callable(fcall):
+ continue
+ funcs[f] = fcall
+ bb.codeparser.add_module_functions(fn, funcs, "%s.%s" % (self.namespace, i))
+
+ except AttributeError as e:
+ bb.error("Error importing OE modules: %s" % str(e))
+
class InheritNode(AstNode):
def __init__(self, filename, lineno, classes):
AstNode.__init__(self, filename, lineno)
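PyLibNode backs the new addpylib directive (matched in ConfHandler further below) and is only honoured while parsing the base configuration. A hypothetical layer.conf entry:

    addpylib ${LAYERDIR}/lib mylayerlib

The named package may set BBIMPORTS (e.g. BBIMPORTS = ["utils"] in lib/mylayerlib/__init__.py) to have submodules imported and their functions registered with the code parser.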
@@ -273,6 +314,16 @@ class InheritNode(AstNode):
def eval(self, data):
bb.parse.BBHandler.inherit(self.classes, self.filename, self.lineno, data)
+class InheritDeferredNode(AstNode):
+ def __init__(self, filename, lineno, classes):
+ AstNode.__init__(self, filename, lineno)
+ self.inherit = (classes, filename, lineno)
+
+ def eval(self, data):
+ inherits = data.getVar('__BBDEFINHERITS', False) or []
+ inherits.append(self.inherit)
+ data.setVar('__BBDEFINHERITS', inherits)
+
def handleInclude(statements, filename, lineno, m, force):
statements.append(IncludeNode(filename, lineno, m.group(1), force))
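InheritDeferredNode implements the new inherit_defer statement (matched in BBHandler below): the classes are queued in __BBDEFINHERITS and only resolved in multi_finalize(), so conditional expressions see final variable values. A hypothetical use:

    inherit_defer ${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'myclass-x11', 'myclass', d)}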
@@ -316,10 +367,17 @@ def handleDelTask(statements, filename, lineno, m):
def handleBBHandlers(statements, filename, lineno, m):
statements.append(BBHandlerNode(filename, lineno, m.group(1)))
+def handlePyLib(statements, filename, lineno, m):
+ statements.append(PyLibNode(filename, lineno, m.group(1), m.group(2)))
+
def handleInherit(statements, filename, lineno, m):
classes = m.group(1)
statements.append(InheritNode(filename, lineno, classes))
+def handleInheritDeferred(statements, filename, lineno, m):
+ classes = m.group(1)
+ statements.append(InheritDeferredNode(filename, lineno, classes))
+
def runAnonFuncs(d):
code = []
for funcname in d.getVar("__BBANONFUNCS", False) or []:
@@ -329,6 +387,10 @@ def runAnonFuncs(d):
def finalize(fn, d, variant = None):
saved_handlers = bb.event.get_handlers().copy()
try:
+ # Found renamed variables. Exit immediately
+ if d.getVar("_FAILPARSINGERRORHANDLED", False) == True:
+ raise bb.BBHandledException()
+
for var in d.getVar('__BBHANDLERS', False) or []:
# try to add the handler
handlerfn = d.getVarFlag(var, "filename", False)
@@ -353,6 +415,9 @@ def finalize(fn, d, variant = None):
d.setVar('BBINCLUDED', bb.parse.get_file_depends(d))
+ if d.getVar('__BBAUTOREV_SEEN') and d.getVar('__BBSRCREV_SEEN') and not d.getVar("__BBAUTOREV_ACTED_UPON"):
+ bb.fatal("AUTOREV/SRCPV set too late for the fetcher to work properly, please set the variables earlier in parsing. Erroring instead of later obtuse build failures.")
+
bb.event.fire(bb.event.RecipeParsed(fn), d)
finally:
bb.event.set_handlers(saved_handlers)
@@ -379,6 +444,14 @@ def multi_finalize(fn, d):
logger.debug("Appending .bbappend file %s to %s", append, fn)
bb.parse.BBHandler.handle(append, d, True)
+ while True:
+ inherits = d.getVar('__BBDEFINHERITS', False) or []
+ if not inherits:
+ break
+ inherit, filename, lineno = inherits.pop(0)
+ d.setVar('__BBDEFINHERITS', inherits)
+ bb.parse.BBHandler.inherit(inherit, filename, lineno, d, deferred=True)
+
onlyfinalise = d.getVar("__ONLYFINALISE", False)
safe_d = d
diff --git a/lib/bb/parse/parse_py/BBHandler.py b/lib/bb/parse/parse_py/BBHandler.py
index f8988b863..c13e4b975 100644
--- a/lib/bb/parse/parse_py/BBHandler.py
+++ b/lib/bb/parse/parse_py/BBHandler.py
@@ -19,11 +19,9 @@ from . import ConfHandler
from .. import resolve_file, ast, logger, ParseError
from .ConfHandler import include, init
-# For compatibility
-bb.deprecate_import(__name__, "bb.parse", ["vars_from_file"])
-
-__func_start_regexp__ = re.compile(r"(((?P<py>python(?=(\s|\()))|(?P<fr>fakeroot(?=\s)))\s*)*(?P<func>[\w\.\-\+\{\}\$]+)?\s*\(\s*\)\s*{$" )
+__func_start_regexp__ = re.compile(r"(((?P<py>python(?=(\s|\()))|(?P<fr>fakeroot(?=\s)))\s*)*(?P<func>[\w\.\-\+\{\}\$:]+)?\s*\(\s*\)\s*{$" )
__inherit_regexp__ = re.compile(r"inherit\s+(.+)" )
+__inherit_def_regexp__ = re.compile(r"inherit_defer\s+(.+)" )
__export_func_regexp__ = re.compile(r"EXPORT_FUNCTIONS\s+(.+)" )
__addtask_regexp__ = re.compile(r"addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*")
__deltask_regexp__ = re.compile(r"deltask\s+(.+)")
@@ -36,6 +34,7 @@ __infunc__ = []
__inpython__ = False
__body__ = []
__classname__ = ""
+__residue__ = []
cached_statements = {}
@@ -43,31 +42,46 @@ def supports(fn, d):
"""Return True if fn has a supported extension"""
return os.path.splitext(fn)[-1] in [".bb", ".bbclass", ".inc"]
-def inherit(files, fn, lineno, d):
+def inherit(files, fn, lineno, d, deferred=False):
__inherit_cache = d.getVar('__inherit_cache', False) or []
+ #if "${" in files and not deferred:
+ # bb.warn("%s:%s has non deferred conditional inherit" % (fn, lineno))
files = d.expand(files).split()
for file in files:
- if not os.path.isabs(file) and not file.endswith(".bbclass"):
- file = os.path.join('classes', '%s.bbclass' % file)
-
- if not os.path.isabs(file):
- bbpath = d.getVar("BBPATH")
- abs_fn, attempts = bb.utils.which(bbpath, file, history=True)
- for af in attempts:
- if af != abs_fn:
- bb.parse.mark_dependency(d, af)
- if abs_fn:
- file = abs_fn
+ classtype = d.getVar("__bbclasstype", False)
+ origfile = file
+ for t in ["classes-" + classtype, "classes"]:
+ file = origfile
+ if not os.path.isabs(file) and not file.endswith(".bbclass"):
+ file = os.path.join(t, '%s.bbclass' % file)
+
+ if not os.path.isabs(file):
+ bbpath = d.getVar("BBPATH")
+ abs_fn, attempts = bb.utils.which(bbpath, file, history=True)
+ for af in attempts:
+ if af != abs_fn:
+ bb.parse.mark_dependency(d, af)
+ if abs_fn:
+ file = abs_fn
+
+ if os.path.exists(file):
+ break
+
+ if not os.path.exists(file):
+ raise ParseError("Could not inherit file %s" % (file), fn, lineno)
if not file in __inherit_cache:
logger.debug("Inheriting %s (from %s:%d)" % (file, fn, lineno))
__inherit_cache.append( file )
d.setVar('__inherit_cache', __inherit_cache)
- include(fn, file, lineno, d, "inherit")
+ try:
+ bb.parse.handle(file, d, True)
+ except (IOError, OSError) as exc:
+ raise ParseError("Could not inherit file %s: %s" % (fn, exc.strerror), fn, lineno)
__inherit_cache = d.getVar('__inherit_cache', False) or []
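The loop above searches a scope-specific class directory before the generic one; with a hypothetical __bbclasstype of "recipe" and inherit example, the lookup order is:

    classes-recipe/example.bbclass   # tried first in recipe context
    classes/example.bbclass          # generic fallback for all contexts

and a class found in neither now raises ParseError at the inherit site.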
def get_statements(filename, absolute_filename, base_name):
- global cached_statements
+ global cached_statements, __residue__, __body__
try:
return cached_statements[absolute_filename]
@@ -87,12 +101,17 @@ def get_statements(filename, absolute_filename, base_name):
# add a blank line to close out any python definition
feeder(lineno, "", filename, base_name, statements, eof=True)
+ if __residue__:
+ raise ParseError("Unparsed lines %s: %s" % (filename, str(__residue__)), filename, lineno)
+ if __body__:
+ raise ParseError("Unparsed lines from unclosed function %s: %s" % (filename, str(__body__)), filename, lineno)
+
if filename.endswith(".bbclass") or filename.endswith(".inc"):
cached_statements[absolute_filename] = statements
return statements
-def handle(fn, d, include):
- global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __infunc__, __body__, __residue__, __classname__
+def handle(fn, d, include, baseconfig=False):
+ global __infunc__, __body__, __residue__, __classname__
__body__ = []
__infunc__ = []
__classname__ = ""
@@ -144,7 +163,7 @@ def handle(fn, d, include):
return d
def feeder(lineno, s, fn, root, statements, eof=False):
- global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __def_regexp__, __python_func_regexp__, __inpython__, __infunc__, __body__, bb, __residue__, __classname__
+ global __inpython__, __infunc__, __body__, __residue__, __classname__
# Check tabs in python functions:
# - def py_funcname(): covered by __inpython__
@@ -181,10 +200,10 @@ def feeder(lineno, s, fn, root, statements, eof=False):
if s and s[0] == '#':
if len(__residue__) != 0 and __residue__[0][0] != "#":
- bb.fatal("There is a comment on line %s of file %s (%s) which is in the middle of a multiline expression.\nBitbake used to ignore these but no longer does so, please fix your metadata as errors are likely as a result of this change." % (lineno, fn, s))
+ bb.fatal("There is a comment on line %s of file %s:\n'''\n%s\n'''\nwhich is in the middle of a multiline expression. This syntax is invalid, please correct it." % (lineno, fn, s))
if len(__residue__) != 0 and __residue__[0][0] == "#" and (not s or s[0] != "#"):
- bb.fatal("There is a confusing multiline, partially commented expression on line %s of file %s (%s).\nPlease clarify whether this is all a comment or should be parsed." % (lineno, fn, s))
+ bb.fatal("There is a confusing multiline partially commented expression on line %s of file %s:\n%s\nPlease clarify whether this is all a comment or should be parsed." % (lineno - len(__residue__), fn, "\n".join(__residue__)))
if s and s[-1] == '\\':
__residue__.append(s[:-1])
@@ -255,7 +274,12 @@ def feeder(lineno, s, fn, root, statements, eof=False):
ast.handleInherit(statements, fn, lineno, m)
return
- return ConfHandler.feeder(lineno, s, fn, statements)
+ m = __inherit_def_regexp__.match(s)
+ if m:
+ ast.handleInheritDeferred(statements, fn, lineno, m)
+ return
+
+ return ConfHandler.feeder(lineno, s, fn, statements, conffile=False)
# Add us to the handlers list
from .. import handlers
diff --git a/lib/bb/parse/parse_py/ConfHandler.py b/lib/bb/parse/parse_py/ConfHandler.py
index f171c5c93..7826dee7d 100644
--- a/lib/bb/parse/parse_py/ConfHandler.py
+++ b/lib/bb/parse/parse_py/ConfHandler.py
@@ -20,8 +20,8 @@ from bb.parse import ParseError, resolve_file, ast, logger, handle
__config_regexp__ = re.compile( r"""
^
(?P<exp>export\s+)?
- (?P<var>[a-zA-Z0-9\-_+.${}/~]+?)
- (\[(?P<flag>[a-zA-Z0-9\-_+.]+)\])?
+ (?P<var>[a-zA-Z0-9\-_+.${}/~:]+?)
+ (\[(?P<flag>[a-zA-Z0-9\-_+.][a-zA-Z0-9\-_+.@]*)\])?
\s* (
(?P<colon>:=) |
@@ -45,13 +45,11 @@ __include_regexp__ = re.compile( r"include\s+(.+)" )
__require_regexp__ = re.compile( r"require\s+(.+)" )
__export_regexp__ = re.compile( r"export\s+([a-zA-Z0-9\-_+.${}/~]+)$" )
__unset_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)$" )
-__unset_flag_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)\[([a-zA-Z0-9\-_+.]+)\]$" )
+__unset_flag_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)\[([a-zA-Z0-9\-_+.][a-zA-Z0-9\-_+.@]+)\]$" )
+__addpylib_regexp__ = re.compile(r"addpylib\s+(.+)\s+(.+)" )
def init(data):
- topdir = data.getVar('TOPDIR', False)
- if not topdir:
- data.setVar('TOPDIR', os.getcwd())
-
+ return
def supports(fn, d):
return fn[-5:] == ".conf"
@@ -105,12 +103,12 @@ def include_single_file(parentfn, fn, lineno, data, error_out):
# We have an issue where a UI might want to enforce particular settings such as
# an empty DISTRO variable. If configuration files do something like assigning
# a weak default, it turns out to be very difficult to filter out these changes,
-# particularly when the weak default might appear half way though parsing a chain
+# particularly when the weak default might appear halfway through parsing a chain
# of configuration files. We therefore let the UIs hook into configuration file
# parsing. This turns out to be a hard problem to solve any other way.
confFilters = []
-def handle(fn, data, include):
+def handle(fn, data, include, baseconfig=False):
init(data)
if include == 0:
@@ -128,21 +126,26 @@ def handle(fn, data, include):
s = f.readline()
if not s:
break
+ origlineno = lineno
+ origline = s
w = s.strip()
# skip empty lines
if not w:
continue
s = s.rstrip()
while s[-1] == '\\':
- s2 = f.readline().rstrip()
+ line = f.readline()
+ origline += line
+ s2 = line.rstrip()
lineno = lineno + 1
if (not s2 or s2 and s2[0] != "#") and s[0] == "#" :
- bb.fatal("There is a confusing multiline, partially commented expression on line %s of file %s (%s).\nPlease clarify whether this is all a comment or should be parsed." % (lineno, fn, s))
+ bb.fatal("There is a confusing multiline, partially commented expression starting on line %s of file %s:\n%s\nPlease clarify whether this is all a comment or should be parsed." % (origlineno, fn, origline))
+
s = s[:-1] + s2
# skip comments
if s[0] == '#':
continue
- feeder(lineno, s, abs_fn, statements)
+ feeder(lineno, s, abs_fn, statements, baseconfig=baseconfig)
# DONE WITH PARSING... time to evaluate
data.setVar('FILE', abs_fn)
@@ -150,14 +153,14 @@ def handle(fn, data, include):
if oldfile:
data.setVar('FILE', oldfile)
- f.close()
-
for f in confFilters:
f(fn, data)
return data
-def feeder(lineno, s, fn, statements):
+# baseconfig is set for the bblayers/layer.conf cookerdata config parsing
+# The function is also used by BBHandler, conffile would be False
+def feeder(lineno, s, fn, statements, baseconfig=False, conffile=True):
m = __config_regexp__.match(s)
if m:
groupd = m.groupdict()
@@ -189,6 +192,11 @@ def feeder(lineno, s, fn, statements):
ast.handleUnsetFlag(statements, fn, lineno, m)
return
+ m = __addpylib_regexp__.match(s)
+ if baseconfig and conffile and m:
+ ast.handlePyLib(statements, fn, lineno, m)
+ return
+
raise ParseError("unparsed line: '%s'" % s, fn, lineno);
# Add us to the handlers list
diff --git a/lib/bb/persist_data.py b/lib/bb/persist_data.py
index c6a209fb3..bcca791ed 100644
--- a/lib/bb/persist_data.py
+++ b/lib/bb/persist_data.py
@@ -12,14 +12,14 @@ currently, providing a key/value store accessed by 'domain'.
#
import collections
+import collections.abc
import contextlib
import functools
import logging
import os.path
import sqlite3
import sys
-import warnings
-from collections import Mapping
+from collections.abc import Mapping
sqlversion = sqlite3.sqlite_version_info
if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
@@ -29,7 +29,7 @@ if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
logger = logging.getLogger("BitBake.PersistData")
@functools.total_ordering
-class SQLTable(collections.MutableMapping):
+class SQLTable(collections.abc.MutableMapping):
class _Decorators(object):
@staticmethod
def retry(*, reconnect=True):
@@ -63,7 +63,7 @@ class SQLTable(collections.MutableMapping):
"""
Decorator that starts a database transaction and creates a database
cursor for performing queries. If no exception is thrown, the
- database results are commited. If an exception occurs, the database
+ database results are committed. If an exception occurs, the database
is rolled back. In all cases, the cursor is closed after the
function ends.
@@ -208,7 +208,7 @@ class SQLTable(collections.MutableMapping):
def __lt__(self, other):
if not isinstance(other, Mapping):
- raise NotImplemented
+ raise NotImplementedError()
return len(self) < len(other)
@@ -238,55 +238,6 @@ class SQLTable(collections.MutableMapping):
def has_key(self, key):
return key in self
-
-class PersistData(object):
- """Deprecated representation of the bitbake persistent data store"""
- def __init__(self, d):
- warnings.warn("Use of PersistData is deprecated. Please use "
- "persist(domain, d) instead.",
- category=DeprecationWarning,
- stacklevel=2)
-
- self.data = persist(d)
- logger.debug("Using '%s' as the persistent data cache",
- self.data.filename)
-
- def addDomain(self, domain):
- """
- Add a domain (pending deprecation)
- """
- return self.data[domain]
-
- def delDomain(self, domain):
- """
- Removes a domain and all the data it contains
- """
- del self.data[domain]
-
- def getKeyValues(self, domain):
- """
- Return a list of key + value pairs for a domain
- """
- return list(self.data[domain].items())
-
- def getValue(self, domain, key):
- """
- Return the value of a key for a domain
- """
- return self.data[domain][key]
-
- def setValue(self, domain, key, value):
- """
- Sets the value of a key for a domain
- """
- self.data[domain][key] = value
-
- def delValue(self, domain, key):
- """
- Deletes a key/value pair
- """
- del self.data[domain][key]
-
def persist(domain, d):
"""Convenience factory for SQLTable objects based upon metadata"""
import bb.utils
@@ -298,4 +249,23 @@ def persist(domain, d):
bb.utils.mkdirhier(cachedir)
cachefile = os.path.join(cachedir, "bb_persist_data.sqlite3")
- return SQLTable(cachefile, domain)
+
+ try:
+ return SQLTable(cachefile, domain)
+ except sqlite3.OperationalError:
+ # Sqlite fails to open a database when its path is too long.
+ # Testing shows 504 characters is the longest path that sqlite
+ # can open.
+ # Note: This code is called before sanity.bbclass and its path length
+ # check
+ max_len = 504
+ if len(cachefile) > max_len:
+ logger.critical("The path of the cache file is too long "
+ "({0} chars > {1}) to be opened by sqlite! "
+ "Your cache file is \"{2}\"".format(
+ len(cachefile),
+ max_len,
+ cachefile))
+ sys.exit(1)
+ else:
+ raise
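A standalone sketch of the failure mode the new guard in persist() handles, assuming only the stdlib sqlite3 behaviour described in the comments above:

# Minimal sketch: sqlite3 raises OperationalError ("unable to open database
# file") for overlong paths; per the comment above, ~504 characters was the
# observed limit. Mirrors the new persist() error handling.
import sqlite3

def open_cache(cachefile, max_len=504):
    try:
        return sqlite3.connect(cachefile)
    except sqlite3.OperationalError:
        if len(cachefile) > max_len:
            raise SystemExit("The path of the cache file is too long "
                             "(%d chars > %d): %s"
                             % (len(cachefile), max_len, cachefile))
        raise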
diff --git a/lib/bb/process.py b/lib/bb/process.py
index 7c3995cce..4c7b6d39d 100644
--- a/lib/bb/process.py
+++ b/lib/bb/process.py
@@ -1,4 +1,6 @@
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -60,7 +62,7 @@ class Popen(subprocess.Popen):
"close_fds": True,
"preexec_fn": subprocess_setup,
"stdout": subprocess.PIPE,
- "stderr": subprocess.STDOUT,
+ "stderr": subprocess.PIPE,
"stdin": subprocess.PIPE,
"shell": False,
}
@@ -142,7 +144,7 @@ def _logged_communicate(pipe, log, input, extrafiles):
while pipe.poll() is None:
read_all_pipes(log, rin, outdata, errdata)
- # Pocess closed, drain all pipes...
+ # Process closed, drain all pipes...
read_all_pipes(log, rin, outdata, errdata)
finally:
log.flush()
@@ -181,5 +183,8 @@ def run(cmd, input=None, log=None, extrafiles=None, **options):
stderr = stderr.decode("utf-8")
if pipe.returncode != 0:
+ if log:
+ # Don't duplicate the output in the exception if logging it
+ raise ExecutionError(cmd, pipe.returncode, None, None)
raise ExecutionError(cmd, pipe.returncode, stdout, stderr)
return stdout, stderr
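To illustrate why the default changed from STDOUT to PIPE above: with stderr merged into stdout, the two streams cannot be reported separately in ExecutionError. A minimal sketch using plain subprocess:

import subprocess

# With stderr=subprocess.PIPE (the new default) the streams stay separate:
p = subprocess.Popen(["sh", "-c", "echo out; echo err >&2"],
                     stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
print(out.decode())  # "out" - stdout only
print(err.decode())  # "err" - stderr available separately for error reports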
diff --git a/lib/bb/progress.py b/lib/bb/progress.py
index 52d704d64..9518be77f 100644
--- a/lib/bb/progress.py
+++ b/lib/bb/progress.py
@@ -148,7 +148,7 @@ class MultiStageProgressReporter:
for tasks made up of python code spread across multiple
classes / functions - the progress reporter object can
be passed around or stored at the object level and calls
- to next_stage() and update() made whereever needed.
+ to next_stage() and update() made wherever needed.
"""
def __init__(self, d, stage_weights, debug=False):
"""
diff --git a/lib/bb/providers.py b/lib/bb/providers.py
index 3ec11a40e..e11a4637d 100644
--- a/lib/bb/providers.py
+++ b/lib/bb/providers.py
@@ -94,7 +94,7 @@ def versionVariableMatch(cfgData, keyword, pn):
# pn can contain '_', e.g. gcc-cross-x86_64, and an override cannot
# hence we do this manually rather than use OVERRIDES
- ver = cfgData.getVar("%s_VERSION_pn-%s" % (keyword, pn))
+ ver = cfgData.getVar("%s_VERSION:pn-%s" % (keyword, pn))
if not ver:
ver = cfgData.getVar("%s_VERSION_%s" % (keyword, pn))
if not ver:
@@ -133,7 +133,7 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
if required_v is not None:
if preferred_v is not None:
- logger.warn("REQUIRED_VERSION and PREFERRED_VERSION for package %s%s are both set using REQUIRED_VERSION %s", pn, itemstr, required_v)
+ logger.warning("REQUIRED_VERSION and PREFERRED_VERSION for package %s%s are both set using REQUIRED_VERSION %s", pn, itemstr, required_v)
else:
logger.debug("REQUIRED_VERSION is set for package %s%s", pn, itemstr)
# REQUIRED_VERSION always takes precedence over PREFERRED_VERSION
@@ -173,7 +173,7 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
pv_str = '%s:%s' % (preferred_e, pv_str)
if preferred_file is None:
if not required:
- logger.warn("preferred version %s of %s not available%s", pv_str, pn, itemstr)
+ logger.warning("preferred version %s of %s not available%s", pv_str, pn, itemstr)
available_vers = []
for file_set in pkg_pn:
for f in file_set:
@@ -185,7 +185,7 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
available_vers.append(ver_str)
if available_vers:
available_vers.sort()
- logger.warn("versions of %s available: %s", pn, ' '.join(available_vers))
+ logger.warning("versions of %s available: %s", pn, ' '.join(available_vers))
if required:
logger.error("required version %s of %s not available%s", pv_str, pn, itemstr)
else:
@@ -396,8 +396,8 @@ def getRuntimeProviders(dataCache, rdepend):
return rproviders
# Only search dynamic packages if we can't find anything in other variables
- for pattern in dataCache.packages_dynamic:
- pattern = pattern.replace(r'+', r"\+")
+ for pat_key in dataCache.packages_dynamic:
+ pattern = pat_key.replace(r'+', r"\+")
if pattern in regexp_cache:
regexp = regexp_cache[pattern]
else:
@@ -408,7 +408,7 @@ def getRuntimeProviders(dataCache, rdepend):
raise
regexp_cache[pattern] = regexp
if regexp.match(rdepend):
- rproviders += dataCache.packages_dynamic[pattern]
+ rproviders += dataCache.packages_dynamic[pat_key]
logger.debug("Assuming %s is a dynamic package, but it may not exist" % rdepend)
return rproviders
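The getRuntimeProviders() change above fixes a subtle key mix-up: the loop variable was previously overwritten with the regex-escaped pattern, so the final dictionary lookup could use a key that does not exist for package names containing '+'. A reduced illustration (the entry is hypothetical):

import re

packages_dynamic = {"^libfoo-c++.*": ["provider-a"]}  # hypothetical entry
regexp_cache = {}
rdepend = "libfoo-c++-dev"

for pat_key in packages_dynamic:
    pattern = pat_key.replace(r'+', r"\+")  # escaped copy used only for re
    regexp = regexp_cache.setdefault(pattern, re.compile(pattern))
    if regexp.match(rdepend):
        # must index with the original key, not the escaped one
        print(packages_dynamic[pat_key])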
diff --git a/lib/bb/runqueue.py b/lib/bb/runqueue.py
index 6c41fe6d4..bc7e18175 100644
--- a/lib/bb/runqueue.py
+++ b/lib/bb/runqueue.py
@@ -24,6 +24,7 @@ import pickle
from multiprocessing import Process
import shlex
import pprint
+import time
bblogger = logging.getLogger("BitBake")
logger = logging.getLogger("BitBake.RunQueue")
@@ -85,15 +86,19 @@ class RunQueueStats:
"""
Holds statistics on the tasks handled by the associated runQueue
"""
- def __init__(self, total):
+ def __init__(self, total, setscene_total):
self.completed = 0
self.skipped = 0
self.failed = 0
self.active = 0
+ self.setscene_active = 0
+ self.setscene_covered = 0
+ self.setscene_notcovered = 0
+ self.setscene_total = setscene_total
self.total = total
def copy(self):
- obj = self.__class__(self.total)
+ obj = self.__class__(self.total, self.setscene_total)
obj.__dict__.update(self.__dict__)
return obj
@@ -112,6 +117,13 @@ class RunQueueStats:
def taskActive(self):
self.active = self.active + 1
+ def updateCovered(self, covered, notcovered):
+ self.setscene_covered = covered
+ self.setscene_notcovered = notcovered
+
+ def updateActiveSetscene(self, active):
+ self.setscene_active = active
+
# These values indicate the next step due to be run in the
# runQueue state machine
runQueuePrepare = 2
@@ -143,11 +155,82 @@ class RunQueueScheduler(object):
self.stamps = {}
for tid in self.rqdata.runtaskentries:
(mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
- self.stamps[tid] = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn, noextra=True)
+ self.stamps[tid] = bb.parse.siggen.stampfile_mcfn(taskname, taskfn, extrainfo=False)
if tid in self.rq.runq_buildable:
- self.buildable.append(tid)
+ self.buildable.add(tid)
self.rev_prio_map = None
+ self.is_pressure_usable()
+
+ def is_pressure_usable(self):
+ """
+ If monitoring pressure, return True if the pressure files can be opened and read. For
+ example, openSUSE's /proc/pressure/* files have readable permissions, but reading them
+ returns the error EOPNOTSUPP (Operation not supported).
+ """
+ if self.rq.max_cpu_pressure or self.rq.max_io_pressure or self.rq.max_memory_pressure:
+ try:
+ with open("/proc/pressure/cpu") as cpu_pressure_fds, \
+ open("/proc/pressure/io") as io_pressure_fds, \
+ open("/proc/pressure/memory") as memory_pressure_fds:
+
+ self.prev_cpu_pressure = cpu_pressure_fds.readline().split()[4].split("=")[1]
+ self.prev_io_pressure = io_pressure_fds.readline().split()[4].split("=")[1]
+ self.prev_memory_pressure = memory_pressure_fds.readline().split()[4].split("=")[1]
+ self.prev_pressure_time = time.time()
+ self.check_pressure = True
+ except Exception:
+ bb.note("The /proc/pressure files can't be read. Continuing build without monitoring pressure")
+ self.check_pressure = False
+ else:
+ self.check_pressure = False
+
+ def exceeds_max_pressure(self):
+ """
+ Monitor the difference in total pressure at least once per second. If
+ BB_PRESSURE_MAX_{CPU|IO|MEMORY} is set, return True when above the threshold.
+ """
+ if self.check_pressure:
+ with open("/proc/pressure/cpu") as cpu_pressure_fds, \
+ open("/proc/pressure/io") as io_pressure_fds, \
+ open("/proc/pressure/memory") as memory_pressure_fds:
+ # extract "total" from /proc/pressure/{cpu|io}
+ curr_cpu_pressure = cpu_pressure_fds.readline().split()[4].split("=")[1]
+ curr_io_pressure = io_pressure_fds.readline().split()[4].split("=")[1]
+ curr_memory_pressure = memory_pressure_fds.readline().split()[4].split("=")[1]
+ now = time.time()
+ tdiff = now - self.prev_pressure_time
+ psi_accumulation_interval = 1.0
+ cpu_pressure = (float(curr_cpu_pressure) - float(self.prev_cpu_pressure)) / tdiff
+ io_pressure = (float(curr_io_pressure) - float(self.prev_io_pressure)) / tdiff
+ memory_pressure = (float(curr_memory_pressure) - float(self.prev_memory_pressure)) / tdiff
+ exceeds_cpu_pressure = self.rq.max_cpu_pressure and cpu_pressure > self.rq.max_cpu_pressure
+ exceeds_io_pressure = self.rq.max_io_pressure and io_pressure > self.rq.max_io_pressure
+ exceeds_memory_pressure = self.rq.max_memory_pressure and memory_pressure > self.rq.max_memory_pressure
+
+ if tdiff > psi_accumulation_interval:
+ self.prev_cpu_pressure = curr_cpu_pressure
+ self.prev_io_pressure = curr_io_pressure
+ self.prev_memory_pressure = curr_memory_pressure
+ self.prev_pressure_time = now
+
+ pressure_state = (exceeds_cpu_pressure, exceeds_io_pressure, exceeds_memory_pressure)
+ pressure_values = (round(cpu_pressure,1), self.rq.max_cpu_pressure, round(io_pressure,1), self.rq.max_io_pressure, round(memory_pressure,1), self.rq.max_memory_pressure)
+ if hasattr(self, "pressure_state") and pressure_state != self.pressure_state:
+ bb.note("Pressure status changed to CPU: %s, IO: %s, Mem: %s (CPU: %s/%s, IO: %s/%s, Mem: %s/%s) - using %s/%s bitbake threads" % (pressure_state + pressure_values + (len(self.rq.runq_running.difference(self.rq.runq_complete)), self.rq.number_tasks)))
+ self.pressure_state = pressure_state
+ return (exceeds_cpu_pressure or exceeds_io_pressure or exceeds_memory_pressure)
+ elif self.rq.max_loadfactor:
+ limit = False
+ loadfactor = float(os.getloadavg()[0]) / os.cpu_count()
+ # bb.warn("Comparing %s to %s" % (loadfactor, self.rq.max_loadfactor))
+ if loadfactor > self.rq.max_loadfactor:
+ limit = True
+ if hasattr(self, "loadfactor_limit") and limit != self.loadfactor_limit:
+ bb.note("Load average limiting set to %s as load average: %s - using %s/%s bitbake threads" % (limit, loadfactor, len(self.rq.runq_running.difference(self.rq.runq_complete)), self.rq.number_tasks))
+ self.loadfactor_limit = limit
+ return limit
+ return False
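For reference, a minimal sketch of the PSI parsing used by the two methods above. On kernels with PSI enabled, the first line of /proc/pressure/{cpu,io,memory} looks like "some avg10=0.00 avg60=0.00 avg300=0.00 total=12345678", so field [4] is the cumulative total counter in microseconds, and the difference over an interval gives a rate:

import time

def read_psi_total(path="/proc/pressure/cpu"):
    with open(path) as f:
        # split()[4] -> "total=12345678"; split("=")[1] -> "12345678"
        return float(f.readline().split()[4].split("=")[1])

t0, v0 = time.time(), read_psi_total()
time.sleep(1)
rate = (read_psi_total() - v0) / (time.time() - t0)
# 'rate' is what gets compared against BB_PRESSURE_MAX_CPU above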
def next_buildable_task(self):
"""
@@ -161,6 +244,12 @@ class RunQueueScheduler(object):
if not buildable:
return None
+ # Bitbake requires that at least one task be active. Only check for pressure if
+ # this is the case, otherwise the pressure limitation could result in no tasks
+ # being active and no new tasks being started, which at times breaks the scheduler.
+ if self.rq.stats.active and self.exceeds_max_pressure():
+ return None
+
# Filter out tasks that have a max number of threads that have been exceeded
skip_buildable = {}
for running in self.rq.runq_running.difference(self.rq.runq_complete):
@@ -191,11 +280,11 @@ class RunQueueScheduler(object):
best = None
bestprio = None
for tid in buildable:
- taskname = taskname_from_tid(tid)
- if taskname in skip_buildable and skip_buildable[taskname] >= int(self.skip_maxthread[taskname]):
- continue
prio = self.rev_prio_map[tid]
if bestprio is None or bestprio > prio:
+ taskname = taskname_from_tid(tid)
+ if taskname in skip_buildable and skip_buildable[taskname] >= int(self.skip_maxthread[taskname]):
+ continue
stamp = self.stamps[tid]
if stamp in self.rq.build_stamps.values():
continue
@@ -374,10 +463,9 @@ class RunQueueData:
self.rq = rq
self.warn_multi_bb = False
- self.stampwhitelist = cfgData.getVar("BB_STAMP_WHITELIST") or ""
- self.multi_provider_whitelist = (cfgData.getVar("MULTI_PROVIDER_WHITELIST") or "").split()
- self.setscenewhitelist = get_setscene_enforce_whitelist(cfgData, targets)
- self.setscenewhitelist_checked = False
+ self.multi_provider_allowed = (cfgData.getVar("BB_MULTI_PROVIDER_ALLOWED") or "").split()
+ self.setscene_ignore_tasks = get_setscene_enforce_ignore_tasks(cfgData, targets)
+ self.setscene_ignore_tasks_checked = False
self.setscene_enforce = (cfgData.getVar('BB_SETSCENE_ENFORCE') == "1")
self.init_progress_reporter = bb.progress.DummyMultiStageProcessProgressReporter()
@@ -475,7 +563,7 @@ class RunQueueData:
msgs.append(" Task %s (dependent Tasks %s)\n" % (dep, self.runq_depends_names(self.runtaskentries[dep].depends)))
msgs.append("\n")
if len(valid_chains) > 10:
- msgs.append("Aborted dependency loops search after 10 matches.\n")
+ msgs.append("Halted dependency loops search after 10 matches.\n")
raise TooManyLoops
continue
scan = False
@@ -536,7 +624,7 @@ class RunQueueData:
next_points.append(revdep)
task_done[revdep] = True
endpoints = next_points
- if len(next_points) == 0:
+ if not next_points:
break
# Circular dependency sanity check
@@ -578,15 +666,18 @@ class RunQueueData:
found = False
for mc in self.taskData:
- if len(taskData[mc].taskentries) > 0:
+ if taskData[mc].taskentries:
found = True
break
if not found:
# Nothing to do
return 0
+ bb.parse.siggen.setup_datacache(self.dataCaches)
+
self.init_progress_reporter.start()
self.init_progress_reporter.next_stage()
+ bb.event.check_for_interrupts(self.cooker.data)
# Step A - Work out a list of tasks to run
#
@@ -632,6 +723,8 @@ class RunQueueData:
frommc = mcdependency[1]
mcdep = mcdependency[2]
deptask = mcdependency[4]
+ if mcdep not in taskData:
+ bb.fatal("Multiconfig '%s' is referenced in multiconfig dependency '%s' but not enabled in BBMULTICONFIG?" % (mcdep, dep))
if mc == frommc:
fn = taskData[mcdep].build_targets[pn][0]
newdep = '%s:%s' % (fn,deptask)
@@ -733,6 +826,7 @@ class RunQueueData:
#self.dump_data()
self.init_progress_reporter.next_stage()
+ bb.event.check_for_interrupts(self.cooker.data)
# Resolve recursive 'recrdeptask' dependencies (Part B)
#
@@ -762,7 +856,7 @@ class RunQueueData:
# Find the dependency chain endpoints
endpoints = set()
for tid in self.runtaskentries:
- if len(deps[tid]) == 0:
+ if not deps[tid]:
endpoints.add(tid)
# Iterate the chains collating dependencies
while endpoints:
@@ -773,11 +867,11 @@ class RunQueueData:
cumulativedeps[dep].update(cumulativedeps[tid])
if tid in deps[dep]:
deps[dep].remove(tid)
- if len(deps[dep]) == 0:
+ if not deps[dep]:
next.add(dep)
endpoints = next
#for tid in deps:
- # if len(deps[tid]) != 0:
+ # if deps[tid]:
# bb.warn("Sanity test failure, dependencies left for %s (%s)" % (tid, deps[tid]))
# Loop here since recrdeptasks can depend upon other recrdeptasks and we have to
@@ -829,6 +923,7 @@ class RunQueueData:
self.runtaskentries[tid].depends.difference_update(recursivetasksselfref)
self.init_progress_reporter.next_stage()
+ bb.event.check_for_interrupts(self.cooker.data)
#self.dump_data()
@@ -867,7 +962,7 @@ class RunQueueData:
bb.debug(1, "Task %s is marked nostamp, cannot invalidate this task" % taskname)
else:
logger.verbose("Invalidate task %s, %s", taskname, fn)
- bb.parse.siggen.invalidate_task(taskname, self.dataCaches[mc], taskfn)
+ bb.parse.siggen.invalidate_task(taskname, taskfn)
self.target_tids = []
for (mc, target, task, fn) in self.targets:
@@ -910,47 +1005,54 @@ class RunQueueData:
mark_active(tid, 1)
self.init_progress_reporter.next_stage()
+ bb.event.check_for_interrupts(self.cooker.data)
# Step C - Prune all inactive tasks
#
# Once all active tasks are marked, prune the ones we don't need.
- delcount = {}
- for tid in list(self.runtaskentries.keys()):
- if tid not in runq_build:
- delcount[tid] = self.runtaskentries[tid]
- del self.runtaskentries[tid]
-
# Handle --runall
if self.cooker.configuration.runall:
# re-run the mark_active and then drop unused tasks from new list
- runq_build = {}
- for task in self.cooker.configuration.runall:
- if not task.startswith("do_"):
- task = "do_{0}".format(task)
- runall_tids = set()
- for tid in list(self.runtaskentries):
- wanttid = "{0}:{1}".format(fn_from_tid(tid), task)
- if wanttid in delcount:
- self.runtaskentries[wanttid] = delcount[wanttid]
- if wanttid in self.runtaskentries:
- runall_tids.add(wanttid)
-
- for tid in list(runall_tids):
- mark_active(tid,1)
- if self.cooker.configuration.force:
- invalidate_task(tid, False)
+ runall_tids = set()
+ added = True
+ while added:
+ reduced_tasklist = set(self.runtaskentries.keys())
+ for tid in list(self.runtaskentries.keys()):
+ if tid not in runq_build:
+ reduced_tasklist.remove(tid)
+ runq_build = {}
- for tid in list(self.runtaskentries.keys()):
- if tid not in runq_build:
- delcount[tid] = self.runtaskentries[tid]
- del self.runtaskentries[tid]
+ orig = runall_tids
+ runall_tids = set()
+ for task in self.cooker.configuration.runall:
+ if not task.startswith("do_"):
+ task = "do_{0}".format(task)
+ for tid in reduced_tasklist:
+ wanttid = "{0}:{1}".format(fn_from_tid(tid), task)
+ if wanttid in self.runtaskentries:
+ runall_tids.add(wanttid)
+
+ for tid in list(runall_tids):
+ mark_active(tid, 1)
+ self.target_tids.append(tid)
+ if self.cooker.configuration.force:
+ invalidate_task(tid, False)
+ added = runall_tids - orig
+
+ delcount = set()
+ for tid in list(self.runtaskentries.keys()):
+ if tid not in runq_build:
+ delcount.add(tid)
+ del self.runtaskentries[tid]
- if len(self.runtaskentries) == 0:
+ if self.cooker.configuration.runall:
+ if not self.runtaskentries:
bb.msg.fatal("RunQueue", "Could not find any tasks with the tasknames %s to run within the recipes of the taskgraphs of the targets %s" % (str(self.cooker.configuration.runall), str(self.targets)))
self.init_progress_reporter.next_stage()
+ bb.event.check_for_interrupts(self.cooker.data)
# Handle runonly
if self.cooker.configuration.runonly:
@@ -960,19 +1062,19 @@ class RunQueueData:
for task in self.cooker.configuration.runonly:
if not task.startswith("do_"):
task = "do_{0}".format(task)
- runonly_tids = { k: v for k, v in self.runtaskentries.items() if taskname_from_tid(k) == task }
+ runonly_tids = [k for k in self.runtaskentries.keys() if taskname_from_tid(k) == task]
- for tid in list(runonly_tids):
- mark_active(tid,1)
+ for tid in runonly_tids:
+ mark_active(tid, 1)
if self.cooker.configuration.force:
invalidate_task(tid, False)
for tid in list(self.runtaskentries.keys()):
if tid not in runq_build:
- delcount[tid] = self.runtaskentries[tid]
+ delcount.add(tid)
del self.runtaskentries[tid]
- if len(self.runtaskentries) == 0:
+ if not self.runtaskentries:
bb.msg.fatal("RunQueue", "Could not find any tasks with the tasknames %s to run within the taskgraphs of the targets %s" % (str(self.cooker.configuration.runonly), str(self.targets)))
#
@@ -980,8 +1082,8 @@ class RunQueueData:
#
# Check to make sure we still have tasks to run
- if len(self.runtaskentries) == 0:
- if not taskData[''].abort:
+ if not self.runtaskentries:
+ if not taskData[''].halt:
bb.msg.fatal("RunQueue", "All buildable tasks have been run but the build is incomplete (--continue mode). Errors for the tasks that failed will have been printed above.")
else:
bb.msg.fatal("RunQueue", "No active tasks and not in --continue mode?! Please report this bug.")
@@ -991,6 +1093,7 @@ class RunQueueData:
logger.verbose("Assign Weightings")
self.init_progress_reporter.next_stage()
+ bb.event.check_for_interrupts(self.cooker.data)
# Generate a list of reverse dependencies to ease future calculations
for tid in self.runtaskentries:
@@ -998,13 +1101,14 @@ class RunQueueData:
self.runtaskentries[dep].revdeps.add(tid)
self.init_progress_reporter.next_stage()
+ bb.event.check_for_interrupts(self.cooker.data)
# Identify tasks at the end of dependency chains
# Error on circular dependency loops (length two)
endpoints = []
for tid in self.runtaskentries:
revdeps = self.runtaskentries[tid].revdeps
- if len(revdeps) == 0:
+ if not revdeps:
endpoints.append(tid)
for dep in revdeps:
if dep in self.runtaskentries[tid].depends:
@@ -1014,12 +1118,14 @@ class RunQueueData:
logger.verbose("Compute totals (have %s endpoint(s))", len(endpoints))
self.init_progress_reporter.next_stage()
+ bb.event.check_for_interrupts(self.cooker.data)
# Calculate task weights
# Check for longer circular dependencies
self.runq_weight = self.calculate_task_weights(endpoints)
self.init_progress_reporter.next_stage()
+ bb.event.check_for_interrupts(self.cooker.data)
# Sanity Check - Check for multiple tasks building the same provider
for mc in self.dataCaches:
@@ -1040,7 +1146,7 @@ class RunQueueData:
for prov in prov_list:
if len(prov_list[prov]) < 2:
continue
- if prov in self.multi_provider_whitelist:
+ if prov in self.multi_provider_allowed:
continue
seen_pn = []
# If two versions of the same PN are being built its fatal, we don't support it.
@@ -1050,12 +1156,12 @@ class RunQueueData:
seen_pn.append(pn)
else:
bb.fatal("Multiple versions of %s are due to be built (%s). Only one version of a given PN should be built in any given build. You likely need to set PREFERRED_VERSION_%s to select the correct version or don't depend on multiple versions." % (pn, " ".join(prov_list[prov]), pn))
- msg = "Multiple .bb files are due to be built which each provide %s:\n %s" % (prov, "\n ".join(prov_list[prov]))
+ msgs = ["Multiple .bb files are due to be built which each provide %s:\n %s" % (prov, "\n ".join(prov_list[prov]))]
#
# Construct a list of things which uniquely depend on each provider
# since this may help the user figure out which dependency is triggering this warning
#
- msg += "\nA list of tasks depending on these providers is shown and may help explain where the dependency comes from."
+ msgs.append("\nA list of tasks depending on these providers is shown and may help explain where the dependency comes from.")
deplist = {}
commondeps = None
for provfn in prov_list[prov]:
@@ -1075,12 +1181,12 @@ class RunQueueData:
commondeps &= deps
deplist[provfn] = deps
for provfn in deplist:
- msg += "\n%s has unique dependees:\n %s" % (provfn, "\n ".join(deplist[provfn] - commondeps))
+ msgs.append("\n%s has unique dependees:\n %s" % (provfn, "\n ".join(deplist[provfn] - commondeps)))
#
# Construct a list of provides and runtime providers for each recipe
# (rprovides has to cover RPROVIDES, PACKAGES, PACKAGES_DYNAMIC)
#
- msg += "\nIt could be that one recipe provides something the other doesn't and should. The following provider and runtime provider differences may be helpful."
+ msgs.append("\nIt could be that one recipe provides something the other doesn't and should. The following provider and runtime provider differences may be helpful.")
provide_results = {}
rprovide_results = {}
commonprovs = None
@@ -1107,30 +1213,20 @@ class RunQueueData:
else:
commonrprovs &= rprovides
rprovide_results[provfn] = rprovides
- #msg += "\nCommon provides:\n %s" % ("\n ".join(commonprovs))
- #msg += "\nCommon rprovides:\n %s" % ("\n ".join(commonrprovs))
+ #msgs.append("\nCommon provides:\n %s" % ("\n ".join(commonprovs)))
+ #msgs.append("\nCommon rprovides:\n %s" % ("\n ".join(commonrprovs)))
for provfn in prov_list[prov]:
- msg += "\n%s has unique provides:\n %s" % (provfn, "\n ".join(provide_results[provfn] - commonprovs))
- msg += "\n%s has unique rprovides:\n %s" % (provfn, "\n ".join(rprovide_results[provfn] - commonrprovs))
+ msgs.append("\n%s has unique provides:\n %s" % (provfn, "\n ".join(provide_results[provfn] - commonprovs)))
+ msgs.append("\n%s has unique rprovides:\n %s" % (provfn, "\n ".join(rprovide_results[provfn] - commonrprovs)))
if self.warn_multi_bb:
- logger.verbnote(msg)
+ logger.verbnote("".join(msgs))
else:
- logger.error(msg)
+ logger.error("".join(msgs))
self.init_progress_reporter.next_stage()
-
- # Create a whitelist usable by the stamp checks
- self.stampfnwhitelist = {}
- for mc in self.taskData:
- self.stampfnwhitelist[mc] = []
- for entry in self.stampwhitelist.split():
- if entry not in self.taskData[mc].build_targets:
- continue
- fn = self.taskData.build_targets[entry][0]
- self.stampfnwhitelist[mc].append(fn)
-
self.init_progress_reporter.next_stage()
+ bb.event.check_for_interrupts(self.cooker.data)
# Iterate over the task list looking for tasks with a 'setscene' function
self.runq_setscene_tids = set()
@@ -1143,6 +1239,7 @@ class RunQueueData:
self.runq_setscene_tids.add(tid)
self.init_progress_reporter.next_stage()
+ bb.event.check_for_interrupts(self.cooker.data)
# Invalidate task if force mode active
if self.cooker.configuration.force:
@@ -1159,6 +1256,7 @@ class RunQueueData:
invalidate_task(fn + ":" + st, True)
self.init_progress_reporter.next_stage()
+ bb.event.check_for_interrupts(self.cooker.data)
# Create and print to the logs a virtual/xxxx -> PN (fn) table
for mc in taskData:
@@ -1171,18 +1269,20 @@ class RunQueueData:
bb.parse.siggen.tasks_resolved(virtmap, virtpnmap, self.dataCaches[mc])
self.init_progress_reporter.next_stage()
+ bb.event.check_for_interrupts(self.cooker.data)
bb.parse.siggen.set_setscene_tasks(self.runq_setscene_tids)
# Iterate over the task list and call into the siggen code
dealtwith = set()
todeal = set(self.runtaskentries)
- while len(todeal) > 0:
+ while todeal:
for tid in todeal.copy():
- if len(self.runtaskentries[tid].depends - dealtwith) == 0:
+ if not (self.runtaskentries[tid].depends - dealtwith):
dealtwith.add(tid)
todeal.remove(tid)
self.prepare_task_hash(tid)
+ bb.event.check_for_interrupts(self.cooker.data)
bb.parse.siggen.writeout_file_checksum_cache()
@@ -1190,9 +1290,8 @@ class RunQueueData:
return len(self.runtaskentries)
def prepare_task_hash(self, tid):
- dc = bb.parse.siggen.get_data_caches(self.dataCaches, mc_from_tid(tid))
- bb.parse.siggen.prep_taskhash(tid, self.runtaskentries[tid].depends, dc)
- self.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(tid, self.runtaskentries[tid].depends, dc)
+ bb.parse.siggen.prep_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches)
+ self.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches)
self.runtaskentries[tid].unihash = bb.parse.siggen.get_unihash(tid)
def dump_data(self):
@@ -1218,7 +1317,6 @@ class RunQueue:
self.cfgData = cfgData
self.rqdata = RunQueueData(self, cooker, cfgData, dataCaches, taskData, targets)
- self.stamppolicy = cfgData.getVar("BB_STAMP_POLICY") or "perfile"
self.hashvalidate = cfgData.getVar("BB_HASHCHECK_FUNCTION") or None
self.depvalidate = cfgData.getVar("BB_SETSCENE_DEPVALID") or None
@@ -1237,32 +1335,40 @@ class RunQueue:
self.worker = {}
self.fakeworker = {}
+ @staticmethod
+ def send_pickled_data(worker, data, name):
+ msg = bytearray()
+ msg.extend(b"<" + name.encode() + b">")
+ pickled_data = pickle.dumps(data)
+ msg.extend(len(pickled_data).to_bytes(4, 'big'))
+ msg.extend(pickled_data)
+ msg.extend(b"</" + name.encode() + b">")
+ worker.stdin.write(msg)
+
def _start_worker(self, mc, fakeroot = False, rqexec = None):
logger.debug("Starting bitbake-worker")
magic = "decafbad"
if self.cooker.configuration.profile:
magic = "decafbadbad"
fakerootlogs = None
+
+ workerscript = os.path.realpath(os.path.dirname(__file__) + "/../../bin/bitbake-worker")
if fakeroot:
magic = magic + "beef"
mcdata = self.cooker.databuilder.mcdata[mc]
fakerootcmd = shlex.split(mcdata.getVar("FAKEROOTCMD"))
fakerootenv = (mcdata.getVar("FAKEROOTBASEENV") or "").split()
env = os.environ.copy()
- for key, value in (var.split('=') for var in fakerootenv):
+ for key, value in (var.split('=',1) for var in fakerootenv):
env[key] = value
- worker = subprocess.Popen(fakerootcmd + ["bitbake-worker", magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE, env=env)
+ worker = subprocess.Popen(fakerootcmd + [sys.executable, workerscript, magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE, env=env)
fakerootlogs = self.rqdata.dataCaches[mc].fakerootlogs
else:
- worker = subprocess.Popen(["bitbake-worker", magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
+ worker = subprocess.Popen([sys.executable, workerscript, magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
bb.utils.nonblockingfd(worker.stdout)
workerpipe = runQueuePipe(worker.stdout, None, self.cfgData, self, rqexec, fakerootlogs=fakerootlogs)
workerdata = {
- "taskdeps" : self.rqdata.dataCaches[mc].task_deps,
- "fakerootenv" : self.rqdata.dataCaches[mc].fakerootenv,
- "fakerootdirs" : self.rqdata.dataCaches[mc].fakerootdirs,
- "fakerootnoenv" : self.rqdata.dataCaches[mc].fakerootnoenv,
"sigdata" : bb.parse.siggen.get_taskdata(),
"logdefaultlevel" : bb.msg.loggerDefaultLogLevel,
"build_verbose_shell" : self.cooker.configuration.build_verbose_shell,
@@ -1276,9 +1382,9 @@ class RunQueue:
"umask" : self.cfgData.getVar("BB_DEFAULT_UMASK"),
}
- worker.stdin.write(b"<cookerconfig>" + pickle.dumps(self.cooker.configuration) + b"</cookerconfig>")
- worker.stdin.write(b"<extraconfigdata>" + pickle.dumps(self.cooker.extraconfigdata) + b"</extraconfigdata>")
- worker.stdin.write(b"<workerdata>" + pickle.dumps(workerdata) + b"</workerdata>")
+ RunQueue.send_pickled_data(worker, self.cooker.configuration, "cookerconfig")
+ RunQueue.send_pickled_data(worker, self.cooker.extraconfigdata, "extraconfigdata")
+ RunQueue.send_pickled_data(worker, workerdata, "workerdata")
worker.stdin.flush()
return RunQueueWorker(worker, workerpipe)
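The framing send_pickled_data() produces is "<name>" + a 4-byte big-endian length + the pickled payload + "</name>". A hypothetical reader for one such message (the real bitbake-worker side is more involved; this only illustrates the framing):

import io, pickle

def read_pickled_data(stream, name):
    opening = b"<" + name.encode() + b">"
    assert stream.read(len(opening)) == opening
    size = int.from_bytes(stream.read(4), 'big')
    data = pickle.loads(stream.read(size))
    closing = b"</" + name.encode() + b">"
    assert stream.read(len(closing)) == closing
    return data

# Round-trip against the writer's format:
payload = pickle.dumps({"magic": "decafbad"})
msg = b"<workerdata>" + len(payload).to_bytes(4, 'big') + payload + b"</workerdata>"
print(read_pickled_data(io.BytesIO(msg), "workerdata"))  # {'magic': 'decafbad'}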
@@ -1288,7 +1394,7 @@ class RunQueue:
return
logger.debug("Teardown for bitbake-worker")
try:
- worker.process.stdin.write(b"<quit></quit>")
+ RunQueue.send_pickled_data(worker.process, b"", "quit")
worker.process.stdin.flush()
worker.process.stdin.close()
except IOError:
@@ -1300,12 +1406,12 @@ class RunQueue:
continue
worker.pipe.close()
- def start_worker(self):
+ def start_worker(self, rqexec):
if self.worker:
self.teardown_workers()
self.teardown = False
for mc in self.rqdata.dataCaches:
- self.worker[mc] = self._start_worker(mc)
+ self.worker[mc] = self._start_worker(mc, False, rqexec)
def start_fakeworker(self, rqexec, mc):
if not mc in self.fakeworker:
@@ -1347,15 +1453,7 @@ class RunQueue:
if taskname is None:
taskname = tn
- if self.stamppolicy == "perfile":
- fulldeptree = False
- else:
- fulldeptree = True
- stampwhitelist = []
- if self.stamppolicy == "whitelist":
- stampwhitelist = self.rqdata.stampfnwhitelist[mc]
-
- stampfile = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn)
+ stampfile = bb.parse.siggen.stampfile_mcfn(taskname, taskfn)
# If the stamp is missing, it's not current
if not os.access(stampfile, os.F_OK):
@@ -1367,7 +1465,7 @@ class RunQueue:
logger.debug2("%s.%s is nostamp\n", fn, taskname)
return False
- if taskname != "do_setscene" and taskname.endswith("_setscene"):
+ if taskname.endswith("_setscene"):
return True
if cache is None:
@@ -1378,15 +1476,15 @@ class RunQueue:
for dep in self.rqdata.runtaskentries[tid].depends:
if iscurrent:
(mc2, fn2, taskname2, taskfn2) = split_tid_mcfn(dep)
- stampfile2 = bb.build.stampfile(taskname2, self.rqdata.dataCaches[mc2], taskfn2)
- stampfile3 = bb.build.stampfile(taskname2 + "_setscene", self.rqdata.dataCaches[mc2], taskfn2)
+ stampfile2 = bb.parse.siggen.stampfile_mcfn(taskname2, taskfn2)
+ stampfile3 = bb.parse.siggen.stampfile_mcfn(taskname2 + "_setscene", taskfn2)
t2 = get_timestamp(stampfile2)
t3 = get_timestamp(stampfile3)
if t3 and not t2:
continue
if t3 and t3 > t2:
continue
- if fn == fn2 or (fulldeptree and fn2 not in stampwhitelist):
+ if fn == fn2:
if not t2:
logger.debug2('Stampfile %s does not exist', stampfile2)
iscurrent = False
@@ -1436,10 +1534,11 @@ class RunQueue:
"""
Run the tasks in a queue prepared by rqdata.prepare()
Upon failure, optionally try to recover the build using any alternate providers
- (if the abort on failure configuration option isn't set)
+ (if the halt on failure configuration option isn't set)
"""
retval = True
+ bb.event.check_for_interrupts(self.cooker.data)
if self.state is runQueuePrepare:
# NOTE: if you add, remove or significantly refactor the stages of this
@@ -1468,10 +1567,13 @@ class RunQueue:
if not self.dm_event_handler_registered:
res = bb.event.register(self.dm_event_handler_name,
- lambda x: self.dm.check(self) if self.state in [runQueueRunning, runQueueCleanUp] else False,
+ lambda x, y: self.dm.check(self) if self.state in [runQueueRunning, runQueueCleanUp] else False,
('bb.event.HeartbeatEvent',), data=self.cfgData)
self.dm_event_handler_registered = True
+ self.rqdata.init_progress_reporter.next_stage()
+ self.rqexe = RunQueueExecute(self)
+
dump = self.cooker.configuration.dump_signatures
if dump:
self.rqdata.init_progress_reporter.finish()
@@ -1483,16 +1585,14 @@ class RunQueue:
self.state = runQueueComplete
if self.state is runQueueSceneInit:
- self.rqdata.init_progress_reporter.next_stage()
- self.start_worker()
- self.rqdata.init_progress_reporter.next_stage()
- self.rqexe = RunQueueExecute(self)
+ self.start_worker(self.rqexe)
+ self.rqdata.init_progress_reporter.finish()
# If we don't have any setscene functions, skip execution
- if len(self.rqdata.runq_setscene_tids) == 0:
+ if not self.rqdata.runq_setscene_tids:
logger.info('No setscene tasks')
for tid in self.rqdata.runtaskentries:
- if len(self.rqdata.runtaskentries[tid].depends) == 0:
+ if not self.rqdata.runtaskentries[tid].depends:
self.rqexe.setbuildable(tid)
self.rqexe.tasks_notcovered.add(tid)
self.rqexe.sqdone = True
@@ -1565,29 +1665,28 @@ class RunQueue:
else:
self.rqexe.finish()
- def rq_dump_sigfn(self, fn, options):
- bb_cache = bb.cache.NoCache(self.cooker.databuilder)
- mc = bb.runqueue.mc_from_tid(fn)
- the_data = bb_cache.loadDataFull(fn, self.cooker.collections[mc].get_file_appends(fn))
- siggen = bb.parse.siggen
- dataCaches = self.rqdata.dataCaches
- siggen.dump_sigfn(fn, dataCaches, options)
+ def _rq_dump_sigtid(self, tids):
+ for tid in tids:
+ (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
+ dataCaches = self.rqdata.dataCaches
+ bb.parse.siggen.dump_sigtask(taskfn, taskname, dataCaches[mc].stamp[taskfn], True)
def dump_signatures(self, options):
- fns = set()
- bb.note("Reparsing files to collect dependency data")
+ if bb.cooker.CookerFeatures.RECIPE_SIGGEN_INFO not in self.cooker.featureset:
+ bb.fatal("The dump signatures functionality needs the RECIPE_SIGGEN_INFO feature enabled")
- for tid in self.rqdata.runtaskentries:
- fn = fn_from_tid(tid)
- fns.add(fn)
+ bb.note("Writing task signature files")
max_process = int(self.cfgData.getVar("BB_NUMBER_PARSE_THREADS") or os.cpu_count() or 1)
+ def chunkify(l, n):
+ return [l[i::n] for i in range(n)]
+ tids = chunkify(list(self.rqdata.runtaskentries), max_process)
# We cannot use the real multiprocessing.Pool easily due to some local data
# that can't be pickled. This is a cheap multi-process solution.
launched = []
- while fns:
+ while tids:
if len(launched) < max_process:
- p = Process(target=self.rq_dump_sigfn, args=(fns.pop(), options))
+ p = Process(target=self._rq_dump_sigtid, args=(tids.pop(), ))
p.start()
launched.append(p)
for q in launched:
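The chunkify() helper above splits the task list round-robin into max_process roughly equal slices, one per dump process:

def chunkify(l, n):
    return [l[i::n] for i in range(n)]

print(chunkify(list(range(10)), 3))
# -> [[0, 3, 6, 9], [1, 4, 7], [2, 5, 8]]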
@@ -1602,6 +1701,17 @@ class RunQueue:
return
def print_diffscenetasks(self):
+ def get_root_invalid_tasks(task, taskdepends, valid, noexec, visited_invalid):
+ invalidtasks = []
+ for t in taskdepends[task].depends:
+ if t not in valid and t not in visited_invalid:
+ invalidtasks.extend(get_root_invalid_tasks(t, taskdepends, valid, noexec, visited_invalid))
+ visited_invalid.add(t)
+
+ direct_invalid = [t for t in taskdepends[task].depends if t not in valid]
+ if not direct_invalid and task not in noexec:
+ invalidtasks = [task]
+ return invalidtasks
noexec = []
tocheck = set()
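A toy run of get_root_invalid_tasks() above, assuming a three-task chain a -> b -> c with nothing valid: only the deepest invalid task is reported, which is what makes the printed list the start of the differences rather than every affected task:

from collections import namedtuple

Entry = namedtuple("Entry", "depends")
taskdepends = {"a": Entry({"b"}), "b": Entry({"c"}), "c": Entry(set())}

def get_root_invalid_tasks(task, taskdepends, valid, noexec, visited_invalid):
    invalidtasks = []
    for t in taskdepends[task].depends:
        if t not in valid and t not in visited_invalid:
            invalidtasks.extend(get_root_invalid_tasks(t, taskdepends, valid,
                                                       noexec, visited_invalid))
        visited_invalid.add(t)
    direct_invalid = [t for t in taskdepends[task].depends if t not in valid]
    if not direct_invalid and task not in noexec:
        invalidtasks = [task]
    return invalidtasks

print(get_root_invalid_tasks("a", taskdepends, set(), set(), set()))  # ['c']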
@@ -1635,46 +1745,49 @@ class RunQueue:
valid_new.add(dep)
invalidtasks = set()
- for tid in self.rqdata.runtaskentries:
- if tid not in valid_new and tid not in noexec:
- invalidtasks.add(tid)
- found = set()
- processed = set()
- for tid in invalidtasks:
+ toptasks = set(["{}:{}".format(t[3], t[2]) for t in self.rqdata.targets])
+ for tid in toptasks:
toprocess = set([tid])
while toprocess:
next = set()
+ visited_invalid = set()
for t in toprocess:
- for dep in self.rqdata.runtaskentries[t].depends:
- if dep in invalidtasks:
- found.add(tid)
- if dep not in processed:
- processed.add(dep)
+ if t not in valid_new and t not in noexec:
+ invalidtasks.update(get_root_invalid_tasks(t, self.rqdata.runtaskentries, valid_new, noexec, visited_invalid))
+ continue
+ if t in self.rqdata.runq_setscene_tids:
+ for dep in self.rqexe.sqdata.sq_deps[t]:
next.add(dep)
+ continue
+
+ for dep in self.rqdata.runtaskentries[t].depends:
+ next.add(dep)
+
toprocess = next
- if tid in found:
- toprocess = set()
tasklist = []
- for tid in invalidtasks.difference(found):
+ for tid in invalidtasks:
tasklist.append(tid)
if tasklist:
bb.plain("The differences between the current build and any cached tasks start at the following tasks:\n" + "\n".join(tasklist))
- return invalidtasks.difference(found)
+ return invalidtasks
def write_diffscenetasks(self, invalidtasks):
+ bb.siggen.check_siggen_version(bb.siggen)
# Define recursion callback
def recursecb(key, hash1, hash2):
hashes = [hash1, hash2]
+ bb.debug(1, "Recursively looking for recipe {} hashes {}".format(key, hashes))
hashfiles = bb.siggen.find_siginfo(key, None, hashes, self.cfgData)
+ bb.debug(1, "Found hashfiles:\n{}".format(hashfiles))
recout = []
if len(hashfiles) == 2:
- out2 = bb.siggen.compare_sigfiles(hashfiles[hash1], hashfiles[hash2], recursecb)
+ out2 = bb.siggen.compare_sigfiles(hashfiles[hash1]['path'], hashfiles[hash2]['path'], recursecb)
recout.extend(list(' ' + l for l in out2))
else:
recout.append("Unable to find matching sigdata for %s with hashes %s or %s" % (key, hash1, hash2))
@@ -1685,20 +1798,25 @@ class RunQueue:
for tid in invalidtasks:
(mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn]
- h = self.rqdata.runtaskentries[tid].hash
- matches = bb.siggen.find_siginfo(pn, taskname, [], self.cfgData)
+ h = self.rqdata.runtaskentries[tid].unihash
+ bb.debug(1, "Looking for recipe {} task {}".format(pn, taskname))
+ matches = bb.siggen.find_siginfo(pn, taskname, [], self.cooker.databuilder.mcdata[mc])
+ bb.debug(1, "Found hashfiles:\n{}".format(matches))
match = None
- for m in matches:
- if h in m:
- match = m
+ for m in matches.values():
+ if h in m['path']:
+ match = m['path']
if match is None:
- bb.fatal("Can't find a task we're supposed to have written out? (hash: %s)?" % h)
+ bb.fatal("Can't find a task we're supposed to have written out? (hash: %s tid: %s)?" % (h, tid))
matches = {k : v for k, v in iter(matches.items()) if h not in k}
+ matches_local = {k : v for k, v in iter(matches.items()) if h not in k and not v['sstate']}
+ if matches_local:
+ matches = matches_local
if matches:
- latestmatch = sorted(matches.keys(), key=lambda f: matches[f])[-1]
+ latestmatch = matches[sorted(matches.keys(), key=lambda h: matches[h]['time'])[-1]]['path']
prevh = __find_sha256__.search(latestmatch).group(0)
output = bb.siggen.compare_sigfiles(latestmatch, match, recursecb)
- bb.plain("\nTask %s:%s couldn't be used from the cache because:\n We need hash %s, closest matching task was %s\n " % (pn, taskname, h, prevh) + '\n '.join(output))
+ bb.plain("\nTask %s:%s couldn't be used from the cache because:\n We need hash %s, most recent matching task was %s\n " % (pn, taskname, h, prevh) + '\n '.join(output))
class RunQueueExecute:
@@ -1711,6 +1829,10 @@ class RunQueueExecute:
self.number_tasks = int(self.cfgData.getVar("BB_NUMBER_THREADS") or 1)
self.scheduler = self.cfgData.getVar("BB_SCHEDULER") or "speed"
+ self.max_cpu_pressure = self.cfgData.getVar("BB_PRESSURE_MAX_CPU")
+ self.max_io_pressure = self.cfgData.getVar("BB_PRESSURE_MAX_IO")
+ self.max_memory_pressure = self.cfgData.getVar("BB_PRESSURE_MAX_MEMORY")
+ self.max_loadfactor = self.cfgData.getVar("BB_LOADFACTOR_MAX")
self.sq_buildable = set()
self.sq_running = set()
@@ -1728,6 +1850,8 @@ class RunQueueExecute:
self.build_stamps2 = []
self.failed_tids = []
self.sq_deferred = {}
+ self.sq_needed_harddeps = set()
+ self.sq_harddep_deferred = set()
self.stampcache = {}
@@ -1735,17 +1859,39 @@ class RunQueueExecute:
self.holdoff_need_update = True
self.sqdone = False
- self.stats = RunQueueStats(len(self.rqdata.runtaskentries))
- self.sq_stats = RunQueueStats(len(self.rqdata.runq_setscene_tids))
-
- for mc in rq.worker:
- rq.worker[mc].pipe.setrunqueueexec(self)
- for mc in rq.fakeworker:
- rq.fakeworker[mc].pipe.setrunqueueexec(self)
+ self.stats = RunQueueStats(len(self.rqdata.runtaskentries), len(self.rqdata.runq_setscene_tids))
if self.number_tasks <= 0:
bb.fatal("Invalid BB_NUMBER_THREADS %s" % self.number_tasks)
+ lower_limit = 1.0
+ upper_limit = 1000000.0
+ if self.max_cpu_pressure:
+ self.max_cpu_pressure = float(self.max_cpu_pressure)
+ if self.max_cpu_pressure < lower_limit:
+ bb.fatal("Invalid BB_PRESSURE_MAX_CPU %s, minimum value is %s." % (self.max_cpu_pressure, lower_limit))
+ if self.max_cpu_pressure > upper_limit:
+ bb.warn("Your build will be largely unregulated since BB_PRESSURE_MAX_CPU is set to %s. It is very unlikely that such high pressure will be experienced." % (self.max_cpu_pressure))
+
+ if self.max_io_pressure:
+ self.max_io_pressure = float(self.max_io_pressure)
+ if self.max_io_pressure < lower_limit:
+ bb.fatal("Invalid BB_PRESSURE_MAX_IO %s, minimum value is %s." % (self.max_io_pressure, lower_limit))
+ if self.max_io_pressure > upper_limit:
+ bb.warn("Your build will be largely unregulated since BB_PRESSURE_MAX_IO is set to %s. It is very unlikely that such high pressure will be experienced." % (self.max_io_pressure))
+
+ if self.max_memory_pressure:
+ self.max_memory_pressure = float(self.max_memory_pressure)
+ if self.max_memory_pressure < lower_limit:
+ bb.fatal("Invalid BB_PRESSURE_MAX_MEMORY %s, minimum value is %s." % (self.max_memory_pressure, lower_limit))
+ if self.max_memory_pressure > upper_limit:
+ bb.warn("Your build will be largely unregulated since BB_PRESSURE_MAX_MEMORY is set to %s. It is very unlikely that such high pressure will be experienced." % (self.max_io_pressure))
+
+ if self.max_loadfactor:
+ self.max_loadfactor = float(self.max_loadfactor)
+ if self.max_loadfactor <= 0:
+ bb.fatal("Invalid BB_LOADFACTOR_MAX %s, needs to be greater than zero." % (self.max_loadfactor))
+
# List of setscene tasks which we've covered
self.scenequeue_covered = set()
# List of tasks which are covered (including setscene ones)
@@ -1755,11 +1901,6 @@ class RunQueueExecute:
self.tasks_notcovered = set()
self.scenequeue_notneeded = set()
- # We can't skip specified target tasks which aren't setscene tasks
- self.cantskip = set(self.rqdata.target_tids)
- self.cantskip.difference_update(self.rqdata.runq_setscene_tids)
- self.cantskip.intersection_update(self.rqdata.runtaskentries)
-
schedulers = self.get_schedulers()
for scheduler in schedulers:
if self.scheduler == scheduler.name:
@@ -1770,9 +1911,27 @@ class RunQueueExecute:
bb.fatal("Invalid scheduler '%s'. Available schedulers: %s" %
(self.scheduler, ", ".join(obj.name for obj in schedulers)))
- #if len(self.rqdata.runq_setscene_tids) > 0:
+ #if self.rqdata.runq_setscene_tids:
self.sqdata = SQData()
- build_scenequeue_data(self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self)
+ build_scenequeue_data(self.sqdata, self.rqdata, self)
+
+ update_scenequeue_data(self.sqdata.sq_revdeps, self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self, summary=True)
+
+ # Compute a list of 'stale' sstate tasks where the current hash does not match the one
+ # in any stamp files. Pass the list out to metadata as an event.
+ found = {}
+ for tid in self.rqdata.runq_setscene_tids:
+ (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
+ stamps = bb.build.find_stale_stamps(taskname, taskfn)
+ if stamps:
+ if mc not in found:
+ found[mc] = {}
+ found[mc][tid] = stamps
+ for mc in found:
+ event = bb.event.StaleSetSceneTasks(found[mc])
+ bb.event.fire(event, self.cooker.databuilder.mcdata[mc])
+
+ self.build_taskdepdata_cache()
def runqueue_process_waitpid(self, task, status, fakerootlog=None):
@@ -1787,6 +1946,7 @@ class RunQueueExecute:
else:
self.sq_task_complete(task)
self.sq_live.remove(task)
+ self.stats.updateActiveSetscene(len(self.sq_live))
else:
if status != 0:
self.task_fail(task, status, fakerootlog=fakerootlog)
@@ -1797,20 +1957,20 @@ class RunQueueExecute:
def finish_now(self):
for mc in self.rq.worker:
try:
- self.rq.worker[mc].process.stdin.write(b"<finishnow></finishnow>")
+ RunQueue.send_pickled_data(self.rq.worker[mc].process, b"", "finishnow")
self.rq.worker[mc].process.stdin.flush()
except IOError:
# worker must have died?
pass
for mc in self.rq.fakeworker:
try:
- self.rq.fakeworker[mc].process.stdin.write(b"<finishnow></finishnow>")
+ RunQueue.send_pickled_data(self.rq.fakeworker[mc].process, b"", "finishnow")
self.rq.fakeworker[mc].process.stdin.flush()
except IOError:
# worker must have died?
pass
- if len(self.failed_tids) != 0:
+ if self.failed_tids:
self.rq.state = runQueueFailed
return
@@ -1820,13 +1980,13 @@ class RunQueueExecute:
def finish(self):
self.rq.state = runQueueCleanUp
- active = self.stats.active + self.sq_stats.active
+ active = self.stats.active + len(self.sq_live)
if active > 0:
bb.event.fire(runQueueExitWait(active), self.cfgData)
self.rq.read_workers()
return self.rq.active_fds()
- if len(self.failed_tids) != 0:
+ if self.failed_tids:
self.rq.state = runQueueFailed
return True
@@ -1853,7 +2013,7 @@ class RunQueueExecute:
return valid
def can_start_task(self):
- active = self.stats.active + self.sq_stats.active
+ active = self.stats.active + len(self.sq_live)
can_start = active < self.number_tasks
return can_start
@@ -1873,8 +2033,7 @@ class RunQueueExecute:
try:
module = __import__(modname, fromlist=(name,))
except ImportError as exc:
- logger.critical("Unable to import scheduler '%s' from '%s': %s" % (name, modname, exc))
- raise SystemExit(1)
+ bb.fatal("Unable to import scheduler '%s' from '%s': %s" % (name, modname, exc))
else:
schedulers.add(getattr(module, name))
return schedulers
@@ -1904,6 +2063,20 @@ class RunQueueExecute:
self.setbuildable(revdep)
logger.debug("Marking task %s as buildable", revdep)
+ found = None
+ for t in sorted(self.sq_deferred.copy()):
+ if self.sq_deferred[t] == task:
+ # Allow the next deferred task to run. Any other deferred tasks should be deferred after that task.
+ # We shouldn't allow all to run at once as it is prone to races.
+ if not found:
+ bb.debug(1, "Deferred task %s now buildable" % t)
+ del self.sq_deferred[t]
+ update_scenequeue_data([t], self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self, summary=False)
+ found = t
+ else:
+ bb.debug(1, "Deferring %s after %s" % (t, found))
+ self.sq_deferred[t] = found
+
def task_complete(self, task):
self.stats.taskCompleted()
bb.event.fire(runQueueTaskCompleted(task, self.stats, self.rq), self.cfgData)
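A reduced model of the deferred-setscene handoff added above: when `task` completes, exactly one task deferred on it becomes runnable and the remaining ones are re-deferred behind it, serialising tasks that would otherwise race (the tids here are placeholders):

sq_deferred = {"t1": "done", "t2": "done", "t3": "other"}
task = "done"
found = None
for t in sorted(sq_deferred.copy()):
    if sq_deferred[t] == task:
        if not found:
            del sq_deferred[t]      # t1 is released to run
            found = t
        else:
            sq_deferred[t] = found  # t2 now waits on t1 instead
print(sq_deferred)  # -> {'t2': 't1', 't3': 'other'}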
@@ -1918,7 +2091,7 @@ class RunQueueExecute:
self.stats.taskFailed()
self.failed_tids.append(task)
- fakeroot_log = ""
+ fakeroot_log = []
if fakerootlog and os.path.exists(fakerootlog):
with open(fakerootlog) as fakeroot_log_file:
fakeroot_failed = False
@@ -1928,14 +2101,14 @@ class RunQueueExecute:
fakeroot_failed = True
if 'doing new pid setup and server start' in line:
break
- fakeroot_log = line + fakeroot_log
+ fakeroot_log.append(line)
if not fakeroot_failed:
- fakeroot_log = None
+ fakeroot_log = []
- bb.event.fire(runQueueTaskFailed(task, self.stats, exitcode, self.rq, fakeroot_log=fakeroot_log), self.cfgData)
+ bb.event.fire(runQueueTaskFailed(task, self.stats, exitcode, self.rq, fakeroot_log=("".join(fakeroot_log) or None)), self.cfgData)
- if self.rqdata.taskData[''].abort:
+ if self.rqdata.taskData[''].halt:
self.rq.state = runQueueCleanUp
def task_skip(self, task, reason):
@@ -1950,7 +2123,7 @@ class RunQueueExecute:
err = False
if not self.sqdone:
logger.debug('We could skip tasks %s', "\n".join(sorted(self.scenequeue_covered)))
- completeevent = sceneQueueComplete(self.sq_stats, self.rq)
+ completeevent = sceneQueueComplete(self.stats, self.rq)
bb.event.fire(completeevent, self.cfgData)
if self.sq_deferred:
logger.error("Scenequeue had deferred entries: %s" % pprint.pformat(self.sq_deferred))
@@ -1984,7 +2157,7 @@ class RunQueueExecute:
if x not in self.tasks_scenequeue_done:
logger.error("Task %s was never processed by the setscene code" % x)
err = True
- if len(self.rqdata.runtaskentries[x].depends) == 0 and x not in self.runq_buildable:
+ if not self.rqdata.runtaskentries[x].depends and x not in self.runq_buildable:
logger.error("Task %s was never marked as buildable by the setscene code" % x)
err = True
return err
@@ -2007,8 +2180,11 @@ class RunQueueExecute:
if not self.sqdone and self.can_start_task():
# Find the next setscene to run
for nexttask in self.sorted_setscene_tids:
- if nexttask in self.sq_buildable and nexttask not in self.sq_running and self.sqdata.stamps[nexttask] not in self.build_stamps.values():
- if nexttask not in self.sqdata.unskippable and len(self.sqdata.sq_revdeps[nexttask]) > 0 and self.sqdata.sq_revdeps[nexttask].issubset(self.scenequeue_covered) and self.check_dependencies(nexttask, self.sqdata.sq_revdeps[nexttask]):
+ if nexttask in self.sq_buildable and nexttask not in self.sq_running and self.sqdata.stamps[nexttask] not in self.build_stamps.values() and nexttask not in self.sq_harddep_deferred:
+ if nexttask not in self.sqdata.unskippable and self.sqdata.sq_revdeps[nexttask] and \
+ nexttask not in self.sq_needed_harddeps and \
+ self.sqdata.sq_revdeps[nexttask].issubset(self.scenequeue_covered) and \
+ self.check_dependencies(nexttask, self.sqdata.sq_revdeps[nexttask]):
if nexttask not in self.rqdata.target_tids:
logger.debug2("Skipping setscene for task %s" % nexttask)
self.sq_task_skip(nexttask)
@@ -2016,6 +2192,19 @@ class RunQueueExecute:
if nexttask in self.sq_deferred:
del self.sq_deferred[nexttask]
return True
+ if nexttask in self.sqdata.sq_harddeps_rev and not self.sqdata.sq_harddeps_rev[nexttask].issubset(self.scenequeue_covered | self.scenequeue_notcovered):
+ logger.debug2("Deferring %s due to hard dependencies" % nexttask)
+ updated = False
+ for dep in self.sqdata.sq_harddeps_rev[nexttask]:
+ if dep not in self.sq_needed_harddeps:
+ logger.debug2("Enabling task %s as it is a hard dependency" % dep)
+ self.sq_buildable.add(dep)
+ self.sq_needed_harddeps.add(dep)
+ updated = True
+ self.sq_harddep_deferred.add(nexttask)
+ if updated:
+ return True
+ continue
# If covered tasks are running, need to wait for them to complete
for t in self.sqdata.sq_covered_tasks[nexttask]:
if t in self.runq_running and t not in self.runq_complete:
@@ -2061,28 +2250,42 @@ class RunQueueExecute:
self.sq_task_failoutright(task)
return True
- startevent = sceneQueueTaskStarted(task, self.sq_stats, self.rq)
+ startevent = sceneQueueTaskStarted(task, self.stats, self.rq)
bb.event.fire(startevent, self.cfgData)
- taskdepdata = self.sq_build_taskdepdata(task)
-
taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn]
- taskhash = self.rqdata.get_task_hash(task)
- unihash = self.rqdata.get_task_unihash(task)
+ realfn = bb.cache.virtualfn2realfn(taskfn)[0]
+ runtask = {
+ 'fn' : taskfn,
+ 'task' : task,
+ 'taskname' : taskname,
+ 'taskhash' : self.rqdata.get_task_hash(task),
+ 'unihash' : self.rqdata.get_task_unihash(task),
+ 'quieterrors' : True,
+ 'appends' : self.cooker.collections[mc].get_file_appends(taskfn),
+ 'layername' : self.cooker.collections[mc].calc_bbfile_priority(realfn)[2],
+ 'taskdepdata' : self.sq_build_taskdepdata(task),
+ 'dry_run' : False,
+ 'taskdep': taskdep,
+ 'fakerootenv' : self.rqdata.dataCaches[mc].fakerootenv[taskfn],
+ 'fakerootdirs' : self.rqdata.dataCaches[mc].fakerootdirs[taskfn],
+ 'fakerootnoenv' : self.rqdata.dataCaches[mc].fakerootnoenv[taskfn]
+ }
+
if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not self.cooker.configuration.dry_run:
if not mc in self.rq.fakeworker:
self.rq.start_fakeworker(self, mc)
- self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, True, self.cooker.collections[mc].get_file_appends(taskfn), taskdepdata, False)) + b"</runtask>")
+ RunQueue.send_pickled_data(self.rq.fakeworker[mc].process, runtask, "runtask")
self.rq.fakeworker[mc].process.stdin.flush()
else:
- self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, True, self.cooker.collections[mc].get_file_appends(taskfn), taskdepdata, False)) + b"</runtask>")
+ RunQueue.send_pickled_data(self.rq.worker[mc].process, runtask, "runtask")
self.rq.worker[mc].process.stdin.flush()
- self.build_stamps[task] = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn, noextra=True)
+ self.build_stamps[task] = bb.parse.siggen.stampfile_mcfn(taskname, taskfn, extrainfo=False)
self.build_stamps2.append(self.build_stamps[task])
self.sq_running.add(task)
self.sq_live.add(task)
- self.sq_stats.taskActive()
+ self.stats.updateActiveSetscene(len(self.sq_live))
if self.can_start_task():
return True
@@ -2113,9 +2316,9 @@ class RunQueueExecute:
if task is not None:
(mc, fn, taskname, taskfn) = split_tid_mcfn(task)
- if self.rqdata.setscenewhitelist is not None:
- if self.check_setscenewhitelist(task):
- self.task_fail(task, "setscene whitelist")
+ if self.rqdata.setscene_ignore_tasks is not None:
+ if self.check_setscene_ignore_tasks(task):
+ self.task_fail(task, "setscene ignore_tasks")
return True
if task in self.tasks_covered:
@@ -2138,18 +2341,32 @@ class RunQueueExecute:
self.runq_running.add(task)
self.stats.taskActive()
if not (self.cooker.configuration.dry_run or self.rqdata.setscene_enforce):
- bb.build.make_stamp(taskname, self.rqdata.dataCaches[mc], taskfn)
+ bb.build.make_stamp_mcfn(taskname, taskfn)
self.task_complete(task)
return True
else:
startevent = runQueueTaskStarted(task, self.stats, self.rq)
bb.event.fire(startevent, self.cfgData)
- taskdepdata = self.build_taskdepdata(task)
-
taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn]
- taskhash = self.rqdata.get_task_hash(task)
- unihash = self.rqdata.get_task_unihash(task)
+ realfn = bb.cache.virtualfn2realfn(taskfn)[0]
+ runtask = {
+ 'fn' : taskfn,
+ 'task' : task,
+ 'taskname' : taskname,
+ 'taskhash' : self.rqdata.get_task_hash(task),
+ 'unihash' : self.rqdata.get_task_unihash(task),
+ 'quieterrors' : False,
+ 'appends' : self.cooker.collections[mc].get_file_appends(taskfn),
+ 'layername' : self.cooker.collections[mc].calc_bbfile_priority(realfn)[2],
+ 'taskdepdata' : self.build_taskdepdata(task),
+ 'dry_run' : self.rqdata.setscene_enforce,
+ 'taskdep': taskdep,
+ 'fakerootenv' : self.rqdata.dataCaches[mc].fakerootenv[taskfn],
+ 'fakerootdirs' : self.rqdata.dataCaches[mc].fakerootdirs[taskfn],
+ 'fakerootnoenv' : self.rqdata.dataCaches[mc].fakerootnoenv[taskfn]
+ }
+
if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not (self.cooker.configuration.dry_run or self.rqdata.setscene_enforce):
if not mc in self.rq.fakeworker:
try:
@@ -2159,31 +2376,31 @@ class RunQueueExecute:
self.rq.state = runQueueFailed
self.stats.taskFailed()
return True
- self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, False, self.cooker.collections[mc].get_file_appends(taskfn), taskdepdata, self.rqdata.setscene_enforce)) + b"</runtask>")
+ RunQueue.send_pickled_data(self.rq.fakeworker[mc].process, runtask, "runtask")
self.rq.fakeworker[mc].process.stdin.flush()
else:
- self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, False, self.cooker.collections[mc].get_file_appends(taskfn), taskdepdata, self.rqdata.setscene_enforce)) + b"</runtask>")
+ RunQueue.send_pickled_data(self.rq.worker[mc].process, runtask, "runtask")
self.rq.worker[mc].process.stdin.flush()
- self.build_stamps[task] = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn, noextra=True)
+ self.build_stamps[task] = bb.parse.siggen.stampfile_mcfn(taskname, taskfn, extrainfo=False)
self.build_stamps2.append(self.build_stamps[task])
self.runq_running.add(task)
self.stats.taskActive()
if self.can_start_task():
return True
- if self.stats.active > 0 or self.sq_stats.active > 0:
+ if self.stats.active > 0 or self.sq_live:
self.rq.read_workers()
return self.rq.active_fds()
# No more tasks can be run. If we have deferred setscene tasks we should run them.
if self.sq_deferred:
- tid = self.sq_deferred.pop(list(self.sq_deferred.keys())[0])
- logger.warning("Runqeueue deadlocked on deferred tasks, forcing task %s" % tid)
- self.sq_task_failoutright(tid)
+ deferred_tid = list(self.sq_deferred.keys())[0]
+ blocking_tid = self.sq_deferred.pop(deferred_tid)
+ logger.warning("Runqueue deadlocked on deferred tasks, forcing task %s blocked by %s" % (deferred_tid, blocking_tid))
return True
- if len(self.failed_tids) != 0:
+ if self.failed_tids:
self.rq.state = runQueueFailed
return True
@@ -2216,6 +2433,22 @@ class RunQueueExecute:
ret.add(dep)
return ret
+ # Build the individual cache entries in advance once to save time
+ def build_taskdepdata_cache(self):
+ taskdepdata_cache = {}
+ for task in self.rqdata.runtaskentries:
+ (mc, fn, taskname, taskfn) = split_tid_mcfn(task)
+ pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn]
+ deps = self.rqdata.runtaskentries[task].depends
+ provides = self.rqdata.dataCaches[mc].fn_provides[taskfn]
+ taskhash = self.rqdata.runtaskentries[task].hash
+ unihash = self.rqdata.runtaskentries[task].unihash
+ deps = self.filtermcdeps(task, mc, deps)
+ hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn]
+ taskdepdata_cache[task] = [pn, taskname, fn, deps, provides, taskhash, unihash, hashfn]
+
+ self.taskdepdata_cache = taskdepdata_cache
+
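# Editor's sketch (not part of the patch): each cached taskdepdata record is a
# positional list, which is what lets build_taskdepdata() below refresh just
# the unihash field and walk the dependency field without re-querying caches:
#
#   index 0: pn        index 1: taskname   index 2: fn       index 3: deps
#   index 4: provides  index 5: taskhash   index 6: unihash  index 7: hashfn
#
# Field 6 is rewritten on every call because unihashes can change as hash
# equivalence results arrive; field 3 drives the breadth-first walk.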
# We filter out multiconfig dependencies from taskdepdata we pass to the tasks
# as most code can't handle them
def build_taskdepdata(self, task):
@@ -2227,15 +2460,9 @@ class RunQueueExecute:
while next:
additional = []
for revdep in next:
- (mc, fn, taskname, taskfn) = split_tid_mcfn(revdep)
- pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn]
- deps = self.rqdata.runtaskentries[revdep].depends
- provides = self.rqdata.dataCaches[mc].fn_provides[taskfn]
- taskhash = self.rqdata.runtaskentries[revdep].hash
- unihash = self.rqdata.runtaskentries[revdep].unihash
- deps = self.filtermcdeps(task, mc, deps)
- taskdepdata[revdep] = [pn, taskname, fn, deps, provides, taskhash, unihash]
- for revdep2 in deps:
+ self.taskdepdata_cache[revdep][6] = self.rqdata.runtaskentries[revdep].unihash
+ taskdepdata[revdep] = self.taskdepdata_cache[revdep]
+ for revdep2 in self.taskdepdata_cache[revdep][3]:
if revdep2 not in taskdepdata:
additional.append(revdep2)
next = additional
@@ -2249,7 +2476,7 @@ class RunQueueExecute:
return
notcovered = set(self.scenequeue_notcovered)
- notcovered |= self.cantskip
+ notcovered |= self.sqdata.cantskip
for tid in self.scenequeue_notcovered:
notcovered |= self.sqdata.sq_covered_tasks[tid]
notcovered |= self.sqdata.unskippable.difference(self.rqdata.runq_setscene_tids)
@@ -2262,7 +2489,7 @@ class RunQueueExecute:
covered.intersection_update(self.tasks_scenequeue_done)
for tid in notcovered | covered:
- if len(self.rqdata.runtaskentries[tid].depends) == 0:
+ if not self.rqdata.runtaskentries[tid].depends:
self.setbuildable(tid)
elif self.rqdata.runtaskentries[tid].depends.issubset(self.runq_complete):
self.setbuildable(tid)
@@ -2304,6 +2531,9 @@ class RunQueueExecute:
self.rqdata.runtaskentries[hashtid].unihash = unihash
bb.parse.siggen.set_unihash(hashtid, unihash)
toprocess.add(hashtid)
+ if torehash:
+ # Need to save after set_unihash above
+ bb.parse.siggen.save_unitaskhashes()
# Work out all tasks which depend upon these
total = set()
@@ -2321,7 +2551,7 @@ class RunQueueExecute:
# Now iterate those tasks in dependency order to regenerate their taskhash/unihash
next = set()
for p in total:
- if len(self.rqdata.runtaskentries[p].depends) == 0:
+ if not self.rqdata.runtaskentries[p].depends:
next.add(p)
elif self.rqdata.runtaskentries[p].depends.isdisjoint(total):
next.add(p)
@@ -2331,11 +2561,10 @@ class RunQueueExecute:
current = next.copy()
next = set()
for tid in current:
- if len(self.rqdata.runtaskentries[p].depends) and not self.rqdata.runtaskentries[tid].depends.isdisjoint(total):
+ if self.rqdata.runtaskentries[tid].depends and not self.rqdata.runtaskentries[tid].depends.isdisjoint(total):
continue
orighash = self.rqdata.runtaskentries[tid].hash
- dc = bb.parse.siggen.get_data_caches(self.rqdata.dataCaches, mc_from_tid(tid))
- newhash = bb.parse.siggen.get_taskhash(tid, self.rqdata.runtaskentries[tid].depends, dc)
+ newhash = bb.parse.siggen.get_taskhash(tid, self.rqdata.runtaskentries[tid].depends, self.rqdata.dataCaches)
origuni = self.rqdata.runtaskentries[tid].unihash
newuni = bb.parse.siggen.get_unihash(tid)
# FIXME, need to check it can come from sstate at all for determinism?
@@ -2361,9 +2590,9 @@ class RunQueueExecute:
if changed:
for mc in self.rq.worker:
- self.rq.worker[mc].process.stdin.write(b"<newtaskhashes>" + pickle.dumps(bb.parse.siggen.get_taskhashes()) + b"</newtaskhashes>")
+ RunQueue.send_pickled_data(self.rq.worker[mc].process, bb.parse.siggen.get_taskhashes(), "newtaskhashes")
for mc in self.rq.fakeworker:
- self.rq.fakeworker[mc].process.stdin.write(b"<newtaskhashes>" + pickle.dumps(bb.parse.siggen.get_taskhashes()) + b"</newtaskhashes>")
+ RunQueue.send_pickled_data(self.rq.fakeworker[mc].process, bb.parse.siggen.get_taskhashes(), "newtaskhashes")
hashequiv_logger.debug(pprint.pformat("Tasks changed:\n%s" % (changed)))
@@ -2397,7 +2626,7 @@ class RunQueueExecute:
self.tasks_scenequeue_done.remove(tid)
for dep in self.sqdata.sq_covered_tasks[tid]:
if dep in self.runq_complete and dep not in self.runq_tasksrun:
- bb.error("Task %s marked as completed but now needing to rerun? Aborting build." % dep)
+ bb.error("Task %s marked as completed but now needing to rerun? Halting build." % dep)
self.failed_tids.append(tid)
self.rq.state = runQueueCleanUp
return
@@ -2410,17 +2639,6 @@ class RunQueueExecute:
self.sq_buildable.remove(tid)
if tid in self.sq_running:
self.sq_running.remove(tid)
- harddepfail = False
- for t in self.sqdata.sq_harddeps:
- if tid in self.sqdata.sq_harddeps[t] and t in self.scenequeue_notcovered:
- harddepfail = True
- break
- if not harddepfail and self.sqdata.sq_revdeps[tid].issubset(self.scenequeue_covered | self.scenequeue_notcovered):
- if tid not in self.sq_buildable:
- self.sq_buildable.add(tid)
- if len(self.sqdata.sq_revdeps[tid]) == 0:
- self.sq_buildable.add(tid)
-
if tid in self.sqdata.outrightfail:
self.sqdata.outrightfail.remove(tid)
if tid in self.scenequeue_notcovered:
@@ -2431,7 +2649,7 @@ class RunQueueExecute:
self.scenequeue_notneeded.remove(tid)
(mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
- self.sqdata.stamps[tid] = bb.build.stampfile(taskname + "_setscene", self.rqdata.dataCaches[mc], taskfn, noextra=True)
+ self.sqdata.stamps[tid] = bb.parse.siggen.stampfile_mcfn(taskname, taskfn, extrainfo=False)
if tid in self.stampcache:
del self.stampcache[tid]
@@ -2439,25 +2657,52 @@ class RunQueueExecute:
if tid in self.build_stamps:
del self.build_stamps[tid]
- update_tasks.append((tid, harddepfail, tid in self.sqdata.valid))
+ update_tasks.append(tid)
+
+ update_tasks2 = []
+ for tid in update_tasks:
+ harddepfail = False
+ for t in self.sqdata.sq_harddeps_rev[tid]:
+ if t in self.scenequeue_notcovered:
+ harddepfail = True
+ break
+ if not harddepfail and self.sqdata.sq_revdeps[tid].issubset(self.scenequeue_covered | self.scenequeue_notcovered):
+ if tid not in self.sq_buildable:
+ self.sq_buildable.add(tid)
+ if not self.sqdata.sq_revdeps[tid]:
+ self.sq_buildable.add(tid)
+
+ update_tasks2.append((tid, harddepfail, tid in self.sqdata.valid))
- if update_tasks:
+ if update_tasks2:
self.sqdone = False
- update_scenequeue_data([t[0] for t in update_tasks], self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self, summary=False)
+ for mc in sorted(self.sqdata.multiconfigs):
+ for tid in sorted([t[0] for t in update_tasks2]):
+ if mc_from_tid(tid) != mc:
+ continue
+ h = pending_hash_index(tid, self.rqdata)
+ if h in self.sqdata.hashes and tid != self.sqdata.hashes[h]:
+ self.sq_deferred[tid] = self.sqdata.hashes[h]
+ bb.note("Deferring %s after %s" % (tid, self.sqdata.hashes[h]))
+ update_scenequeue_data([t[0] for t in update_tasks2], self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self, summary=False)
- for (tid, harddepfail, origvalid) in update_tasks:
+ for (tid, harddepfail, origvalid) in update_tasks2:
if tid in self.sqdata.valid and not origvalid:
hashequiv_logger.verbose("Setscene task %s became valid" % tid)
if harddepfail:
+ logger.debug2("%s has an unavailable hard dependency so skipping" % (tid))
self.sq_task_failoutright(tid)
if changed:
+ self.stats.updateCovered(len(self.scenequeue_covered), len(self.scenequeue_notcovered))
+ self.sq_needed_harddeps = set()
+ self.sq_harddep_deferred = set()
self.holdoff_need_update = True
def scenequeue_updatecounters(self, task, fail=False):
- for dep in sorted(self.sqdata.sq_deps[task]):
- if fail and task in self.sqdata.sq_harddeps and dep in self.sqdata.sq_harddeps[task]:
+ if fail and task in self.sqdata.sq_harddeps:
+ for dep in sorted(self.sqdata.sq_harddeps[task]):
if dep in self.scenequeue_covered or dep in self.scenequeue_notcovered:
# dependency could be already processed, e.g. noexec setscene task
continue
@@ -2467,6 +2712,7 @@ class RunQueueExecute:
logger.debug2("%s was unavailable and is a hard dependency of %s so skipping" % (task, dep))
self.sq_task_failoutright(dep)
continue
+ for dep in sorted(self.sqdata.sq_deps[task]):
if self.sqdata.sq_revdeps[dep].issubset(self.scenequeue_covered | self.scenequeue_notcovered):
if dep not in self.sq_buildable:
self.sq_buildable.add(dep)
@@ -2485,6 +2731,14 @@ class RunQueueExecute:
new.add(dep)
next = new
+ # If this task was one which other setscene tasks have a hard dependency upon, we need
+ # to walk through the hard dependencies and allow execution of those which have completed dependencies.
+ if task in self.sqdata.sq_harddeps:
+ for dep in self.sq_harddep_deferred.copy():
+ if self.sqdata.sq_harddeps_rev[dep].issubset(self.scenequeue_covered | self.scenequeue_notcovered):
+ self.sq_harddep_deferred.remove(dep)
+
+ self.stats.updateCovered(len(self.scenequeue_covered), len(self.scenequeue_notcovered))
self.holdoff_need_update = True
def sq_task_completeoutright(self, task):
@@ -2499,22 +2753,20 @@ class RunQueueExecute:
self.scenequeue_updatecounters(task)
def sq_check_taskfail(self, task):
- if self.rqdata.setscenewhitelist is not None:
+ if self.rqdata.setscene_ignore_tasks is not None:
realtask = task.split('_setscene')[0]
(mc, fn, taskname, taskfn) = split_tid_mcfn(realtask)
pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn]
- if not check_setscene_enforce_whitelist(pn, taskname, self.rqdata.setscenewhitelist):
+ if not check_setscene_enforce_ignore_tasks(pn, taskname, self.rqdata.setscene_ignore_tasks):
logger.error('Task %s.%s failed' % (pn, taskname + "_setscene"))
self.rq.state = runQueueCleanUp
def sq_task_complete(self, task):
- self.sq_stats.taskCompleted()
- bb.event.fire(sceneQueueTaskCompleted(task, self.sq_stats, self.rq), self.cfgData)
+ bb.event.fire(sceneQueueTaskCompleted(task, self.stats, self.rq), self.cfgData)
self.sq_task_completeoutright(task)
def sq_task_fail(self, task, result):
- self.sq_stats.taskFailed()
- bb.event.fire(sceneQueueTaskFailed(task, self.sq_stats, result, self), self.cfgData)
+ bb.event.fire(sceneQueueTaskFailed(task, self.stats, result, self), self.cfgData)
self.scenequeue_notcovered.add(task)
self.scenequeue_updatecounters(task, True)
self.sq_check_taskfail(task)
@@ -2522,8 +2774,6 @@ class RunQueueExecute:
def sq_task_failoutright(self, task):
self.sq_running.add(task)
self.sq_buildable.add(task)
- self.sq_stats.taskSkipped()
- self.sq_stats.taskCompleted()
self.scenequeue_notcovered.add(task)
self.scenequeue_updatecounters(task, True)
@@ -2531,8 +2781,6 @@ class RunQueueExecute:
self.sq_running.add(task)
self.sq_buildable.add(task)
self.sq_task_completeoutright(task)
- self.sq_stats.taskSkipped()
- self.sq_stats.taskCompleted()
def sq_build_taskdepdata(self, task):
def getsetscenedeps(tid):
@@ -2563,7 +2811,8 @@ class RunQueueExecute:
provides = self.rqdata.dataCaches[mc].fn_provides[taskfn]
taskhash = self.rqdata.runtaskentries[revdep].hash
unihash = self.rqdata.runtaskentries[revdep].unihash
- taskdepdata[revdep] = [pn, taskname, fn, deps, provides, taskhash, unihash]
+ hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn]
+ taskdepdata[revdep] = [pn, taskname, fn, deps, provides, taskhash, unihash, hashfn]
for revdep2 in deps:
if revdep2 not in taskdepdata:
additional.append(revdep2)
@@ -2572,8 +2821,8 @@ class RunQueueExecute:
#bb.note("Task %s: " % task + str(taskdepdata).replace("], ", "],\n"))
return taskdepdata
- def check_setscenewhitelist(self, tid):
- # Check task that is going to run against the whitelist
+ def check_setscene_ignore_tasks(self, tid):
+ # Check task that is going to run against the ignore tasks list
(mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
# Ignore covered tasks
if tid in self.tasks_covered:
@@ -2587,14 +2836,15 @@ class RunQueueExecute:
return False
pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn]
- if not check_setscene_enforce_whitelist(pn, taskname, self.rqdata.setscenewhitelist):
+ if not check_setscene_enforce_ignore_tasks(pn, taskname, self.rqdata.setscene_ignore_tasks):
if tid in self.rqdata.runq_setscene_tids:
- msg = 'Task %s.%s attempted to execute unexpectedly and should have been setscened' % (pn, taskname)
+ msg = ['Task %s.%s attempted to execute unexpectedly and should have been setscened' % (pn, taskname)]
else:
- msg = 'Task %s.%s attempted to execute unexpectedly' % (pn, taskname)
+ msg = ['Task %s.%s attempted to execute unexpectedly' % (pn, taskname)]
for t in self.scenequeue_notcovered:
- msg = msg + "\nTask %s, unihash %s, taskhash %s" % (t, self.rqdata.runtaskentries[t].unihash, self.rqdata.runtaskentries[t].hash)
- logger.error(msg + '\nThis is usually due to missing setscene tasks. Those missing in this build were: %s' % pprint.pformat(self.scenequeue_notcovered))
+ msg.append("\nTask %s, unihash %s, taskhash %s" % (t, self.rqdata.runtaskentries[t].unihash, self.rqdata.runtaskentries[t].hash))
+ msg.append('\nThis is usually due to missing setscene tasks. Those missing in this build were: %s' % pprint.pformat(self.scenequeue_notcovered))
+ logger.error("".join(msg))
return True
return False
@@ -2606,6 +2856,7 @@ class SQData(object):
self.sq_revdeps = {}
# Injected inter-setscene task dependencies
self.sq_harddeps = {}
+ self.sq_harddeps_rev = {}
# Cache of stamp files so duplicates can't run in parallel
self.stamps = {}
# Setscene tasks directly depended upon by the build
@@ -2615,12 +2866,17 @@ class SQData(object):
# A list of normal tasks a setscene task covers
self.sq_covered_tasks = {}
-def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
+def build_scenequeue_data(sqdata, rqdata, sqrq):
sq_revdeps = {}
sq_revdeps_squash = {}
sq_collated_deps = {}
+ # We can't skip specified target tasks which aren't setscene tasks
+ sqdata.cantskip = set(rqdata.target_tids)
+ sqdata.cantskip.difference_update(rqdata.runq_setscene_tids)
+ sqdata.cantskip.intersection_update(rqdata.runtaskentries)
+
# We need to construct a dependency graph for the setscene functions. Intermediate
# dependencies between the setscene tasks only complicate the code. This code
# therefore aims to collapse the huge runqueue dependency tree into a smaller one
@@ -2633,7 +2889,7 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
for tid in rqdata.runtaskentries:
sq_revdeps[tid] = copy.copy(rqdata.runtaskentries[tid].revdeps)
sq_revdeps_squash[tid] = set()
- if (len(sq_revdeps[tid]) == 0) and tid not in rqdata.runq_setscene_tids:
+ if not sq_revdeps[tid] and tid not in rqdata.runq_setscene_tids:
#bb.warn("Added endpoint %s" % (tid))
endpoints[tid] = set()
@@ -2667,16 +2923,15 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
sq_revdeps_squash[point] = set()
if point in rqdata.runq_setscene_tids:
sq_revdeps_squash[point] = tasks
- tasks = set()
continue
for dep in rqdata.runtaskentries[point].depends:
if point in sq_revdeps[dep]:
sq_revdeps[dep].remove(point)
if tasks:
sq_revdeps_squash[dep] |= tasks
- if len(sq_revdeps[dep]) == 0 and dep not in rqdata.runq_setscene_tids:
+ if not sq_revdeps[dep] and dep not in rqdata.runq_setscene_tids:
newendpoints[dep] = task
- if len(newendpoints) != 0:
+ if newendpoints:
process_endpoints(newendpoints)
process_endpoints(endpoints)
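# Editor's sketch (not part of the patch; task names are hypothetical): the
# endpoint walk above collapses the full task graph onto its setscene tasks.
# For a chain where
#
#   a:do_package (an endpoint: nothing depends on it)
#     depends on a:do_install (a setscene task)
#       depends on a:do_fetch
#
# processing starts at the endpoint a:do_package, walks depends-links
# backwards accumulating the plain tasks it passes, and records them in
# sq_revdeps_squash for a:do_install once that setscene boundary is reached.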
@@ -2688,16 +2943,16 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
# Take the build endpoints (no revdeps) and find the sstate tasks they depend upon
new = True
for tid in rqdata.runtaskentries:
- if len(rqdata.runtaskentries[tid].revdeps) == 0:
+ if not rqdata.runtaskentries[tid].revdeps:
sqdata.unskippable.add(tid)
- sqdata.unskippable |= sqrq.cantskip
+ sqdata.unskippable |= sqdata.cantskip
while new:
new = False
orig = sqdata.unskippable.copy()
for tid in sorted(orig, reverse=True):
if tid in rqdata.runq_setscene_tids:
continue
- if len(rqdata.runtaskentries[tid].depends) == 0:
+ if not rqdata.runtaskentries[tid].depends:
# These are tasks which have no setscene tasks in their chain, need to mark as directly buildable
sqrq.setbuildable(tid)
sqdata.unskippable |= rqdata.runtaskentries[tid].depends
@@ -2712,8 +2967,8 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
for taskcounter, tid in enumerate(rqdata.runtaskentries):
if tid in rqdata.runq_setscene_tids:
pass
- elif len(sq_revdeps_squash[tid]) != 0:
- bb.msg.fatal("RunQueue", "Something went badly wrong during scenequeue generation, aborting. Please report this problem.")
+ elif sq_revdeps_squash[tid]:
+ bb.msg.fatal("RunQueue", "Something went badly wrong during scenequeue generation, halting. Please report this problem.")
else:
del sq_revdeps_squash[tid]
rqdata.init_progress_reporter.update(taskcounter)
@@ -2727,7 +2982,9 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
(mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
realtid = tid + "_setscene"
idepends = rqdata.taskData[mc].taskentries[realtid].idepends
- sqdata.stamps[tid] = bb.build.stampfile(taskname + "_setscene", rqdata.dataCaches[mc], taskfn, noextra=True)
+ sqdata.stamps[tid] = bb.parse.siggen.stampfile_mcfn(taskname, taskfn, extrainfo=False)
+
+ sqdata.sq_harddeps_rev[tid] = set()
for (depname, idependtask) in idepends:
if depname not in rqdata.taskData[mc].build_targets:
@@ -2740,20 +2997,15 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
if deptid not in rqdata.runtaskentries:
bb.msg.fatal("RunQueue", "Task %s depends upon non-existent task %s:%s" % (realtid, depfn, idependtask))
+ logger.debug2("Adding hard setscene dependency %s for %s" % (deptid, tid))
+
if not deptid in sqdata.sq_harddeps:
sqdata.sq_harddeps[deptid] = set()
sqdata.sq_harddeps[deptid].add(tid)
-
- sq_revdeps_squash[tid].add(deptid)
- # Have to zero this to avoid circular dependencies
- sq_revdeps_squash[deptid] = set()
+ sqdata.sq_harddeps_rev[tid].add(deptid)
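# Editor's sketch of the two maps built above (example tids are hypothetical):
#   sqdata.sq_harddeps[deptid]  -> {tids that must wait for deptid}
#   sqdata.sq_harddeps_rev[tid] -> {deptids that tid hard-depends on}
# e.g. if x:do_deploy declares a hard setscene dependency on y:do_populate:
#   sq_harddeps["y:do_populate"]   == {"x:do_deploy"}
#   sq_harddeps_rev["x:do_deploy"] == {"y:do_populate"}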
rqdata.init_progress_reporter.next_stage()
- for task in sqdata.sq_harddeps:
- for dep in sqdata.sq_harddeps[task]:
- sq_revdeps_squash[dep].add(task)
-
rqdata.init_progress_reporter.next_stage()
#for tid in sq_revdeps_squash:
@@ -2777,30 +3029,27 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
sqdata.multiconfigs = set()
for tid in sqdata.sq_revdeps:
sqdata.multiconfigs.add(mc_from_tid(tid))
- if len(sqdata.sq_revdeps[tid]) == 0:
+ if not sqdata.sq_revdeps[tid]:
sqrq.sq_buildable.add(tid)
- rqdata.init_progress_reporter.finish()
+ rqdata.init_progress_reporter.next_stage()
sqdata.noexec = set()
sqdata.stamppresent = set()
sqdata.valid = set()
- update_scenequeue_data(sqdata.sq_revdeps, sqdata, rqdata, rq, cooker, stampcache, sqrq, summary=True)
-
- # Compute a list of 'stale' sstate tasks where the current hash does not match the one
- # in any stamp files. Pass the list out to metadata as an event.
- found = {}
- for tid in rqdata.runq_setscene_tids:
- (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
- stamps = bb.build.find_stale_stamps(taskname, rqdata.dataCaches[mc], taskfn)
- if stamps:
- if mc not in found:
- found[mc] = {}
- found[mc][tid] = stamps
- for mc in found:
- event = bb.event.StaleSetSceneTasks(found[mc])
- bb.event.fire(event, cooker.databuilder.mcdata[mc])
+ sqdata.hashes = {}
+ sqrq.sq_deferred = {}
+ for mc in sorted(sqdata.multiconfigs):
+ for tid in sorted(sqdata.sq_revdeps):
+ if mc_from_tid(tid) != mc:
+ continue
+ h = pending_hash_index(tid, rqdata)
+ if h not in sqdata.hashes:
+ sqdata.hashes[h] = tid
+ else:
+ sqrq.sq_deferred[tid] = sqdata.hashes[h]
+ bb.debug(1, "Deferring %s after %s" % (tid, sqdata.hashes[h]))
def check_setscene_stamps(tid, rqdata, rq, stampcache, noexecstamp=False):
@@ -2809,7 +3058,7 @@ def check_setscene_stamps(tid, rqdata, rq, stampcache, noexecstamp=False):
taskdep = rqdata.dataCaches[mc].task_deps[taskfn]
if 'noexec' in taskdep and taskname in taskdep['noexec']:
- bb.build.make_stamp(taskname + "_setscene", rqdata.dataCaches[mc], taskfn)
+ bb.build.make_stamp_mcfn(taskname + "_setscene", taskfn)
return True, False
if rq.check_stamp_task(tid, taskname + "_setscene", cache=stampcache):
@@ -2839,43 +3088,34 @@ def update_scenequeue_data(tids, sqdata, rqdata, rq, cooker, stampcache, sqrq, s
if noexec:
sqdata.noexec.add(tid)
sqrq.sq_task_skip(tid)
+ logger.debug2("%s is noexec so skipping setscene" % (tid))
continue
if stamppresent:
sqdata.stamppresent.add(tid)
sqrq.sq_task_skip(tid)
+ logger.debug2("%s has a valid stamp, skipping" % (tid))
continue
tocheck.add(tid)
sqdata.valid |= rq.validate_hashes(tocheck, cooker.data, len(sqdata.stamppresent), False, summary=summary)
- sqdata.hashes = {}
- sqrq.sq_deferred = {}
- for mc in sorted(sqdata.multiconfigs):
- for tid in sorted(sqdata.sq_revdeps):
- if mc_from_tid(tid) != mc:
- continue
- if tid in sqdata.stamppresent:
- continue
- if tid in sqdata.valid:
- continue
- if tid in sqdata.noexec:
- continue
- if tid in sqrq.scenequeue_notcovered:
- continue
- if tid in sqrq.scenequeue_covered:
- continue
-
- h = pending_hash_index(tid, rqdata)
- if h not in sqdata.hashes:
- if tid in tids:
- sqdata.outrightfail.add(tid)
- sqdata.hashes[h] = tid
- else:
- sqrq.sq_deferred[tid] = sqdata.hashes[h]
- bb.note("Deferring %s after %s" % (tid, sqdata.hashes[h]))
-
+ for tid in tids:
+ if tid in sqdata.stamppresent:
+ continue
+ if tid in sqdata.valid:
+ continue
+ if tid in sqdata.noexec:
+ continue
+ if tid in sqrq.scenequeue_covered:
+ continue
+ if tid in sqrq.scenequeue_notcovered:
+ continue
+ if tid in sqrq.sq_deferred:
+ continue
+ sqdata.outrightfail.add(tid)
+ logger.debug2("%s already handled (fallthrough), skipping" % (tid))
class TaskFailure(Exception):
"""
@@ -3005,15 +3245,12 @@ class runQueuePipe():
if pipeout:
pipeout.close()
bb.utils.nonblockingfd(self.input)
- self.queue = b""
+ self.queue = bytearray()
self.d = d
self.rq = rq
self.rqexec = rqexec
self.fakerootlogs = fakerootlogs
- def setrunqueueexec(self, rqexec):
- self.rqexec = rqexec
-
def read(self):
for workers, name in [(self.rq.worker, "Worker"), (self.rq.fakeworker, "Fakeroot")]:
for worker in workers.values():
@@ -3024,13 +3261,13 @@ class runQueuePipe():
start = len(self.queue)
try:
- self.queue = self.queue + (self.input.read(102400) or b"")
+ self.queue.extend(self.input.read(102400) or b"")
except (OSError, IOError) as e:
if e.errno != errno.EAGAIN:
raise
end = len(self.queue)
found = True
- while found and len(self.queue):
+ while found and self.queue:
found = False
index = self.queue.find(b"</event>")
while index != -1 and self.queue.startswith(b"<event>"):
@@ -3068,16 +3305,16 @@ class runQueuePipe():
def close(self):
while self.read():
continue
- if len(self.queue) > 0:
+ if self.queue:
print("Warning, worker left partial message: %s" % self.queue)
self.input.close()
-def get_setscene_enforce_whitelist(d, targets):
+def get_setscene_enforce_ignore_tasks(d, targets):
if d.getVar('BB_SETSCENE_ENFORCE') != '1':
return None
- whitelist = (d.getVar("BB_SETSCENE_ENFORCE_WHITELIST") or "").split()
+ ignore_tasks = (d.getVar("BB_SETSCENE_ENFORCE_IGNORE_TASKS") or "").split()
outlist = []
- for item in whitelist[:]:
+ for item in ignore_tasks[:]:
if item.startswith('%:'):
for (mc, target, task, fn) in targets:
outlist.append(target + ':' + item.split(':')[1])
@@ -3085,12 +3322,12 @@ def get_setscene_enforce_whitelist(d, targets):
outlist.append(item)
return outlist
-def check_setscene_enforce_whitelist(pn, taskname, whitelist):
+def check_setscene_enforce_ignore_tasks(pn, taskname, ignore_tasks):
import fnmatch
- if whitelist is not None:
+ if ignore_tasks is not None:
item = '%s:%s' % (pn, taskname)
- for whitelist_item in whitelist:
- if fnmatch.fnmatch(item, whitelist_item):
+ for ignore_task in ignore_tasks:
+ if fnmatch.fnmatch(item, ignore_task):
return True
return False
return True
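# Editor's sketch (standalone, not part of the patch; the pattern list is
# hypothetical) of how the ignore-tasks matching above behaves: items are
# "pn:taskname" strings matched against fnmatch-style wildcard patterns.
import fnmatch

ignore_tasks = ["glibc:do_package", "*:do_deploy"]
item = "zlib:do_deploy"
print(any(fnmatch.fnmatch(item, p) for p in ignore_tasks))  # True, via "*:do_deploy"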
diff --git a/lib/bb/server/process.py b/lib/bb/server/process.py
index 3e99bcef8..76b189291 100644
--- a/lib/bb/server/process.py
+++ b/lib/bb/server/process.py
@@ -26,6 +26,9 @@ import errno
import re
import datetime
import pickle
+import traceback
+import gc
+import stat
import bb.server.xmlrpcserver
from bb import daemonize
from multiprocessing import queues
@@ -35,9 +38,46 @@ logger = logging.getLogger('BitBake')
class ProcessTimeout(SystemExit):
pass
+def currenttime():
+ return datetime.datetime.now().strftime('%H:%M:%S.%f')
+
def serverlog(msg):
- print(str(os.getpid()) + " " + datetime.datetime.now().strftime('%H:%M:%S.%f') + " " + msg)
- sys.stdout.flush()
+ print(str(os.getpid()) + " " + currenttime() + " " + msg)
+ # A flush here seems to trigger filesystem-sync-like behaviour and long hangs in the server
+ #sys.stdout.flush()
+
+#
+# When we have lockfile issues, try and find infomation about which process is
+# using the lockfile
+#
+def get_lockfile_process_msg(lockfile):
+ # Some systems may not have lsof available
+ procs = None
+ try:
+ procs = subprocess.check_output(["lsof", '-w', lockfile], stderr=subprocess.STDOUT)
+ except subprocess.CalledProcessError:
+ # File was deleted?
+ pass
+ except OSError as e:
+ if e.errno != errno.ENOENT:
+ raise
+ if procs is None:
+ # Fall back to fuser if lsof is unavailable
+ try:
+ procs = subprocess.check_output(["fuser", '-v', lockfile], stderr=subprocess.STDOUT)
+ except subprocess.CalledProcessError:
+ # File was deleted?
+ pass
+ except OSError as e:
+ if e.errno != errno.ENOENT:
+ raise
+ if procs:
+ return procs.decode("utf-8")
+ return None
+
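# Editor's usage sketch (path is hypothetical): the helper above returns the
# decoded lsof/fuser output, or None when neither tool reported anything.
#
#   msg = get_lockfile_process_msg("/srv/build/bitbake.lock")
#   if msg:
#       serverlog("Processes holding the lock:\n" + msg)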
+class idleFinish():
+ def __init__(self, msg):
+ self.msg = msg
class ProcessServer():
profile_filename = "profile.log"
@@ -56,12 +96,19 @@ class ProcessServer():
self.maxuiwait = 30
self.xmlrpc = False
+ self.idle = None
+ # Need a lock for _idlefuns changes
self._idlefuns = {}
+ self._idlefuncsLock = threading.Lock()
+ self.idle_cond = threading.Condition(self._idlefuncsLock)
self.bitbake_lock = lock
self.bitbake_lock_name = lockname
self.sock = sock
self.sockname = sockname
+ # It is possible the directory may be renamed. Cache the inode of the socket file
+ # so we can tell if things changed.
+ self.sockinode = os.stat(self.sockname)[stat.ST_INO]
self.server_timeout = server_timeout
self.timeout = self.server_timeout
@@ -70,7 +117,9 @@ class ProcessServer():
def register_idle_function(self, function, data):
"""Register a function to be called while the server is idle"""
assert hasattr(function, '__call__')
- self._idlefuns[function] = data
+ with bb.utils.lock_timeout(self._idlefuncsLock):
+ self._idlefuns[function] = data
+ serverlog("Registering idle function %s" % str(function))
def run(self):
@@ -109,6 +158,31 @@ class ProcessServer():
return ret
+ def _idle_check(self):
+ return len(self._idlefuns) == 0 and self.cooker.command.currentAsyncCommand is None
+
+ def wait_for_idle(self, timeout=30):
+ # Wait for the idle loop to have cleared
+ with bb.utils.lock_timeout(self._idlefuncsLock):
+ return self.idle_cond.wait_for(self._idle_check, timeout) is not False
+
+ def set_async_cmd(self, cmd):
+ with bb.utils.lock_timeout(self._idlefuncsLock):
+ ret = self.idle_cond.wait_for(self._idle_check, 30)
+ if ret is False:
+ return False
+ self.cooker.command.currentAsyncCommand = cmd
+ return True
+
+ def clear_async_cmd(self):
+ with bb.utils.lock_timeout(self._idlefuncsLock):
+ self.cooker.command.currentAsyncCommand = None
+ self.idle_cond.notify_all()
+
+ def get_async_cmd(self):
+ with bb.utils.lock_timeout(self._idlefuncsLock):
+ return self.cooker.command.currentAsyncCommand
+
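# Editor's sketch (standalone, hypothetical names) of the gating pattern used
# by wait_for_idle()/set_async_cmd()/clear_async_cmd() above: one Condition
# guards the shared slot, waiters block on a predicate, mutators notify.
import threading

_cond = threading.Condition()
_pending = {"cmd": None}

def try_claim(cmd, timeout=30):
    with _cond:
        # wait_for() returns the predicate's final value; False means timeout
        if not _cond.wait_for(lambda: _pending["cmd"] is None, timeout):
            return False
        _pending["cmd"] = cmd
        return True

def release():
    with _cond:
        _pending["cmd"] = None
        _cond.notify_all()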
def main(self):
self.cooker.pre_serve()
@@ -123,14 +197,19 @@ class ProcessServer():
fds.append(self.xmlrpc)
seendata = False
serverlog("Entering server connection loop")
+ serverlog("Lockfile is: %s\nSocket is %s (%s)" % (self.bitbake_lock_name, self.sockname, os.path.exists(self.sockname)))
def disconnect_client(self, fds):
- serverlog("Disconnecting Client")
+ serverlog("Disconnecting Client (socket: %s)" % os.path.exists(self.sockname))
if self.controllersock:
fds.remove(self.controllersock)
self.controllersock.close()
self.controllersock = False
if self.haveui:
+ # Wait for the idle loop to have cleared (30s max)
+ if not self.wait_for_idle(30):
+ serverlog("Idle loop didn't finish queued commands after 30s, exiting.")
+ self.quit = True
fds.remove(self.command_channel)
bb.event.unregister_UIHhandler(self.event_handle, True)
self.command_channel_reply.writer.close()
@@ -142,12 +221,12 @@ class ProcessServer():
self.cooker.clientComplete()
self.haveui = False
ready = select.select(fds,[],[],0)[0]
- if newconnections:
+ if newconnections and not self.quit:
serverlog("Starting new client")
conn = newconnections.pop(-1)
fds.append(conn)
self.controllersock = conn
- elif self.timeout is None and not ready:
+ elif not self.timeout and not ready:
serverlog("No timeout, exiting.")
self.quit = True
@@ -214,11 +293,14 @@ class ProcessServer():
continue
try:
serverlog("Running command %s" % command)
- self.command_channel_reply.send(self.cooker.command.runCommand(command))
- serverlog("Command Completed")
+ reply = self.cooker.command.runCommand(command, self)
+ serverlog("Sending reply %s" % repr(reply))
+ self.command_channel_reply.send(reply)
+ serverlog("Command Completed (socket: %s)" % os.path.exists(self.sockname))
except Exception as e:
- serverlog('Exception in server main event loop running command %s (%s)' % (command, str(e)))
- logger.exception('Exception in server main event loop running command %s (%s)' % (command, str(e)))
+ stack = traceback.format_exc()
+ serverlog('Exception in server main event loop running command %s (%s)' % (command, stack))
+ logger.exception('Exception in server main event loop running command %s (%s)' % (command, stack))
if self.xmlrpc in ready:
self.xmlrpc.handle_requests()
@@ -241,19 +323,25 @@ class ProcessServer():
ready = self.idle_commands(.1, fds)
- if len(threading.enumerate()) != 1:
- serverlog("More than one thread left?: " + str(threading.enumerate()))
+ if self.idle:
+ self.idle.join()
- serverlog("Exiting")
+ serverlog("Exiting (socket: %s)" % os.path.exists(self.sockname))
# Remove the socket file so we don't get any more connections to avoid races
+ # The build directory could have been renamed so if the file isn't the one we created
+ # we shouldn't delete it.
try:
- os.unlink(self.sockname)
- except:
- pass
+ sockinode = os.stat(self.sockname)[stat.ST_INO]
+ if sockinode == self.sockinode:
+ os.unlink(self.sockname)
+ else:
+ serverlog("bitbake.sock inode mismatch (%s vs %s), not deleting." % (sockinode, self.sockinode))
+ except Exception as err:
+ serverlog("Removing socket file '%s' failed (%s)" % (self.sockname, err))
self.sock.close()
try:
- self.cooker.shutdown(True)
+ self.cooker.shutdown(True, idle=False)
self.cooker.notifier.stop()
self.cooker.confignotifier.stop()
except:
@@ -261,6 +349,9 @@ class ProcessServer():
self.cooker.post_serve()
+ if len(threading.enumerate()) != 1:
+ serverlog("More than one thread left?: " + str(threading.enumerate()))
+
# Flush logs before we release the lock
sys.stdout.flush()
sys.stderr.flush()
@@ -276,20 +367,21 @@ class ProcessServer():
except FileNotFoundError:
return None
- lockcontents = get_lock_contents(lockfile)
- serverlog("Original lockfile contents: " + str(lockcontents))
-
lock.close()
lock = None
while not lock:
i = 0
lock = None
+ if not os.path.exists(os.path.dirname(lockfile)):
+ serverlog("Lockfile directory gone, exiting.")
+ return
+
while not lock and i < 30:
lock = bb.utils.lockfile(lockfile, shared=False, retry=False, block=False)
if not lock:
newlockcontents = get_lock_contents(lockfile)
- if newlockcontents != lockcontents:
+ if not newlockcontents[0].startswith((f"{os.getpid()}\n", f"{os.getpid()} ")):
# A new server was started, the lockfile contents changed, we can exit
serverlog("Lockfile now contains different contents, exiting: " + str(newlockcontents))
return
@@ -303,75 +395,108 @@ class ProcessServer():
return
if not lock:
- # Some systems may not have lsof available
- procs = None
+ procs = get_lockfile_process_msg(lockfile)
+ msg = ["Delaying shutdown due to active processes which appear to be holding bitbake.lock"]
+ if procs:
+ msg.append(":\n%s" % procs)
+ serverlog("".join(msg))
+
+ def idle_thread(self):
+ if self.cooker.configuration.profile:
+ try:
+ import cProfile as profile
+ except:
+ import profile
+ prof = profile.Profile()
+
+ ret = profile.Profile.runcall(prof, self.idle_thread_internal)
+
+ prof.dump_stats("profile-mainloop.log")
+ bb.utils.process_profilelog("profile-mainloop.log")
+ serverlog("Raw profiling information saved to profile-mainloop.log and processed statistics to profile-mainloop.log.processed")
+ else:
+ self.idle_thread_internal()
+
+ def idle_thread_internal(self):
+ def remove_idle_func(function):
+ with bb.utils.lock_timeout(self._idlefuncsLock):
+ del self._idlefuns[function]
+ self.idle_cond.notify_all()
+
+ while not self.quit:
+ nextsleep = 0.1
+ fds = []
+
+ with bb.utils.lock_timeout(self._idlefuncsLock):
+ items = list(self._idlefuns.items())
+
+ for function, data in items:
try:
- procs = subprocess.check_output(["lsof", '-w', lockfile], stderr=subprocess.STDOUT)
- except subprocess.CalledProcessError:
- # File was deleted?
- continue
- except OSError as e:
- if e.errno != errno.ENOENT:
- raise
- if procs is None:
- # Fall back to fuser if lsof is unavailable
- try:
- procs = subprocess.check_output(["fuser", '-v', lockfile], stderr=subprocess.STDOUT)
- except subprocess.CalledProcessError:
- # File was deleted?
+ retval = function(self, data, False)
+ if isinstance(retval, idleFinish):
+ serverlog("Removing idle function %s at idleFinish" % str(function))
+ remove_idle_func(function)
+ self.cooker.command.finishAsyncCommand(retval.msg)
+ nextsleep = None
+ elif retval is False:
+ serverlog("Removing idle function %s" % str(function))
+ remove_idle_func(function)
+ nextsleep = None
+ elif retval is True:
+ nextsleep = None
+ elif isinstance(retval, float) and nextsleep:
+ if (retval < nextsleep):
+ nextsleep = retval
+ elif nextsleep is None:
continue
- except OSError as e:
- if e.errno != errno.ENOENT:
- raise
+ else:
+ fds = fds + retval
+ except SystemExit:
+ raise
+ except Exception as exc:
+ if not isinstance(exc, bb.BBHandledException):
+ logger.exception('Running idle function')
+ remove_idle_func(function)
+ serverlog("Exception %s broke the idle_thread, exiting" % traceback.format_exc())
+ self.quit = True
- msg = "Delaying shutdown due to active processes which appear to be holding bitbake.lock"
- if procs:
- msg += ":\n%s" % str(procs.decode("utf-8"))
- serverlog(msg)
+ # Create new heartbeat event?
+ now = time.time()
+ if bb.event._heartbeat_enabled and now >= self.next_heartbeat:
+ # We might have missed heartbeats. Just trigger once in
+ # that case and continue after the usual delay.
+ self.next_heartbeat += self.heartbeat_seconds
+ if self.next_heartbeat <= now:
+ self.next_heartbeat = now + self.heartbeat_seconds
+ if hasattr(self.cooker, "data"):
+ heartbeat = bb.event.HeartbeatEvent(now)
+ try:
+ bb.event.fire(heartbeat, self.cooker.data)
+ except Exception as exc:
+ if not isinstance(exc, bb.BBHandledException):
+ logger.exception('Running heartbeat function')
+ serverlog("Exception %s broke in idle_thread, exiting" % traceback.format_exc())
+ self.quit = True
+ if nextsleep and bb.event._heartbeat_enabled and now + nextsleep > self.next_heartbeat:
+ # Shorten timeout so that we wake up in time for
+ # the heartbeat.
+ nextsleep = self.next_heartbeat - now
+
+ if nextsleep is not None:
+ select.select(fds,[],[],nextsleep)[0]
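# Editor's worked example of the heartbeat arithmetic above (times are
# hypothetical): with heartbeat_seconds = 1 and next_heartbeat = 100.0, if the
# loop wakes late at now = 103.7, a single heartbeat fires; next_heartbeat
# first becomes 101.0 (+= 1), and since 101.0 <= 103.7 it is reset to 104.7
# (now + 1), so missed beats are skipped rather than replayed. The nextsleep
# shortening then ensures the next select() wakes no later than 104.7.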
def idle_commands(self, delay, fds=None):
nextsleep = delay
if not fds:
fds = []
- for function, data in list(self._idlefuns.items()):
- try:
- retval = function(self, data, False)
- if retval is False:
- del self._idlefuns[function]
- nextsleep = None
- elif retval is True:
- nextsleep = None
- elif isinstance(retval, float) and nextsleep:
- if (retval < nextsleep):
- nextsleep = retval
- elif nextsleep is None:
- continue
- else:
- fds = fds + retval
- except SystemExit:
- raise
- except Exception as exc:
- if not isinstance(exc, bb.BBHandledException):
- logger.exception('Running idle function')
- del self._idlefuns[function]
- self.quit = True
-
- # Create new heartbeat event?
- now = time.time()
- if now >= self.next_heartbeat:
- # We might have missed heartbeats. Just trigger once in
- # that case and continue after the usual delay.
- self.next_heartbeat += self.heartbeat_seconds
- if self.next_heartbeat <= now:
- self.next_heartbeat = now + self.heartbeat_seconds
- if hasattr(self.cooker, "data"):
- heartbeat = bb.event.HeartbeatEvent(now)
- bb.event.fire(heartbeat, self.cooker.data)
- if nextsleep and now + nextsleep > self.next_heartbeat:
- # Shorten timeout so that we we wake up in time for
- # the heartbeat.
- nextsleep = self.next_heartbeat - now
+ if not self.idle:
+ self.idle = threading.Thread(target=self.idle_thread)
+ self.idle.start()
+ elif self.idle and not self.idle.is_alive():
+ serverlog("Idle thread terminated, main thread exiting too")
+ bb.error("Idle thread terminated, main thread exiting too")
+ self.quit = True
if nextsleep is not None:
if self.xmlrpc:
@@ -391,12 +516,18 @@ class ServerCommunicator():
self.recv = recv
def runCommand(self, command):
- self.connection.send(command)
+ try:
+ self.connection.send(command)
+ except BrokenPipeError as e:
+ raise BrokenPipeError("bitbake-server might have died or been forcibly stopped, ie. OOM killed") from e
if not self.recv.poll(30):
- logger.info("No reply from server in 30s")
+ logger.info("No reply from server in 30s (for command %s at %s)" % (command[0], currenttime()))
if not self.recv.poll(30):
- raise ProcessTimeout("Timeout while waiting for a reply from the bitbake server (60s)")
- ret, exc = self.recv.get()
+ raise ProcessTimeout("Timeout while waiting for a reply from the bitbake server (60s at %s)" % currenttime())
+ try:
+ ret, exc = self.recv.get()
+ except EOFError as e:
+ raise EOFError("bitbake-server might have died or been forcibly stopped, ie. OOM killed") from e
# Should probably turn all exceptions in exc back into exceptions?
# For now, at least handle BBHandledException
if exc and ("BBHandledException" in exc or "SystemExit" in exc):
@@ -429,6 +560,7 @@ class BitBakeProcessServerConnection(object):
self.socket_connection = sock
def terminate(self):
+ self.events.close()
self.socket_connection.close()
self.connection.connection.close()
self.connection.recv.close()
@@ -439,13 +571,14 @@ start_log_datetime_format = '%Y-%m-%d %H:%M:%S.%f'
class BitBakeServer(object):
- def __init__(self, lock, sockname, featureset, server_timeout, xmlrpcinterface):
+ def __init__(self, lock, sockname, featureset, server_timeout, xmlrpcinterface, profile):
self.server_timeout = server_timeout
self.xmlrpcinterface = xmlrpcinterface
self.featureset = featureset
self.sockname = sockname
self.bitbake_lock = lock
+ self.profile = profile
self.readypipe, self.readypipein = os.pipe()
# Place the log in the builddirectory alongside the lock file
@@ -466,7 +599,7 @@ class BitBakeServer(object):
try:
r = ready.get()
except EOFError:
- # Trap the child exitting/closing the pipe and error out
+ # Trap the child exiting/closing the pipe and error out
r = None
if not r or r[0] != "r":
ready.close()
@@ -509,9 +642,9 @@ class BitBakeServer(object):
os.set_inheritable(self.bitbake_lock.fileno(), True)
os.set_inheritable(self.readypipein, True)
serverscript = os.path.realpath(os.path.dirname(__file__) + "/../../../bin/bitbake-server")
- os.execl(sys.executable, "bitbake-server", serverscript, "decafbad", str(self.bitbake_lock.fileno()), str(self.readypipein), self.logfile, self.bitbake_lock.name, self.sockname, str(self.server_timeout or 0), str(self.xmlrpcinterface[0]), str(self.xmlrpcinterface[1]))
+ os.execl(sys.executable, sys.executable, serverscript, "decafbad", str(self.bitbake_lock.fileno()), str(self.readypipein), self.logfile, self.bitbake_lock.name, self.sockname, str(self.server_timeout or 0), str(int(self.profile)), str(self.xmlrpcinterface[0]), str(self.xmlrpcinterface[1]))
-def execServer(lockfd, readypipeinfd, lockname, sockname, server_timeout, xmlrpcinterface):
+def execServer(lockfd, readypipeinfd, lockname, sockname, server_timeout, xmlrpcinterface, profile):
import bb.cookerdata
import bb.cooker
@@ -523,6 +656,7 @@ def execServer(lockfd, readypipeinfd, lockname, sockname, server_timeout, xmlrpc
# Create server control socket
if os.path.exists(sockname):
+ serverlog("WARNING: removing existing socket file '%s'" % sockname)
os.unlink(sockname)
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
@@ -539,7 +673,8 @@ def execServer(lockfd, readypipeinfd, lockname, sockname, server_timeout, xmlrpc
writer = ConnectionWriter(readypipeinfd)
try:
featureset = []
- cooker = bb.cooker.BBCooker(featureset, server.register_idle_function)
+ cooker = bb.cooker.BBCooker(featureset, server)
+ cooker.configuration.profile = profile
except bb.BBHandledException:
return None
writer.send("r")
@@ -549,7 +684,7 @@ def execServer(lockfd, readypipeinfd, lockname, sockname, server_timeout, xmlrpc
server.run()
finally:
- # Flush any ,essages/errors to the logfile before exit
+ # Flush any messages/errors to the logfile before exit
sys.stdout.flush()
sys.stderr.flush()
@@ -654,23 +789,18 @@ class BBUIEventQueue:
self.reader = ConnectionReader(readfd)
self.t = threading.Thread()
- self.t.setDaemon(True)
self.t.run = self.startCallbackHandler
self.t.start()
def getEvent(self):
- self.eventQueueLock.acquire()
-
- if len(self.eventQueue) == 0:
- self.eventQueueLock.release()
- return None
-
- item = self.eventQueue.pop(0)
+ with bb.utils.lock_timeout(self.eventQueueLock):
+ if len(self.eventQueue) == 0:
+ return None
- if len(self.eventQueue) == 0:
- self.eventQueueNotify.clear()
+ item = self.eventQueue.pop(0)
+ if len(self.eventQueue) == 0:
+ self.eventQueueNotify.clear()
- self.eventQueueLock.release()
return item
def waitEvent(self, delay):
@@ -678,10 +808,9 @@ class BBUIEventQueue:
return self.getEvent()
def queue_event(self, event):
- self.eventQueueLock.acquire()
- self.eventQueue.append(event)
- self.eventQueueNotify.set()
- self.eventQueueLock.release()
+ with bb.utils.lock_timeout(self.eventQueueLock):
+ self.eventQueue.append(event)
+ self.eventQueueNotify.set()
def send_event(self, event):
self.queue_event(pickle.loads(event))
@@ -690,13 +819,17 @@ class BBUIEventQueue:
bb.utils.set_process_name("UIEventQueue")
while True:
try:
- self.reader.wait()
- event = self.reader.get()
- self.queue_event(event)
- except EOFError:
+ ready = self.reader.wait(0.25)
+ if ready:
+ event = self.reader.get()
+ self.queue_event(event)
+ except (EOFError, OSError, TypeError):
# Easiest way to exit is to close the file descriptor to cause an exit
break
+
+ def close(self):
self.reader.close()
+ self.t.join()
class ConnectionReader(object):
@@ -711,7 +844,7 @@ class ConnectionReader(object):
return self.reader.poll(timeout)
def get(self):
- with self.rlock:
+ with bb.utils.lock_timeout(self.rlock):
res = self.reader.recv_bytes()
return multiprocessing.reduction.ForkingPickler.loads(res)
@@ -730,10 +863,31 @@ class ConnectionWriter(object):
# Why bb.event needs this I have no idea
self.event = self
+ def _send(self, obj):
+ gc.disable()
+ try:
+ with bb.utils.lock_timeout(self.wlock):
+ self.writer.send_bytes(obj)
+ finally:
+ gc.enable()
+
def send(self, obj):
obj = multiprocessing.reduction.ForkingPickler.dumps(obj)
- with self.wlock:
- self.writer.send_bytes(obj)
+ # See notes/code in CookerParser
+ # We must not terminate holding this lock else processes will hang.
+ # For SIGTERM, raising afterwards avoids this.
+ # For SIGINT, we don't want to have written partial data to the pipe.
+ # pthread_sigmask block/unblock would be nice but doesn't work, https://bugs.python.org/issue47139
+ process = multiprocessing.current_process()
+ if process and hasattr(process, "queue_signals"):
+ with bb.utils.lock_timeout(process.signal_threadlock):
+ process.queue_signals = True
+ self._send(obj)
+ process.queue_signals = False
+
+ while len(process.signal_received) > 0:
+ sig = process.signal_received.pop()
+ process.handle_sig(sig, None)
+ else:
+ self._send(obj)
def fileno(self):
return self.writer.fileno()
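# Editor's sketch (standalone, hypothetical names) of the signal-deferral idea
# in ConnectionWriter.send() above: while a write holds the pipe lock, the
# handler only records the signal; the writer replays anything recorded once
# it is safe to act on it.
import signal

queue_signals = False
signal_received = []

def _handler(sig, frame):
    if queue_signals:
        signal_received.append(sig)  # mid-write: defer, do not interrupt the pipe
    else:
        raise KeyboardInterrupt()

signal.signal(signal.SIGINT, _handler)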
diff --git a/lib/bb/server/xmlrpcserver.py b/lib/bb/server/xmlrpcserver.py
index 2fa71be66..04b0b17db 100644
--- a/lib/bb/server/xmlrpcserver.py
+++ b/lib/bb/server/xmlrpcserver.py
@@ -11,6 +11,7 @@ import hashlib
import time
import inspect
from xmlrpc.server import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
+import bb.server.xmlrpcclient
import bb
@@ -117,7 +118,7 @@ class BitBakeXMLRPCServerCommands():
"""
Run a cooker command on the server
"""
- return self.server.cooker.command.runCommand(command, self.server.readonly)
+ return self.server.cooker.command.runCommand(command, self.server.parent, self.server.readonly)
def getEventHandle(self):
return self.event_handle
diff --git a/lib/bb/siggen.py b/lib/bb/siggen.py
index 07692e673..8ab08ec96 100644
--- a/lib/bb/siggen.py
+++ b/lib/bb/siggen.py
@@ -1,4 +1,6 @@
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -11,6 +13,10 @@ import pickle
import bb.data
import difflib
import simplediff
+import json
+import types
+from contextlib import contextmanager
+import bb.compress.zstd
from bb.checksum import FileChecksumCache
from bb import runqueue
import hashserv
@@ -19,6 +25,35 @@ import hashserv.client
logger = logging.getLogger('BitBake.SigGen')
hashequiv_logger = logging.getLogger('BitBake.SigGen.HashEquiv')
+# find_siginfo and find_siginfo_version are set by the metadata siggen
+# The minimum version of the find_siginfo function we need
+find_siginfo_minversion = 2
+
+HASHSERV_ENVVARS = [
+ "SSL_CERT_DIR",
+ "SSL_CERT_FILE",
+ "NO_PROXY",
+ "HTTPS_PROXY",
+ "HTTP_PROXY"
+]
+
+def check_siggen_version(siggen):
+ if not hasattr(siggen, "find_siginfo_version"):
+ bb.fatal("Siggen from metadata (OE-Core?) is too old, please update it (no version found)")
+ if siggen.find_siginfo_version < siggen.find_siginfo_minversion:
+ bb.fatal("Siggen from metadata (OE-Core?) is too old, please update it (%s vs %s)" % (siggen.find_siginfo_version, siggen.find_siginfo_minversion))
+
+class SetEncoder(json.JSONEncoder):
+ def default(self, obj):
+ if isinstance(obj, set) or isinstance(obj, frozenset):
+ return dict(_set_object=list(sorted(obj)))
+ return json.JSONEncoder.default(self, obj)
+
+def SetDecoder(dct):
+ if '_set_object' in dct:
+ return frozenset(dct['_set_object'])
+ return dct
+
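# Editor's round-trip sketch (data is hypothetical) for the set-aware JSON
# helpers above, assuming SetEncoder/SetDecoder are in scope:
import json

blob = json.dumps({"deps": {"a", "b"}}, cls=SetEncoder)
# -> '{"deps": {"_set_object": ["a", "b"]}}'
restored = json.loads(blob, object_hook=SetDecoder)
assert restored["deps"] == frozenset({"a", "b"})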
def init(d):
siggens = [obj for obj in globals().values()
if type(obj) is type and issubclass(obj, SignatureGenerator)]
@@ -27,7 +62,6 @@ def init(d):
for sg in siggens:
if desired == sg.name:
return sg(d)
- break
else:
logger.error("Invalid signature generator '%s', using default 'noop'\n"
"Available generators: %s", desired,
@@ -39,11 +73,6 @@ class SignatureGenerator(object):
"""
name = "noop"
- # If the derived class supports multiconfig datacaches, set this to True
- # The default is False for backward compatibility with derived signature
- # generators that do not understand multiconfig caches
- supports_multiconfig_datacaches = False
-
def __init__(self, data):
self.basehash = {}
self.taskhash = {}
@@ -61,9 +90,39 @@ class SignatureGenerator(object):
def postparsing_clean_cache(self):
return
+ def setup_datacache(self, datacaches):
+ self.datacaches = datacaches
+
+ def setup_datacache_from_datastore(self, mcfn, d):
+ # In task context we have no cache so setup internal data structures
+ # from the fully parsed data store provided
+
+ mc = d.getVar("__BBMULTICONFIG", False) or ""
+ tasks = d.getVar('__BBTASKS', False)
+
+ self.datacaches = {}
+ self.datacaches[mc] = types.SimpleNamespace()
+ setattr(self.datacaches[mc], "stamp", {})
+ self.datacaches[mc].stamp[mcfn] = d.getVar('STAMP')
+ setattr(self.datacaches[mc], "stamp_extrainfo", {})
+ self.datacaches[mc].stamp_extrainfo[mcfn] = {}
+ for t in tasks:
+ flag = d.getVarFlag(t, "stamp-extra-info")
+ if flag:
+ self.datacaches[mc].stamp_extrainfo[mcfn][t] = flag
+
+ def get_cached_unihash(self, tid):
+ return None
+
def get_unihash(self, tid):
+ unihash = self.get_cached_unihash(tid)
+ if unihash:
+ return unihash
return self.taskhash[tid]
+ def get_unihashes(self, tids):
+ return {tid: self.get_unihash(tid) for tid in tids}
+
def prep_taskhash(self, tid, deps, dataCaches):
return
@@ -75,17 +134,51 @@ class SignatureGenerator(object):
"""Write/update the file checksum cache onto disk"""
return
+ def stampfile_base(self, mcfn):
+ mc = bb.runqueue.mc_from_tid(mcfn)
+ return self.datacaches[mc].stamp[mcfn]
+
+ def stampfile_mcfn(self, taskname, mcfn, extrainfo=True):
+ mc = bb.runqueue.mc_from_tid(mcfn)
+ stamp = self.datacaches[mc].stamp[mcfn]
+ if not stamp:
+ return
+
+ stamp_extrainfo = ""
+ if extrainfo:
+ taskflagname = taskname
+ if taskname.endswith("_setscene"):
+ taskflagname = taskname.replace("_setscene", "")
+ stamp_extrainfo = self.datacaches[mc].stamp_extrainfo[mcfn].get(taskflagname) or ""
+
+ return self.stampfile(stamp, mcfn, taskname, stamp_extrainfo)
+
def stampfile(self, stampbase, file_name, taskname, extrainfo):
return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.')
+ def stampcleanmask_mcfn(self, taskname, mcfn):
+ mc = bb.runqueue.mc_from_tid(mcfn)
+ stamp = self.datacaches[mc].stamp[mcfn]
+ if not stamp:
+ return []
+
+ taskflagname = taskname
+ if taskname.endswith("_setscene"):
+ taskflagname = taskname.replace("_setscene", "")
+ stamp_extrainfo = self.datacaches[mc].stamp_extrainfo[mcfn].get(taskflagname) or ""
+
+ return self.stampcleanmask(stamp, mcfn, taskname, stamp_extrainfo)
+
def stampcleanmask(self, stampbase, file_name, taskname, extrainfo):
return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.')
- def dump_sigtask(self, fn, task, stampbase, runtime):
+ def dump_sigtask(self, mcfn, task, stampbase, runtime):
return
- def invalidate_task(self, task, d, fn):
- bb.build.del_stamp(task, d, fn)
+ def invalidate_task(self, task, mcfn):
+ mc = bb.runqueue.mc_from_tid(mcfn)
+ stamp = self.datacaches[mc].stamp[mcfn]
+ bb.utils.remove(stamp)
def dump_sigs(self, dataCache, options):
return
@@ -108,40 +201,19 @@ class SignatureGenerator(object):
def save_unitaskhashes(self):
return
- def set_setscene_tasks(self, setscene_tasks):
+ def copy_unitaskhashes(self, targetdir):
return
- @classmethod
- def get_data_caches(cls, dataCaches, mc):
- """
- This function returns the datacaches that should be passed to signature
- generator functions. If the signature generator supports multiconfig
- caches, the entire dictionary of data caches is sent, otherwise a
- special proxy is sent that support both index access to all
- multiconfigs, and also direct access for the default multiconfig.
-
- The proxy class allows code in this class itself to always use
- multiconfig aware code (to ease maintenance), but derived classes that
- are unaware of multiconfig data caches can still access the default
- multiconfig as expected.
-
- Do not override this function in derived classes; it will be removed in
- the future when support for multiconfig data caches is mandatory
- """
- class DataCacheProxy(object):
- def __init__(self):
- pass
-
- def __getitem__(self, key):
- return dataCaches[key]
-
- def __getattr__(self, name):
- return getattr(dataCaches[mc], name)
+ def set_setscene_tasks(self, setscene_tasks):
+ return
- if cls.supports_multiconfig_datacaches:
- return dataCaches
+ def exit(self):
+ return
- return DataCacheProxy()
+def build_pnid(mc, pn, taskname):
+ if mc:
+ return "mc:" + mc + ":" + pn + ":" + taskname
+ return pn + ":" + taskname
class SignatureGeneratorBasic(SignatureGenerator):
"""
@@ -152,15 +224,12 @@ class SignatureGeneratorBasic(SignatureGenerator):
self.basehash = {}
self.taskhash = {}
self.unihash = {}
- self.taskdeps = {}
self.runtaskdeps = {}
self.file_checksum_values = {}
self.taints = {}
- self.gendeps = {}
- self.lookupcache = {}
self.setscenetasks = set()
- self.basewhitelist = set((data.getVar("BB_HASHBASE_WHITELIST") or "").split())
- self.taskwhitelist = None
+ self.basehash_ignore_vars = set((data.getVar("BB_BASEHASH_IGNORE_VARS") or "").split())
+ self.taskhash_ignore_tasks = None
self.init_rundepcheck(data)
checksum_cache_file = data.getVar("BB_HASH_CHECKSUM_CACHE_FILE")
if checksum_cache_file:
@@ -175,21 +244,21 @@ class SignatureGeneratorBasic(SignatureGenerator):
self.tidtopn = {}
def init_rundepcheck(self, data):
- self.taskwhitelist = data.getVar("BB_HASHTASK_WHITELIST") or None
- if self.taskwhitelist:
- self.twl = re.compile(self.taskwhitelist)
+ self.taskhash_ignore_tasks = data.getVar("BB_TASKHASH_IGNORE_TASKS") or None
+ if self.taskhash_ignore_tasks:
+ self.twl = re.compile(self.taskhash_ignore_tasks)
else:
self.twl = None
- def _build_data(self, fn, d):
+ def _build_data(self, mcfn, d):
ignore_mismatch = ((d.getVar("BB_HASH_IGNORE_MISMATCH") or '') == '1')
- tasklist, gendeps, lookupcache = bb.data.generate_dependencies(d, self.basewhitelist)
+ tasklist, gendeps, lookupcache = bb.data.generate_dependencies(d, self.basehash_ignore_vars)
- taskdeps, basehash = bb.data.generate_dependency_hash(tasklist, gendeps, lookupcache, self.basewhitelist, fn)
+ taskdeps, basehash = bb.data.generate_dependency_hash(tasklist, gendeps, lookupcache, self.basehash_ignore_vars, mcfn)
for task in tasklist:
- tid = fn + ":" + task
+ tid = mcfn + ":" + task
if not ignore_mismatch and tid in self.basehash and self.basehash[tid] != basehash[tid]:
bb.error("When reparsing %s, the basehash value changed from %s to %s. The metadata is not deterministic and this needs to be fixed." % (tid, self.basehash[tid], basehash[tid]))
bb.error("The following commands may help:")
@@ -200,11 +269,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
bb.error("%s -Sprintdiff\n" % cmd)
self.basehash[tid] = basehash[tid]
- self.taskdeps[fn] = taskdeps
- self.gendeps[fn] = gendeps
- self.lookupcache[fn] = lookupcache
-
- return taskdeps
+ return taskdeps, gendeps, lookupcache
def set_setscene_tasks(self, setscene_tasks):
self.setscenetasks = set(setscene_tasks)
@@ -212,35 +277,47 @@ class SignatureGeneratorBasic(SignatureGenerator):
def finalise(self, fn, d, variant):
mc = d.getVar("__BBMULTICONFIG", False) or ""
+ mcfn = fn
if variant or mc:
- fn = bb.cache.realfn2virtual(fn, variant, mc)
+ mcfn = bb.cache.realfn2virtual(fn, variant, mc)
try:
- taskdeps = self._build_data(fn, d)
+ taskdeps, gendeps, lookupcache = self._build_data(mcfn, d)
except bb.parse.SkipRecipe:
raise
except:
- bb.warn("Error during finalise of %s" % fn)
+ bb.warn("Error during finalise of %s" % mcfn)
raise
+ basehashes = {}
+ for task in taskdeps:
+ basehashes[task] = self.basehash[mcfn + ":" + task]
+
+ d.setVar("__siggen_basehashes", basehashes)
+ d.setVar("__siggen_gendeps", gendeps)
+ d.setVar("__siggen_varvals", lookupcache)
+ d.setVar("__siggen_taskdeps", taskdeps)
+
#Slow but can be useful for debugging mismatched basehashes
- #for task in self.taskdeps[fn]:
- # self.dump_sigtask(fn, task, d.getVar("STAMP"), False)
+ #self.setup_datacache_from_datastore(mcfn, d)
+ #for task in taskdeps:
+ # self.dump_sigtask(mcfn, task, d.getVar("STAMP"), False)
- for task in taskdeps:
- d.setVar("BB_BASEHASH_task-%s" % task, self.basehash[fn + ":" + task])
+ def setup_datacache_from_datastore(self, mcfn, d):
+ super().setup_datacache_from_datastore(mcfn, d)
- def postparsing_clean_cache(self):
- #
- # After parsing we can remove some things from memory to reduce our memory footprint
- #
- self.gendeps = {}
- self.lookupcache = {}
- self.taskdeps = {}
+ mc = bb.runqueue.mc_from_tid(mcfn)
+ for attr in ["siggen_varvals", "siggen_taskdeps", "siggen_gendeps"]:
+ if not hasattr(self.datacaches[mc], attr):
+ setattr(self.datacaches[mc], attr, {})
+ self.datacaches[mc].siggen_varvals[mcfn] = d.getVar("__siggen_varvals")
+ self.datacaches[mc].siggen_taskdeps[mcfn] = d.getVar("__siggen_taskdeps")
+ self.datacaches[mc].siggen_gendeps[mcfn] = d.getVar("__siggen_gendeps")
def rundep_check(self, fn, recipename, task, dep, depname, dataCaches):
# Return True if we should keep the dependency, False to drop it
- # We only manipulate the dependencies for packages not in the whitelist
+ # We only manipulate the dependencies for packages not in the ignore
+ # list
if self.twl and not self.twl.search(recipename):
# then process the actual dependencies
if self.twl.search(depname):
@@ -258,38 +335,37 @@ class SignatureGeneratorBasic(SignatureGenerator):
def prep_taskhash(self, tid, deps, dataCaches):
- (mc, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)
+ (mc, _, task, mcfn) = bb.runqueue.split_tid_mcfn(tid)
self.basehash[tid] = dataCaches[mc].basetaskhash[tid]
self.runtaskdeps[tid] = []
self.file_checksum_values[tid] = []
- recipename = dataCaches[mc].pkg_fn[fn]
+ recipename = dataCaches[mc].pkg_fn[mcfn]
self.tidtopn[tid] = recipename
+ # save hashfn for deps into siginfo?
+ for dep in deps:
+ (depmc, _, deptask, depmcfn) = bb.runqueue.split_tid_mcfn(dep)
+ dep_pn = dataCaches[depmc].pkg_fn[depmcfn]
- for dep in sorted(deps, key=clean_basepath):
- (depmc, _, _, depmcfn) = bb.runqueue.split_tid_mcfn(dep)
- depname = dataCaches[depmc].pkg_fn[depmcfn]
- if not self.supports_multiconfig_datacaches and mc != depmc:
- # If the signature generator doesn't understand multiconfig
- # data caches, any dependency not in the same multiconfig must
- # be skipped for backward compatibility
- continue
- if not self.rundep_check(fn, recipename, task, dep, depname, dataCaches):
+ if not self.rundep_check(mcfn, recipename, task, dep, dep_pn, dataCaches):
continue
+
if dep not in self.taskhash:
bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?" % dep)
- self.runtaskdeps[tid].append(dep)
- if task in dataCaches[mc].file_checksums[fn]:
+ dep_pnid = build_pnid(depmc, dep_pn, deptask)
+ self.runtaskdeps[tid].append((dep_pnid, dep))
+
+ if task in dataCaches[mc].file_checksums[mcfn]:
if self.checksum_cache:
- checksums = self.checksum_cache.get_checksums(dataCaches[mc].file_checksums[fn][task], recipename, self.localdirsexclude)
+ checksums = self.checksum_cache.get_checksums(dataCaches[mc].file_checksums[mcfn][task], recipename, self.localdirsexclude)
else:
- checksums = bb.fetch2.get_file_checksums(dataCaches[mc].file_checksums[fn][task], recipename, self.localdirsexclude)
+ checksums = bb.fetch2.get_file_checksums(dataCaches[mc].file_checksums[mcfn][task], recipename, self.localdirsexclude)
for (f,cs) in checksums:
self.file_checksum_values[tid].append((f,cs))
- taskdep = dataCaches[mc].task_deps[fn]
+ taskdep = dataCaches[mc].task_deps[mcfn]
if 'nostamp' in taskdep and task in taskdep['nostamp']:
# Nostamp tasks need an implicit taint so that they force any dependent tasks to run
if tid in self.taints and self.taints[tid].startswith("nostamp:"):
@@ -300,7 +376,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
taint = str(uuid.uuid4())
self.taints[tid] = "nostamp:" + taint
- taint = self.read_taint(fn, task, dataCaches[mc].stamp[fn])
+ taint = self.read_taint(mcfn, task, dataCaches[mc].stamp[mcfn])
if taint:
self.taints[tid] = taint
logger.warning("%s is tainted from a forced run" % tid)
@@ -310,22 +386,24 @@ class SignatureGeneratorBasic(SignatureGenerator):
def get_taskhash(self, tid, deps, dataCaches):
data = self.basehash[tid]
- for dep in self.runtaskdeps[tid]:
- data = data + self.get_unihash(dep)
+ for dep in sorted(self.runtaskdeps[tid]):
+ data += self.get_unihash(dep[1])
- for (f, cs) in self.file_checksum_values[tid]:
+ for (f, cs) in sorted(self.file_checksum_values[tid], key=clean_checksum_file_path):
if cs:
- data = data + cs
+ if "/./" in f:
+ data += "./" + f.split("/./")[1]
+ data += cs
if tid in self.taints:
if self.taints[tid].startswith("nostamp:"):
- data = data + self.taints[tid][8:]
+ data += self.taints[tid][8:]
else:
- data = data + self.taints[tid]
+ data += self.taints[tid]
h = hashlib.sha256(data.encode("utf-8")).hexdigest()
self.taskhash[tid] = h
- #d.setVar("BB_TASKHASH_task-%s" % task, taskhash[task])
+ #d.setVar("BB_TASKHASH:task-%s" % task, taskhash[task])
return h
def writeout_file_checksum_cache(self):
@@ -340,9 +418,12 @@ class SignatureGeneratorBasic(SignatureGenerator):
def save_unitaskhashes(self):
self.unihash_cache.save(self.unitaskhashes)
- def dump_sigtask(self, fn, task, stampbase, runtime):
+ def copy_unitaskhashes(self, targetdir):
+ self.unihash_cache.copyfile(targetdir)
- tid = fn + ":" + task
+ def dump_sigtask(self, mcfn, task, stampbase, runtime):
+ tid = mcfn + ":" + task
+ mc = bb.runqueue.mc_from_tid(mcfn)
referencestamp = stampbase
if isinstance(runtime, str) and runtime.startswith("customfile"):
sigfile = stampbase
@@ -357,29 +438,34 @@ class SignatureGeneratorBasic(SignatureGenerator):
data = {}
data['task'] = task
- data['basewhitelist'] = self.basewhitelist
- data['taskwhitelist'] = self.taskwhitelist
- data['taskdeps'] = self.taskdeps[fn][task]
+ data['basehash_ignore_vars'] = self.basehash_ignore_vars
+ data['taskhash_ignore_tasks'] = self.taskhash_ignore_tasks
+ data['taskdeps'] = self.datacaches[mc].siggen_taskdeps[mcfn][task]
data['basehash'] = self.basehash[tid]
data['gendeps'] = {}
data['varvals'] = {}
- data['varvals'][task] = self.lookupcache[fn][task]
- for dep in self.taskdeps[fn][task]:
- if dep in self.basewhitelist:
+ data['varvals'][task] = self.datacaches[mc].siggen_varvals[mcfn][task]
+ for dep in self.datacaches[mc].siggen_taskdeps[mcfn][task]:
+ if dep in self.basehash_ignore_vars:
continue
- data['gendeps'][dep] = self.gendeps[fn][dep]
- data['varvals'][dep] = self.lookupcache[fn][dep]
+ data['gendeps'][dep] = self.datacaches[mc].siggen_gendeps[mcfn][dep]
+ data['varvals'][dep] = self.datacaches[mc].siggen_varvals[mcfn][dep]
if runtime and tid in self.taskhash:
- data['runtaskdeps'] = self.runtaskdeps[tid]
- data['file_checksum_values'] = [(os.path.basename(f), cs) for f,cs in self.file_checksum_values[tid]]
+ data['runtaskdeps'] = [dep[0] for dep in sorted(self.runtaskdeps[tid])]
+ data['file_checksum_values'] = []
+ for f,cs in sorted(self.file_checksum_values[tid], key=clean_checksum_file_path):
+ if "/./" in f:
+ data['file_checksum_values'].append(("./" + f.split("/./")[1], cs))
+ else:
+ data['file_checksum_values'].append((os.path.basename(f), cs))
data['runtaskhashes'] = {}
- for dep in data['runtaskdeps']:
- data['runtaskhashes'][dep] = self.get_unihash(dep)
+ for dep in self.runtaskdeps[tid]:
+ data['runtaskhashes'][dep[0]] = self.get_unihash(dep[1])
data['taskhash'] = self.taskhash[tid]
data['unihash'] = self.get_unihash(tid)
- taint = self.read_taint(fn, task, referencestamp)
+ taint = self.read_taint(mcfn, task, referencestamp)
if taint:
data['taint'] = taint
@@ -396,11 +482,11 @@ class SignatureGeneratorBasic(SignatureGenerator):
bb.error("Taskhash mismatch %s versus %s for %s" % (computed_taskhash, self.taskhash[tid], tid))
sigfile = sigfile.replace(self.taskhash[tid], computed_taskhash)
- fd, tmpfile = tempfile.mkstemp(dir=os.path.dirname(sigfile), prefix="sigtask.")
+ fd, tmpfile = bb.utils.mkstemp(dir=os.path.dirname(sigfile), prefix="sigtask.")
try:
- with os.fdopen(fd, "wb") as stream:
- p = pickle.dump(data, stream, -1)
- stream.flush()
+ with bb.compress.zstd.open(fd, "wt", encoding="utf-8", num_threads=1) as f:
+ json.dump(data, f, sort_keys=True, separators=(",", ":"), cls=SetEncoder)
+ f.flush()
os.chmod(tmpfile, 0o664)
bb.utils.rename(tmpfile, sigfile)
except (OSError, IOError) as err:
@@ -410,18 +496,6 @@ class SignatureGeneratorBasic(SignatureGenerator):
pass
raise err
- def dump_sigfn(self, fn, dataCaches, options):
- if fn in self.taskdeps:
- for task in self.taskdeps[fn]:
- tid = fn + ":" + task
- mc = bb.runqueue.mc_from_tid(tid)
- if tid not in self.taskhash:
- continue
- if dataCaches[mc].basetaskhash[tid] != self.basehash[tid]:
- bb.error("Bitbake's cached basehash does not match the one we just generated (%s)!" % tid)
- bb.error("The mismatched hashes were %s and %s" % (dataCaches[mc].basetaskhash[tid], self.basehash[tid]))
- self.dump_sigtask(fn, task, dataCaches[mc].stamp[fn], True)
-
class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
name = "basichash"
@@ -432,11 +506,11 @@ class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
# If task is not in basehash, then error
return self.basehash[tid]
- def stampfile(self, stampbase, fn, taskname, extrainfo, clean=False):
- if taskname != "do_setscene" and taskname.endswith("_setscene"):
- tid = fn + ":" + taskname[:-9]
+ def stampfile(self, stampbase, mcfn, taskname, extrainfo, clean=False):
+ if taskname.endswith("_setscene"):
+ tid = mcfn + ":" + taskname[:-9]
else:
- tid = fn + ":" + taskname
+ tid = mcfn + ":" + taskname
if clean:
h = "*"
else:
@@ -444,29 +518,106 @@ class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
return ("%s.%s.%s.%s" % (stampbase, taskname, h, extrainfo)).rstrip('.')
- def stampcleanmask(self, stampbase, fn, taskname, extrainfo):
- return self.stampfile(stampbase, fn, taskname, extrainfo, clean=True)
+ def stampcleanmask(self, stampbase, mcfn, taskname, extrainfo):
+ return self.stampfile(stampbase, mcfn, taskname, extrainfo, clean=True)
+
+ def invalidate_task(self, task, mcfn):
+ bb.note("Tainting hash to force rebuild of task %s, %s" % (mcfn, task))
- def invalidate_task(self, task, d, fn):
- bb.note("Tainting hash to force rebuild of task %s, %s" % (fn, task))
- bb.build.write_taint(task, d, fn)
+ mc = bb.runqueue.mc_from_tid(mcfn)
+ stamp = self.datacaches[mc].stamp[mcfn]
+
+ taintfn = stamp + '.' + task + '.taint'
+
+ import uuid
+ bb.utils.mkdirhier(os.path.dirname(taintfn))
+ # The specific content of the taint file is not really important;
+ # we just need it to be random, so a random UUID is used
+ with open(taintfn, 'w') as taintf:
+ taintf.write(str(uuid.uuid4()))
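
The taint works because get_taskhash() above folds any taint string into the data it digests, so a fresh UUID guarantees a new hash. A minimal sketch of that effect, using made-up values rather than real BitBake state:

    import hashlib
    import uuid

    def sketch_taskhash(basehash, taint=None):
        # Mirrors the tail of get_taskhash(): taints are appended to the
        # hashed data, so any new random taint yields a new digest.
        data = basehash
        if taint:
            data += taint
        return hashlib.sha256(data.encode("utf-8")).hexdigest()

    h1 = sketch_taskhash("0123abcd")                      # untainted
    h2 = sketch_taskhash("0123abcd", str(uuid.uuid4()))   # tainted: h2 != h1
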
class SignatureGeneratorUniHashMixIn(object):
def __init__(self, data):
self.extramethod = {}
+ # NOTE: The cache only tracks hashes that exist. Hashes that don't
+ # exist are always queried from the server since it is possible for
+ # hashes to appear over time, but much less likely for them to
+ # disappear
+ self.unihash_exists_cache = set()
+ self.username = None
+ self.password = None
+ self.env = {}
+
+ origenv = data.getVar("BB_ORIGENV")
+ for e in HASHSERV_ENVVARS:
+ value = data.getVar(e)
+ if not value and origenv:
+ value = origenv.getVar(e)
+ if value:
+ self.env[e] = value
super().__init__(data)
def get_taskdata(self):
- return (self.server, self.method, self.extramethod) + super().get_taskdata()
+ return (self.server, self.method, self.extramethod, self.max_parallel, self.username, self.password, self.env) + super().get_taskdata()
def set_taskdata(self, data):
- self.server, self.method, self.extramethod = data[:3]
- super().set_taskdata(data[3:])
+ self.server, self.method, self.extramethod, self.max_parallel, self.username, self.password, self.env = data[:7]
+ super().set_taskdata(data[7:])
+
+ def get_hashserv_creds(self):
+ if self.username and self.password:
+ return {
+ "username": self.username,
+ "password": self.password,
+ }
+
+ return {}
+
+ @contextmanager
+ def _client_env(self):
+ orig_env = os.environ.copy()
+ try:
+ for k, v in self.env.items():
+ os.environ[k] = v
+
+ yield
+ finally:
+ for k, v in self.env.items():
+ if k in orig_env:
+ os.environ[k] = orig_env[k]
+ else:
+ del os.environ[k]
+ @contextmanager
def client(self):
- if getattr(self, '_client', None) is None:
- self._client = hashserv.create_client(self.server)
- return self._client
+ with self._client_env():
+ if getattr(self, '_client', None) is None:
+ self._client = hashserv.create_client(self.server, **self.get_hashserv_creds())
+ yield self._client
+
+ @contextmanager
+ def client_pool(self):
+ with self._client_env():
+ if getattr(self, '_client_pool', None) is None:
+ self._client_pool = hashserv.client.ClientPool(self.server, self.max_parallel, **self.get_hashserv_creds())
+ yield self._client_pool
+
+ def reset(self, data):
+ self.__close_clients()
+ return super().reset(data)
+
+ def exit(self):
+ self.__close_clients()
+ return super().exit()
+
+ def __close_clients(self):
+ with self._client_env():
+ if getattr(self, '_client', None) is not None:
+ self._client.close()
+ self._client = None
+ if getattr(self, '_client_pool', None) is not None:
+ self._client_pool.close()
+ self._client_pool = None
def get_stampfile_hash(self, tid):
if tid in self.taskhash:
@@ -499,7 +650,7 @@ class SignatureGeneratorUniHashMixIn(object):
return None
return unihash
- def get_unihash(self, tid):
+ def get_cached_unihash(self, tid):
taskhash = self.taskhash[tid]
# If its not a setscene task we can return
@@ -514,40 +665,105 @@ class SignatureGeneratorUniHashMixIn(object):
self.unihash[tid] = unihash
return unihash
- # In the absence of being able to discover a unique hash from the
- # server, make it be equivalent to the taskhash. The unique "hash" only
- # really needs to be a unique string (not even necessarily a hash), but
- # making it match the taskhash has a few advantages:
- #
- # 1) All of the sstate code that assumes hashes can be the same
- # 2) It provides maximal compatibility with builders that don't use
- # an equivalency server
- # 3) The value is easy for multiple independent builders to derive the
- # same unique hash from the same input. This means that if the
- # independent builders find the same taskhash, but it isn't reported
- # to the server, there is a better chance that they will agree on
- # the unique hash.
- unihash = taskhash
+ return None
- try:
- method = self.method
- if tid in self.extramethod:
- method = method + self.extramethod[tid]
- data = self.client().get_unihash(method, self.taskhash[tid])
- if data:
- unihash = data
+ def _get_method(self, tid):
+ method = self.method
+ if tid in self.extramethod:
+ method = method + self.extramethod[tid]
+
+ return method
+
+ def unihashes_exist(self, query):
+ if len(query) == 0:
+ return {}
+
+ uncached_query = {}
+ result = {}
+ for key, unihash in query.items():
+ if unihash in self.unihash_exists_cache:
+ result[key] = True
+ else:
+ uncached_query[key] = unihash
+
+ if self.max_parallel <= 1 or len(uncached_query) <= 1:
+ # No parallelism required. Make the query serially with the single client
+ with self.client() as client:
+ uncached_result = {
+ key: client.unihash_exists(value) for key, value in uncached_query.items()
+ }
+ else:
+ with self.client_pool() as client_pool:
+ uncached_result = client_pool.unihashes_exist(uncached_query)
+
+ for key, exists in uncached_result.items():
+ if exists:
+ self.unihash_exists_cache.add(query[key])
+ result[key] = exists
+
+ return result
+
+ def get_unihash(self, tid):
+ return self.get_unihashes([tid])[tid]
+
+ def get_unihashes(self, tids):
+ """
+ For an iterable of tids, returns a dictionary that maps each tid to a
+ unihash
+ """
+ result = {}
+ queries = {}
+ query_result = {}
+
+ for tid in tids:
+ unihash = self.get_cached_unihash(tid)
+ if unihash:
+ result[tid] = unihash
+ else:
+ queries[tid] = (self._get_method(tid), self.taskhash[tid])
+
+ if len(queries) == 0:
+ return result
+
+ if self.max_parallel <= 1 or len(queries) <= 1:
+ # No parallelism required. Make the query serially with the single client
+ with self.client() as client:
+ for tid, args in queries.items():
+ query_result[tid] = client.get_unihash(*args)
+ else:
+ with self.client_pool() as client_pool:
+ query_result = client_pool.get_unihashes(queries)
+
+ for tid, unihash in query_result.items():
+ # In the absence of being able to discover a unique hash from the
+ # server, make it equivalent to the taskhash. The unique "hash" only
+ # really needs to be a unique string (not even necessarily a hash), but
+ # making it match the taskhash has a few advantages:
+ #
+ # 1) All of the sstate code that assumes hashes can be the same
+ # 2) It provides maximal compatibility with builders that don't use
+ # an equivalency server
+ # 3) The value is easy for multiple independent builders to derive the
+ # same unique hash from the same input. This means that if the
+ # independent builders find the same taskhash, but it isn't reported
+ # to the server, there is a better chance that they will agree on
+ # the unique hash.
+ taskhash = self.taskhash[tid]
+ if unihash:
# A unique hash equal to the taskhash is not very interesting,
# so it is reported at debug level 2. If they differ, that
# is much more interesting, so it is reported at debug level 1
- hashequiv_logger.debug((1, 2)[unihash == taskhash], 'Found unihash %s in place of %s for %s from %s' % (unihash, taskhash, tid, self.server))
+ hashequiv_logger.bbdebug((1, 2)[unihash == taskhash], 'Found unihash %s in place of %s for %s from %s' % (unihash, taskhash, tid, self.server))
else:
hashequiv_logger.debug2('No reported unihash for %s:%s from %s' % (tid, taskhash, self.server))
- except ConnectionError as e:
- bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
+ unihash = taskhash
- self.set_unihash(tid, unihash)
- self.unihash[tid] = unihash
- return unihash
+
+ self.set_unihash(tid, unihash)
+ self.unihash[tid] = unihash
+ result[tid] = unihash
+
+ return result
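
A hedged usage sketch of the batched lookup above; the tids and the siggen instance are hypothetical, since in real builds the runqueue drives these calls:

    # Assumes 'siggen' is a configured generator with taskhashes already computed.
    tids = ["/path/recipe.bb:do_compile", "/path/recipe.bb:do_install"]
    unihashes = siggen.get_unihashes(tids)  # pooled server queries when max_parallel > 1
    for tid in tids:
        # Falls back to the taskhash when the server reports no equivalent hash.
        print(tid, unihashes[tid])
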
def report_unihash(self, path, task, d):
import importlib
@@ -556,14 +772,14 @@ class SignatureGeneratorUniHashMixIn(object):
unihash = d.getVar('BB_UNIHASH')
report_taskdata = d.getVar('SSTATE_HASHEQUIV_REPORT_TASKDATA') == '1'
tempdir = d.getVar('T')
- fn = d.getVar('BB_FILENAME')
- tid = fn + ':do_' + task
+ mcfn = d.getVar('BB_FILENAME')
+ tid = mcfn + ':do_' + task
key = tid + ':' + taskhash
if self.setscenetasks and tid not in self.setscenetasks:
return
- # This can happen if locked sigs are in action. Detect and just abort
+ # This can happen if locked sigs are in action. Detect and just exit
if taskhash != self.taskhash[tid]:
return
@@ -611,12 +827,14 @@ class SignatureGeneratorUniHashMixIn(object):
if tid in self.extramethod:
method = method + self.extramethod[tid]
- data = self.client().report_unihash(taskhash, method, outhash, unihash, extra_data)
+ with self.client() as client:
+ data = client.report_unihash(taskhash, method, outhash, unihash, extra_data)
+
new_unihash = data['unihash']
if new_unihash != unihash:
hashequiv_logger.debug('Task %s unihash changed %s -> %s by server %s' % (taskhash, unihash, new_unihash, self.server))
- bb.event.fire(bb.runqueue.taskUniHashUpdate(fn + ':do_' + task, new_unihash), d)
+ bb.event.fire(bb.runqueue.taskUniHashUpdate(mcfn + ':do_' + task, new_unihash), d)
self.set_unihash(tid, new_unihash)
d.setVar('BB_UNIHASH', new_unihash)
else:
@@ -642,7 +860,9 @@ class SignatureGeneratorUniHashMixIn(object):
if tid in self.extramethod:
method = method + self.extramethod[tid]
- data = self.client().report_unihash_equiv(taskhash, method, wanted_unihash, extra_data)
+ with self.client() as client:
+ data = client.report_unihash_equiv(taskhash, method, wanted_unihash, extra_data)
+
hashequiv_logger.verbose('Reported task %s as unihash %s to %s (%s)' % (tid, wanted_unihash, self.server, str(data)))
if data is None:
@@ -675,20 +895,20 @@ class SignatureGeneratorTestEquivHash(SignatureGeneratorUniHashMixIn, SignatureG
super().init_rundepcheck(data)
self.server = data.getVar('BB_HASHSERVE')
self.method = "sstate_output_hash"
+ self.max_parallel = 1
-#
-# Dummy class used for bitbake-selftest
-#
-class SignatureGeneratorTestMulticonfigDepends(SignatureGeneratorBasicHash):
- name = "TestMulticonfigDepends"
- supports_multiconfig_datacaches = True
+def clean_checksum_file_path(file_checksum_tuple):
+ f, cs = file_checksum_tuple
+ if "/./" in f:
+ return "./" + f.split("/./")[1]
+ return f
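
To make the new sort key concrete: paths containing the "/./" marker compare by their build-relative tail, everything else by the full path. The values here are invented:

    files = [
        ("/work/recipe/build/./src/main.c", "aaaa"),
        ("/work/recipe/files/fix.patch", "bbbb"),
    ]
    # clean_checksum_file_path(files[0]) -> "./src/main.c"
    # clean_checksum_file_path(files[1]) -> "/work/recipe/files/fix.patch"
    ordered = sorted(files, key=clean_checksum_file_path)
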
def dump_this_task(outfile, d):
import bb.parse
- fn = d.getVar("BB_FILENAME")
+ mcfn = d.getVar("BB_FILENAME")
task = "do_" + d.getVar("BB_CURRENTTASK")
- referencestamp = bb.build.stamp_internal(task, d, None, True)
- bb.parse.siggen.dump_sigtask(fn, task, outfile, "customfile:" + referencestamp)
+ referencestamp = bb.parse.siggen.stampfile_base(mcfn)
+ bb.parse.siggen.dump_sigtask(mcfn, task, outfile, "customfile:" + referencestamp)
def init_colors(enable_color):
"""Initialise colour dict for passing to compare_sigfiles()"""
@@ -741,38 +961,15 @@ def list_inline_diff(oldlist, newlist, colors=None):
ret.append(item)
return '[%s]' % (', '.join(ret))
-def clean_basepath(basepath):
- basepath, dir, recipe_task = basepath.rsplit("/", 2)
- cleaned = dir + '/' + recipe_task
-
- if basepath[0] == '/':
- return cleaned
-
- if basepath.startswith("mc:") and basepath.count(':') >= 2:
- mc, mc_name, basepath = basepath.split(":", 2)
- mc_suffix = ':mc:' + mc_name
- else:
- mc_suffix = ''
-
- # mc stuff now removed from basepath. Whatever was next, if present will be the first
- # suffix. ':/', recipe path start, marks the end of this. Something like
- # 'virtual:a[:b[:c]]:/path...' (b and c being optional)
- if basepath[0] != '/':
- cleaned += ':' + basepath.split(':/', 1)[0]
-
- return cleaned + mc_suffix
+# Handle renamed fields
+def handle_renames(data):
+ if 'basewhitelist' in data:
+ data['basehash_ignore_vars'] = data['basewhitelist']
+ del data['basewhitelist']
+ if 'taskwhitelist' in data:
+ data['taskhash_ignore_tasks'] = data['taskwhitelist']
+ del data['taskwhitelist']
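
A quick illustration of the rename shim above, with arbitrary values; old-format siginfo keys are mapped in place to the new names:

    data = {"basewhitelist": {"DATE", "TIME"}, "taskwhitelist": None}
    handle_renames(data)
    assert data["basehash_ignore_vars"] == {"DATE", "TIME"}
    assert data["taskhash_ignore_tasks"] is None
    assert "basewhitelist" not in data and "taskwhitelist" not in data
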
-def clean_basepaths(a):
- b = {}
- for x in a:
- b[clean_basepath(x)] = a[x]
- return b
-
-def clean_basepaths_list(a):
- b = []
- for x in a:
- b.append(clean_basepath(x))
- return b
def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
output = []
@@ -794,20 +991,29 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
formatparams.update(values)
return formatstr.format(**formatparams)
- with open(a, 'rb') as f:
- p1 = pickle.Unpickler(f)
- a_data = p1.load()
- with open(b, 'rb') as f:
- p2 = pickle.Unpickler(f)
- b_data = p2.load()
-
- def dict_diff(a, b, whitelist=set()):
+ try:
+ with bb.compress.zstd.open(a, "rt", encoding="utf-8", num_threads=1) as f:
+ a_data = json.load(f, object_hook=SetDecoder)
+ except (TypeError, OSError) as err:
+ bb.error("Failed to open sigdata file '%s': %s" % (a, str(err)))
+ raise err
+ try:
+ with bb.compress.zstd.open(b, "rt", encoding="utf-8", num_threads=1) as f:
+ b_data = json.load(f, object_hook=SetDecoder)
+ except (TypeError, OSError) as err:
+ bb.error("Failed to open sigdata file '%s': %s" % (b, str(err)))
+ raise err
+
+ for data in [a_data, b_data]:
+ handle_renames(data)
+
+ def dict_diff(a, b, ignored_vars=set()):
sa = set(a.keys())
sb = set(b.keys())
common = sa & sb
changed = set()
for i in common:
- if a[i] != b[i] and i not in whitelist:
+ if a[i] != b[i] and i not in ignored_vars:
changed.add(i)
added = sb - sa
removed = sa - sb
@@ -815,11 +1021,11 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
def file_checksums_diff(a, b):
from collections import Counter
- # Handle old siginfo format
- if isinstance(a, dict):
- a = [(os.path.basename(f), cs) for f, cs in a.items()]
- if isinstance(b, dict):
- b = [(os.path.basename(f), cs) for f, cs in b.items()]
+
+ # Convert lists back to tuples
+ a = [(f[0], f[1]) for f in a]
+ b = [(f[0], f[1]) for f in b]
+
# Compare lists, ensuring we can handle duplicate filenames if they exist
removedcount = Counter(a)
removedcount.subtract(b)
@@ -846,15 +1052,15 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
removed = [x[0] for x in removed]
return changed, added, removed
- if 'basewhitelist' in a_data and a_data['basewhitelist'] != b_data['basewhitelist']:
- output.append(color_format("{color_title}basewhitelist changed{color_default} from '%s' to '%s'") % (a_data['basewhitelist'], b_data['basewhitelist']))
- if a_data['basewhitelist'] and b_data['basewhitelist']:
- output.append("changed items: %s" % a_data['basewhitelist'].symmetric_difference(b_data['basewhitelist']))
+ if 'basehash_ignore_vars' in a_data and a_data['basehash_ignore_vars'] != b_data['basehash_ignore_vars']:
+ output.append(color_format("{color_title}basehash_ignore_vars changed{color_default} from '%s' to '%s'") % (a_data['basehash_ignore_vars'], b_data['basehash_ignore_vars']))
+ if a_data['basehash_ignore_vars'] and b_data['basehash_ignore_vars']:
+ output.append("changed items: %s" % a_data['basehash_ignore_vars'].symmetric_difference(b_data['basehash_ignore_vars']))
- if 'taskwhitelist' in a_data and a_data['taskwhitelist'] != b_data['taskwhitelist']:
- output.append(color_format("{color_title}taskwhitelist changed{color_default} from '%s' to '%s'") % (a_data['taskwhitelist'], b_data['taskwhitelist']))
- if a_data['taskwhitelist'] and b_data['taskwhitelist']:
- output.append("changed items: %s" % a_data['taskwhitelist'].symmetric_difference(b_data['taskwhitelist']))
+ if 'taskhash_ignore_tasks' in a_data and a_data['taskhash_ignore_tasks'] != b_data['taskhash_ignore_tasks']:
+ output.append(color_format("{color_title}taskhash_ignore_tasks changed{color_default} from '%s' to '%s'") % (a_data['taskhash_ignore_tasks'], b_data['taskhash_ignore_tasks']))
+ if a_data['taskhash_ignore_tasks'] and b_data['taskhash_ignore_tasks']:
+ output.append("changed items: %s" % a_data['taskhash_ignore_tasks'].symmetric_difference(b_data['taskhash_ignore_tasks']))
if a_data['taskdeps'] != b_data['taskdeps']:
output.append(color_format("{color_title}Task dependencies changed{color_default} from:\n%s\nto:\n%s") % (sorted(a_data['taskdeps']), sorted(b_data['taskdeps'])))
@@ -862,23 +1068,23 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
if a_data['basehash'] != b_data['basehash'] and not collapsed:
output.append(color_format("{color_title}basehash changed{color_default} from %s to %s") % (a_data['basehash'], b_data['basehash']))
- changed, added, removed = dict_diff(a_data['gendeps'], b_data['gendeps'], a_data['basewhitelist'] & b_data['basewhitelist'])
+ changed, added, removed = dict_diff(a_data['gendeps'], b_data['gendeps'], a_data['basehash_ignore_vars'] & b_data['basehash_ignore_vars'])
if changed:
- for dep in changed:
+ for dep in sorted(changed):
output.append(color_format("{color_title}List of dependencies for variable %s changed from '{color_default}%s{color_title}' to '{color_default}%s{color_title}'") % (dep, a_data['gendeps'][dep], b_data['gendeps'][dep]))
if a_data['gendeps'][dep] and b_data['gendeps'][dep]:
output.append("changed items: %s" % a_data['gendeps'][dep].symmetric_difference(b_data['gendeps'][dep]))
if added:
- for dep in added:
+ for dep in sorted(added):
output.append(color_format("{color_title}Dependency on variable %s was added") % (dep))
if removed:
- for dep in removed:
+ for dep in sorted(removed):
output.append(color_format("{color_title}Dependency on Variable %s was removed") % (dep))
changed, added, removed = dict_diff(a_data['varvals'], b_data['varvals'])
if changed:
- for dep in changed:
+ for dep in sorted(changed):
oldval = a_data['varvals'][dep]
newval = b_data['varvals'][dep]
if newval and oldval and ('\n' in oldval or '\n' in newval):
@@ -902,9 +1108,9 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
output.append(color_format("{color_title}Variable {var} value changed from '{color_default}{oldval}{color_title}' to '{color_default}{newval}{color_title}'{color_default}", var=dep, oldval=oldval, newval=newval))
if not 'file_checksum_values' in a_data:
- a_data['file_checksum_values'] = {}
+ a_data['file_checksum_values'] = []
if not 'file_checksum_values' in b_data:
- b_data['file_checksum_values'] = {}
+ b_data['file_checksum_values'] = []
changed, added, removed = file_checksums_diff(a_data['file_checksum_values'], b_data['file_checksum_values'])
if changed:
@@ -931,11 +1137,11 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
a = a_data['runtaskdeps'][idx]
b = b_data['runtaskdeps'][idx]
if a_data['runtaskhashes'][a] != b_data['runtaskhashes'][b] and not collapsed:
- changed.append("%s with hash %s\n changed to\n%s with hash %s" % (clean_basepath(a), a_data['runtaskhashes'][a], clean_basepath(b), b_data['runtaskhashes'][b]))
+ changed.append("%s with hash %s\n changed to\n%s with hash %s" % (a, a_data['runtaskhashes'][a], b, b_data['runtaskhashes'][b]))
if changed:
- clean_a = clean_basepaths_list(a_data['runtaskdeps'])
- clean_b = clean_basepaths_list(b_data['runtaskdeps'])
+ clean_a = a_data['runtaskdeps']
+ clean_b = b_data['runtaskdeps']
if clean_a != clean_b:
output.append(color_format("{color_title}runtaskdeps changed:{color_default}\n%s") % list_inline_diff(clean_a, clean_b, colors))
else:
@@ -948,7 +1154,7 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
b = b_data['runtaskhashes']
changed, added, removed = dict_diff(a, b)
if added:
- for dep in added:
+ for dep in sorted(added):
bdep_found = False
if removed:
for bdep in removed:
@@ -956,9 +1162,9 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
#output.append("Dependency on task %s was replaced by %s with same hash" % (dep, bdep))
bdep_found = True
if not bdep_found:
- output.append(color_format("{color_title}Dependency on task %s was added{color_default} with hash %s") % (clean_basepath(dep), b[dep]))
+ output.append(color_format("{color_title}Dependency on task %s was added{color_default} with hash %s") % (dep, b[dep]))
if removed:
- for dep in removed:
+ for dep in sorted(removed):
adep_found = False
if added:
for adep in added:
@@ -966,11 +1172,11 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
#output.append("Dependency on task %s was replaced by %s with same hash" % (adep, dep))
adep_found = True
if not adep_found:
- output.append(color_format("{color_title}Dependency on task %s was removed{color_default} with hash %s") % (clean_basepath(dep), a[dep]))
+ output.append(color_format("{color_title}Dependency on task %s was removed{color_default} with hash %s") % (dep, a[dep]))
if changed:
- for dep in changed:
+ for dep in sorted(changed):
if not collapsed:
- output.append(color_format("{color_title}Hash for dependent task %s changed{color_default} from %s to %s") % (clean_basepath(dep), a[dep], b[dep]))
+ output.append(color_format("{color_title}Hash for task dependency %s changed{color_default} from %s to %s") % (dep, a[dep], b[dep]))
if callable(recursecb):
recout = recursecb(dep, a[dep], b[dep])
if recout:
@@ -980,6 +1186,7 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
# If a dependent hash changed, might as well print the line above and then defer to the changes in
# that hash since in all likelihood, they're the same changes this task also saw.
output = [output[-1]] + recout
+ break
a_taint = a_data.get('taint', None)
b_taint = b_data.get('taint', None)
@@ -1001,7 +1208,7 @@ def calc_basehash(sigdata):
basedata = ''
alldeps = sigdata['taskdeps']
- for dep in alldeps:
+ for dep in sorted(alldeps):
basedata = basedata + dep
val = sigdata['varvals'][dep]
if val is not None:
@@ -1017,6 +1224,8 @@ def calc_taskhash(sigdata):
for c in sigdata['file_checksum_values']:
if c[1]:
+ if "./" in c[0]:
+ data = data + c[0]
data = data + c[1]
if 'taint' in sigdata:
@@ -1031,32 +1240,37 @@ def calc_taskhash(sigdata):
def dump_sigfile(a):
output = []
- with open(a, 'rb') as f:
- p1 = pickle.Unpickler(f)
- a_data = p1.load()
+ try:
+ with bb.compress.zstd.open(a, "rt", encoding="utf-8", num_threads=1) as f:
+ a_data = json.load(f, object_hook=SetDecoder)
+ except (TypeError, OSError) as err:
+ bb.error("Failed to open sigdata file '%s': %s" % (a, str(err)))
+ raise err
+
+ handle_renames(a_data)
- output.append("basewhitelist: %s" % (a_data['basewhitelist']))
+ output.append("basehash_ignore_vars: %s" % (sorted(a_data['basehash_ignore_vars'])))
- output.append("taskwhitelist: %s" % (a_data['taskwhitelist']))
+ output.append("taskhash_ignore_tasks: %s" % (sorted(a_data['taskhash_ignore_tasks'] or [])))
output.append("Task dependencies: %s" % (sorted(a_data['taskdeps'])))
output.append("basehash: %s" % (a_data['basehash']))
- for dep in a_data['gendeps']:
- output.append("List of dependencies for variable %s is %s" % (dep, a_data['gendeps'][dep]))
+ for dep in sorted(a_data['gendeps']):
+ output.append("List of dependencies for variable %s is %s" % (dep, sorted(a_data['gendeps'][dep])))
- for dep in a_data['varvals']:
+ for dep in sorted(a_data['varvals']):
output.append("Variable %s value is %s" % (dep, a_data['varvals'][dep]))
if 'runtaskdeps' in a_data:
- output.append("Tasks this task depends on: %s" % (a_data['runtaskdeps']))
+ output.append("Tasks this task depends on: %s" % (sorted(a_data['runtaskdeps'])))
if 'file_checksum_values' in a_data:
- output.append("This task depends on the checksums of files: %s" % (a_data['file_checksum_values']))
+ output.append("This task depends on the checksums of files: %s" % (sorted(a_data['file_checksum_values'])))
if 'runtaskhashes' in a_data:
- for dep in a_data['runtaskhashes']:
+ for dep in sorted(a_data['runtaskhashes']):
output.append("Hash for dependent task %s is %s" % (dep, a_data['runtaskhashes'][dep]))
if 'taint' in a_data:
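
With this patch, siginfo files move from pickle to zstd-compressed JSON, so a standalone reader reduces to the loading pattern used above. A sketch, assuming SetDecoder (the object hook defined in siggen.py, restoring sets from their JSON encoding) is importable:

    import json
    import bb.compress.zstd
    from bb.siggen import SetDecoder  # assumed import path for the object hook

    def load_siginfo(path):
        with bb.compress.zstd.open(path, "rt", encoding="utf-8", num_threads=1) as f:
            return json.load(f, object_hook=SetDecoder)
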
diff --git a/lib/bb/taskdata.py b/lib/bb/taskdata.py
index 47bad6d1f..66545a65a 100644
--- a/lib/bb/taskdata.py
+++ b/lib/bb/taskdata.py
@@ -39,7 +39,7 @@ class TaskData:
"""
BitBake Task Data implementation
"""
- def __init__(self, abort = True, skiplist = None, allowincomplete = False):
+ def __init__(self, halt = True, skiplist = None, allowincomplete = False):
self.build_targets = {}
self.run_targets = {}
@@ -57,7 +57,7 @@ class TaskData:
self.failed_rdeps = []
self.failed_fns = []
- self.abort = abort
+ self.halt = halt
self.allowincomplete = allowincomplete
self.skiplist = skiplist
@@ -328,7 +328,7 @@ class TaskData:
try:
self.add_provider_internal(cfgData, dataCache, item)
except bb.providers.NoProvider:
- if self.abort:
+ if self.halt:
raise
self.remove_buildtarget(item)
@@ -451,12 +451,12 @@ class TaskData:
for target in self.build_targets:
if fn in self.build_targets[target]:
self.build_targets[target].remove(fn)
- if len(self.build_targets[target]) == 0:
+ if not self.build_targets[target]:
self.remove_buildtarget(target, missing_list)
for target in self.run_targets:
if fn in self.run_targets[target]:
self.run_targets[target].remove(fn)
- if len(self.run_targets[target]) == 0:
+ if not self.run_targets[target]:
self.remove_runtarget(target, missing_list)
def remove_buildtarget(self, target, missing_list=None):
@@ -479,7 +479,7 @@ class TaskData:
fn = tid.rsplit(":",1)[0]
self.fail_fn(fn, missing_list)
- if self.abort and target in self.external_targets:
+ if self.halt and target in self.external_targets:
logger.error("Required build target '%s' has no buildable providers.\nMissing or unbuildable dependency chain was: %s", target, missing_list)
raise bb.providers.NoProvider(target)
@@ -516,7 +516,7 @@ class TaskData:
self.add_provider_internal(cfgData, dataCache, target)
added = added + 1
except bb.providers.NoProvider:
- if self.abort and target in self.external_targets and not self.allowincomplete:
+ if self.halt and target in self.external_targets and not self.allowincomplete:
raise
if not self.allowincomplete:
self.remove_buildtarget(target)
diff --git a/lib/bb/tests/codeparser.py b/lib/bb/tests/codeparser.py
index 826a2d2f6..f6585fb3a 100644
--- a/lib/bb/tests/codeparser.py
+++ b/lib/bb/tests/codeparser.py
@@ -44,6 +44,7 @@ class VariableReferenceTest(ReferenceTest):
def parseExpression(self, exp):
parsedvar = self.d.expandWithRefs(exp, None)
self.references = parsedvar.references
+ self.execs = parsedvar.execs
def test_simple_reference(self):
self.setEmptyVars(["FOO"])
@@ -61,6 +62,11 @@ class VariableReferenceTest(ReferenceTest):
self.parseExpression("${@d.getVar('BAR') + 'foo'}")
self.assertReferences(set(["BAR"]))
+ def test_python_exec_reference(self):
+ self.parseExpression("${@eval('3 * 5')}")
+ self.assertReferences(set())
+ self.assertExecs(set(["eval"]))
+
class ShellReferenceTest(ReferenceTest):
def parseExpression(self, exp):
@@ -111,9 +117,9 @@ ${D}${libdir}/pkgconfig/*.pc
self.assertExecs(set(["sed"]))
def test_parameter_expansion_modifiers(self):
- # - and + are also valid modifiers for parameter expansion, but are
+ # -, + and : are also valid modifiers for parameter expansion, but are
# valid characters in bitbake variable names, so are not included here
- for i in ('=', ':-', ':=', '?', ':?', ':+', '#', '%', '##', '%%'):
+ for i in ('=', '?', '#', '%', '##', '%%'):
name = "foo%sbar" % i
self.parseExpression("${%s}" % name)
self.assertNotIn(name, self.references)
@@ -318,7 +324,7 @@ d.getVar(a(), False)
"filename": "example.bb",
})
- deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)
+ deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d)
self.assertEqual(deps, set(["somevar", "bar", "something", "inexpand", "test", "test2", "a"]))
@@ -365,7 +371,7 @@ esac
self.d.setVarFlags("FOO", {"func": True})
self.setEmptyVars(execs)
- deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)
+ deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d)
self.assertEqual(deps, set(["somevar", "inverted"] + execs))
@@ -375,7 +381,7 @@ esac
self.d.setVar("FOO", "foo=oe_libinstall; eval $foo")
self.d.setVarFlag("FOO", "vardeps", "oe_libinstall")
- deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)
+ deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d)
self.assertEqual(deps, set(["oe_libinstall"]))
@@ -384,7 +390,7 @@ esac
self.d.setVar("FOO", "foo=oe_libinstall; eval $foo")
self.d.setVarFlag("FOO", "vardeps", "${@'oe_libinstall'}")
- deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)
+ deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d)
self.assertEqual(deps, set(["oe_libinstall"]))
@@ -399,7 +405,7 @@ esac
# Check dependencies
self.d.setVar('ANOTHERVAR', expr)
self.d.setVar('TESTVAR', 'anothervalue testval testval2')
- deps, values = bb.data.build_dependencies("ANOTHERVAR", set(self.d.keys()), set(), set(), self.d)
+ deps, values = bb.data.build_dependencies("ANOTHERVAR", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d)
self.assertEqual(sorted(values.splitlines()),
sorted([expr,
'TESTVAR{anothervalue} = Set',
@@ -412,11 +418,55 @@ esac
# Check final value
self.assertEqual(self.d.getVar('ANOTHERVAR').split(), ['anothervalue', 'yetanothervalue', 'lastone'])
+ def test_contains_vardeps_excluded(self):
+ # Check that the ignored_vars option to build_dependencies is handled by contains functionality
+ varval = '${TESTVAR2} ${@bb.utils.filter("TESTVAR", "somevalue anothervalue", d)}'
+ self.d.setVar('ANOTHERVAR', varval)
+ self.d.setVar('TESTVAR', 'anothervalue testval testval2')
+ self.d.setVar('TESTVAR2', 'testval3')
+ deps, values = bb.data.build_dependencies("ANOTHERVAR", set(self.d.keys()), set(), set(), set(), set(["TESTVAR"]), self.d, self.d)
+ self.assertEqual(sorted(values.splitlines()), sorted([varval]))
+ self.assertEqual(deps, set(["TESTVAR2"]))
+ self.assertEqual(self.d.getVar('ANOTHERVAR').split(), ['testval3', 'anothervalue'])
+
+ # Check that the vardepsexclude flag is handled by contains functionality
+ self.d.setVarFlag('ANOTHERVAR', 'vardepsexclude', 'TESTVAR')
+ deps, values = bb.data.build_dependencies("ANOTHERVAR", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d)
+ self.assertEqual(sorted(values.splitlines()), sorted([varval]))
+ self.assertEqual(deps, set(["TESTVAR2"]))
+ self.assertEqual(self.d.getVar('ANOTHERVAR').split(), ['testval3', 'anothervalue'])
+
+ def test_contains_vardeps_override_operators(self):
+ # Check that override operators handle dependencies correctly with the contains functionality
+ expr_plain = 'testval'
+ expr_prepend = '${@bb.utils.filter("TESTVAR1", "testval1", d)} '
+ expr_append = ' ${@bb.utils.filter("TESTVAR2", "testval2", d)}'
+ expr_remove = '${@bb.utils.contains("TESTVAR3", "no-testval", "testval", "", d)}'
+ # Check dependencies
+ self.d.setVar('ANOTHERVAR', expr_plain)
+ self.d.prependVar('ANOTHERVAR', expr_prepend)
+ self.d.appendVar('ANOTHERVAR', expr_append)
+ self.d.setVar('ANOTHERVAR:remove', expr_remove)
+ self.d.setVar('TESTVAR1', 'blah')
+ self.d.setVar('TESTVAR2', 'testval2')
+ self.d.setVar('TESTVAR3', 'no-testval')
+ deps, values = bb.data.build_dependencies("ANOTHERVAR", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d)
+ self.assertEqual(sorted(values.splitlines()),
+ sorted([
+ expr_prepend + expr_plain + expr_append,
+ '_remove of ' + expr_remove,
+ 'TESTVAR1{testval1} = Unset',
+ 'TESTVAR2{testval2} = Set',
+ 'TESTVAR3{no-testval} = Set',
+ ]))
+ # Check final value
+ self.assertEqual(self.d.getVar('ANOTHERVAR').split(), ['testval2'])
+
#Currently no wildcard support
#def test_vardeps_wildcards(self):
# self.d.setVar("oe_libinstall", "echo test")
# self.d.setVar("FOO", "foo=oe_libinstall; eval $foo")
# self.d.setVarFlag("FOO", "vardeps", "oe_*")
- # self.assertEquals(deps, set(["oe_libinstall"]))
+ # self.assertEqual(deps, set(["oe_libinstall"]))
diff --git a/lib/bb/tests/color.py b/lib/bb/tests/color.py
index 88dd27800..bb70cb393 100644
--- a/lib/bb/tests/color.py
+++ b/lib/bb/tests/color.py
@@ -20,7 +20,7 @@ class ProgressWatcher:
def __init__(self):
self._reports = []
- def handle_event(self, event):
+ def handle_event(self, event, d):
self._reports.append((event.progress, event.rate))
def reports(self):
diff --git a/lib/bb/tests/compression.py b/lib/bb/tests/compression.py
new file mode 100644
index 000000000..95af3f96d
--- /dev/null
+++ b/lib/bb/tests/compression.py
@@ -0,0 +1,100 @@
+#
+# Copyright BitBake Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+from pathlib import Path
+import bb.compress.lz4
+import bb.compress.zstd
+import contextlib
+import os
+import shutil
+import tempfile
+import unittest
+import subprocess
+
+
+class CompressionTests(object):
+ def setUp(self):
+ self._t = tempfile.TemporaryDirectory()
+ self.tmpdir = Path(self._t.name)
+ self.addCleanup(self._t.cleanup)
+
+ def _file_helper(self, mode_suffix, data):
+ tmp_file = self.tmpdir / "compressed"
+
+ with self.do_open(tmp_file, mode="w" + mode_suffix) as f:
+ f.write(data)
+
+ with self.do_open(tmp_file, mode="r" + mode_suffix) as f:
+ read_data = f.read()
+
+ self.assertEqual(read_data, data)
+
+ def test_text_file(self):
+ self._file_helper("t", "Hello")
+
+ def test_binary_file(self):
+ self._file_helper("b", "Hello".encode("utf-8"))
+
+ def _pipe_helper(self, mode_suffix, data):
+ rfd, wfd = os.pipe()
+ with open(rfd, "rb") as r, open(wfd, "wb") as w:
+ with self.do_open(r, mode="r" + mode_suffix) as decompress:
+ with self.do_open(w, mode="w" + mode_suffix) as compress:
+ compress.write(data)
+ read_data = decompress.read()
+
+ self.assertEqual(read_data, data)
+
+ def test_text_pipe(self):
+ self._pipe_helper("t", "Hello")
+
+ def test_binary_pipe(self):
+ self._pipe_helper("b", "Hello".encode("utf-8"))
+
+ def test_bad_decompress(self):
+ tmp_file = self.tmpdir / "compressed"
+ with tmp_file.open("wb") as f:
+ f.write(b"\x00")
+
+ with self.assertRaises(OSError):
+ with self.do_open(tmp_file, mode="rb", stderr=subprocess.DEVNULL) as f:
+ data = f.read()
+
+
+class LZ4Tests(CompressionTests, unittest.TestCase):
+ def setUp(self):
+ if shutil.which("lz4c") is None:
+ self.skipTest("'lz4c' not found")
+ super().setUp()
+
+ @contextlib.contextmanager
+ def do_open(self, *args, **kwargs):
+ with bb.compress.lz4.open(*args, **kwargs) as f:
+ yield f
+
+
+class ZStdTests(CompressionTests, unittest.TestCase):
+ def setUp(self):
+ if shutil.which("zstd") is None:
+ self.skipTest("'zstd' not found")
+ super().setUp()
+
+ @contextlib.contextmanager
+ def do_open(self, *args, **kwargs):
+ with bb.compress.zstd.open(*args, **kwargs) as f:
+ yield f
+
+
+class PZStdTests(CompressionTests, unittest.TestCase):
+ def setUp(self):
+ if shutil.which("pzstd") is None:
+ self.skipTest("'pzstd' not found")
+ super().setUp()
+
+ @contextlib.contextmanager
+ def do_open(self, *args, **kwargs):
+ with bb.compress.zstd.open(*args, num_threads=2, **kwargs) as f:
+ yield f
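
The helpers under test wrap the external lz4c/zstd/pzstd binaries in a file-like API; outside the test harness, the round-trip pattern they exercise is simply (path illustrative):

    import bb.compress.zstd

    with bb.compress.zstd.open("/tmp/demo.zst", "wt", encoding="utf-8") as f:
        f.write("Hello")

    with bb.compress.zstd.open("/tmp/demo.zst", "rt", encoding="utf-8") as f:
        assert f.read() == "Hello"
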
diff --git a/lib/bb/tests/cooker.py b/lib/bb/tests/cooker.py
index c82d4b7b8..9e524ae34 100644
--- a/lib/bb/tests/cooker.py
+++ b/lib/bb/tests/cooker.py
@@ -1,6 +1,8 @@
#
# BitBake Tests for cooker.py
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
diff --git a/lib/bb/tests/data.py b/lib/bb/tests/data.py
index 1d4a64b10..cbc7c1ecd 100644
--- a/lib/bb/tests/data.py
+++ b/lib/bb/tests/data.py
@@ -60,6 +60,15 @@ class DataExpansions(unittest.TestCase):
val = self.d.expand("${@5*12}")
self.assertEqual(str(val), "60")
+ def test_python_snippet_w_dict(self):
+ val = self.d.expand("${@{ 'green': 1, 'blue': 2 }['green']}")
+ self.assertEqual(str(val), "1")
+
+ def test_python_unexpanded_multi(self):
+ self.d.setVar("bar", "${unsetvar}")
+ val = self.d.expand("${@2*2},${foo},${@d.getVar('foo') + ' ${bar}'},${foo}")
+ self.assertEqual(str(val), "4,value_of_foo,${@d.getVar('foo') + ' ${unsetvar}'},value_of_foo")
+
def test_expand_in_python_snippet(self):
val = self.d.expand("${@'boo ' + '${foo}'}")
self.assertEqual(str(val), "boo value_of_foo")
@@ -68,6 +77,18 @@ class DataExpansions(unittest.TestCase):
val = self.d.expand("${@d.getVar('foo') + ' ${bar}'}")
self.assertEqual(str(val), "value_of_foo value_of_bar")
+ def test_python_snippet_function_reference(self):
+ self.d.setVar("TESTVAL", "testvalue")
+ self.d.setVar("testfunc", 'd.getVar("TESTVAL")')
+ context = bb.utils.get_context()
+ context["testfunc"] = lambda d: d.getVar("TESTVAL")
+ val = self.d.expand("${@testfunc(d)}")
+ self.assertEqual(str(val), "testvalue")
+
+ def test_python_snippet_builtin_metadata(self):
+ self.d.setVar("eval", "INVALID")
+ self.d.expand("${@eval('3')}")
+
def test_python_unexpanded(self):
self.d.setVar("bar", "${unsetvar}")
val = self.d.expand("${@d.getVar('foo') + ' ${bar}'}")
@@ -245,35 +266,35 @@ class TestConcatOverride(unittest.TestCase):
def test_prepend(self):
self.d.setVar("TEST", "${VAL}")
- self.d.setVar("TEST_prepend", "${FOO}:")
+ self.d.setVar("TEST:prepend", "${FOO}:")
self.assertEqual(self.d.getVar("TEST"), "foo:val")
def test_append(self):
self.d.setVar("TEST", "${VAL}")
- self.d.setVar("TEST_append", ":${BAR}")
+ self.d.setVar("TEST:append", ":${BAR}")
self.assertEqual(self.d.getVar("TEST"), "val:bar")
def test_multiple_append(self):
self.d.setVar("TEST", "${VAL}")
- self.d.setVar("TEST_prepend", "${FOO}:")
- self.d.setVar("TEST_append", ":val2")
- self.d.setVar("TEST_append", ":${BAR}")
+ self.d.setVar("TEST:prepend", "${FOO}:")
+ self.d.setVar("TEST:append", ":val2")
+ self.d.setVar("TEST:append", ":${BAR}")
self.assertEqual(self.d.getVar("TEST"), "foo:val:val2:bar")
def test_append_unset(self):
- self.d.setVar("TEST_prepend", "${FOO}:")
- self.d.setVar("TEST_append", ":val2")
- self.d.setVar("TEST_append", ":${BAR}")
+ self.d.setVar("TEST:prepend", "${FOO}:")
+ self.d.setVar("TEST:append", ":val2")
+ self.d.setVar("TEST:append", ":${BAR}")
self.assertEqual(self.d.getVar("TEST"), "foo::val2:bar")
def test_remove(self):
self.d.setVar("TEST", "${VAL} ${BAR}")
- self.d.setVar("TEST_remove", "val")
+ self.d.setVar("TEST:remove", "val")
self.assertEqual(self.d.getVar("TEST"), " bar")
def test_remove_cleared(self):
self.d.setVar("TEST", "${VAL} ${BAR}")
- self.d.setVar("TEST_remove", "val")
+ self.d.setVar("TEST:remove", "val")
self.d.setVar("TEST", "${VAL} ${BAR}")
self.assertEqual(self.d.getVar("TEST"), "val bar")
@@ -281,42 +302,42 @@ class TestConcatOverride(unittest.TestCase):
# (including that whitespace is preserved)
def test_remove_inactive_override(self):
self.d.setVar("TEST", "${VAL} ${BAR} 123")
- self.d.setVar("TEST_remove_inactiveoverride", "val")
+ self.d.setVar("TEST:remove:inactiveoverride", "val")
self.assertEqual(self.d.getVar("TEST"), "val bar 123")
def test_doubleref_remove(self):
self.d.setVar("TEST", "${VAL} ${BAR}")
- self.d.setVar("TEST_remove", "val")
+ self.d.setVar("TEST:remove", "val")
self.d.setVar("TEST_TEST", "${TEST} ${TEST}")
self.assertEqual(self.d.getVar("TEST_TEST"), " bar bar")
def test_empty_remove(self):
self.d.setVar("TEST", "")
- self.d.setVar("TEST_remove", "val")
+ self.d.setVar("TEST:remove", "val")
self.assertEqual(self.d.getVar("TEST"), "")
def test_remove_expansion(self):
self.d.setVar("BAR", "Z")
self.d.setVar("TEST", "${BAR}/X Y")
- self.d.setVar("TEST_remove", "${BAR}/X")
+ self.d.setVar("TEST:remove", "${BAR}/X")
self.assertEqual(self.d.getVar("TEST"), " Y")
def test_remove_expansion_items(self):
self.d.setVar("TEST", "A B C D")
self.d.setVar("BAR", "B D")
- self.d.setVar("TEST_remove", "${BAR}")
+ self.d.setVar("TEST:remove", "${BAR}")
self.assertEqual(self.d.getVar("TEST"), "A C ")
def test_remove_preserve_whitespace(self):
# When the removal isn't active, the original value should be preserved
self.d.setVar("TEST", " A B")
- self.d.setVar("TEST_remove", "C")
+ self.d.setVar("TEST:remove", "C")
self.assertEqual(self.d.getVar("TEST"), " A B")
def test_remove_preserve_whitespace2(self):
# When the removal is active preserve the whitespace
self.d.setVar("TEST", " A B")
- self.d.setVar("TEST_remove", "B")
+ self.d.setVar("TEST:remove", "B")
self.assertEqual(self.d.getVar("TEST"), " A ")
class TestOverrides(unittest.TestCase):
@@ -329,81 +350,86 @@ class TestOverrides(unittest.TestCase):
self.assertEqual(self.d.getVar("TEST"), "testvalue")
def test_one_override(self):
- self.d.setVar("TEST_bar", "testvalue2")
+ self.d.setVar("TEST:bar", "testvalue2")
self.assertEqual(self.d.getVar("TEST"), "testvalue2")
def test_one_override_unset(self):
- self.d.setVar("TEST2_bar", "testvalue2")
+ self.d.setVar("TEST2:bar", "testvalue2")
self.assertEqual(self.d.getVar("TEST2"), "testvalue2")
- self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST2', 'OVERRIDES', 'TEST2_bar'])
+ self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST2', 'OVERRIDES', 'TEST2:bar'])
def test_multiple_override(self):
- self.d.setVar("TEST_bar", "testvalue2")
- self.d.setVar("TEST_local", "testvalue3")
- self.d.setVar("TEST_foo", "testvalue4")
+ self.d.setVar("TEST:bar", "testvalue2")
+ self.d.setVar("TEST:local", "testvalue3")
+ self.d.setVar("TEST:foo", "testvalue4")
self.assertEqual(self.d.getVar("TEST"), "testvalue3")
- self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST_foo', 'OVERRIDES', 'TEST_bar', 'TEST_local'])
+ self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST:foo', 'OVERRIDES', 'TEST:bar', 'TEST:local'])
def test_multiple_combined_overrides(self):
- self.d.setVar("TEST_local_foo_bar", "testvalue3")
+ self.d.setVar("TEST:local:foo:bar", "testvalue3")
self.assertEqual(self.d.getVar("TEST"), "testvalue3")
def test_multiple_overrides_unset(self):
- self.d.setVar("TEST2_local_foo_bar", "testvalue3")
+ self.d.setVar("TEST2:local:foo:bar", "testvalue3")
self.assertEqual(self.d.getVar("TEST2"), "testvalue3")
def test_keyexpansion_override(self):
self.d.setVar("LOCAL", "local")
- self.d.setVar("TEST_bar", "testvalue2")
- self.d.setVar("TEST_${LOCAL}", "testvalue3")
- self.d.setVar("TEST_foo", "testvalue4")
+ self.d.setVar("TEST:bar", "testvalue2")
+ self.d.setVar("TEST:${LOCAL}", "testvalue3")
+ self.d.setVar("TEST:foo", "testvalue4")
bb.data.expandKeys(self.d)
self.assertEqual(self.d.getVar("TEST"), "testvalue3")
def test_rename_override(self):
- self.d.setVar("ALTERNATIVE_ncurses-tools_class-target", "a")
+ self.d.setVar("ALTERNATIVE:ncurses-tools:class-target", "a")
self.d.setVar("OVERRIDES", "class-target")
- self.d.renameVar("ALTERNATIVE_ncurses-tools", "ALTERNATIVE_lib32-ncurses-tools")
- self.assertEqual(self.d.getVar("ALTERNATIVE_lib32-ncurses-tools"), "a")
+ self.d.renameVar("ALTERNATIVE:ncurses-tools", "ALTERNATIVE:lib32-ncurses-tools")
+ self.assertEqual(self.d.getVar("ALTERNATIVE:lib32-ncurses-tools"), "a")
def test_underscore_override(self):
- self.d.setVar("TEST_bar", "testvalue2")
- self.d.setVar("TEST_some_val", "testvalue3")
- self.d.setVar("TEST_foo", "testvalue4")
+ self.d.setVar("TEST:bar", "testvalue2")
+ self.d.setVar("TEST:some_val", "testvalue3")
+ self.d.setVar("TEST:foo", "testvalue4")
self.d.setVar("OVERRIDES", "foo:bar:some_val")
self.assertEqual(self.d.getVar("TEST"), "testvalue3")
+ # Test an override with _<numeric> in it based on a real world OE issue
+ def test_underscore_override_2(self):
+ self.d.setVar("TARGET_ARCH", "x86_64")
+ self.d.setVar("PN", "test-${TARGET_ARCH}")
+ self.d.setVar("VERSION", "1")
+ self.d.setVar("VERSION:pn-test-${TARGET_ARCH}", "2")
+ self.d.setVar("OVERRIDES", "pn-${PN}")
+ bb.data.expandKeys(self.d)
+ self.assertEqual(self.d.getVar("VERSION"), "2")
+
def test_remove_with_override(self):
- self.d.setVar("TEST_bar", "testvalue2")
- self.d.setVar("TEST_some_val", "testvalue3 testvalue5")
- self.d.setVar("TEST_some_val_remove", "testvalue3")
- self.d.setVar("TEST_foo", "testvalue4")
+ self.d.setVar("TEST:bar", "testvalue2")
+ self.d.setVar("TEST:some_val", "testvalue3 testvalue5")
+ self.d.setVar("TEST:some_val:remove", "testvalue3")
+ self.d.setVar("TEST:foo", "testvalue4")
self.d.setVar("OVERRIDES", "foo:bar:some_val")
self.assertEqual(self.d.getVar("TEST"), " testvalue5")
def test_append_and_override_1(self):
- self.d.setVar("TEST_append", "testvalue2")
- self.d.setVar("TEST_bar", "testvalue3")
+ self.d.setVar("TEST:append", "testvalue2")
+ self.d.setVar("TEST:bar", "testvalue3")
self.assertEqual(self.d.getVar("TEST"), "testvalue3testvalue2")
def test_append_and_override_2(self):
- self.d.setVar("TEST_append_bar", "testvalue2")
+ self.d.setVar("TEST:append:bar", "testvalue2")
self.assertEqual(self.d.getVar("TEST"), "testvaluetestvalue2")
def test_append_and_override_3(self):
- self.d.setVar("TEST_bar_append", "testvalue2")
+ self.d.setVar("TEST:bar:append", "testvalue2")
self.assertEqual(self.d.getVar("TEST"), "testvalue2")
- # Test an override with _<numeric> in it based on a real world OE issue
- def test_underscore_override(self):
- self.d.setVar("TARGET_ARCH", "x86_64")
- self.d.setVar("PN", "test-${TARGET_ARCH}")
- self.d.setVar("VERSION", "1")
- self.d.setVar("VERSION_pn-test-${TARGET_ARCH}", "2")
- self.d.setVar("OVERRIDES", "pn-${PN}")
- bb.data.expandKeys(self.d)
- self.assertEqual(self.d.getVar("VERSION"), "2")
+ def test_append_and_unused_override(self):
+ # Had a bug where an unused override append could return "" instead of None
+ self.d.setVar("BAR:append:unusedoverride", "testvalue2")
+ self.assertEqual(self.d.getVar("BAR"), None)
class TestKeyExpansion(unittest.TestCase):
def setUp(self):
@@ -498,7 +524,7 @@ class TaskHash(unittest.TestCase):
d.setVar("VAR", "val")
# Adding an inactive removal shouldn't change the hash
d.setVar("BAR", "notbar")
- d.setVar("MYCOMMAND_remove", "${BAR}")
+ d.setVar("MYCOMMAND:remove", "${BAR}")
nexthash = gettask_bashhash("mytask", d)
self.assertEqual(orighash, nexthash)
diff --git a/lib/bb/tests/event.py b/lib/bb/tests/event.py
index 9ca7e9bc8..ef61891d3 100644
--- a/lib/bb/tests/event.py
+++ b/lib/bb/tests/event.py
@@ -13,6 +13,7 @@ import pickle
import threading
import time
import unittest
+import tempfile
from unittest.mock import Mock
from unittest.mock import call
@@ -157,7 +158,7 @@ class EventHandlingTest(unittest.TestCase):
self._test_process.event_handler,
event,
None)
- self._test_process.event_handler.assert_called_once_with(event)
+ self._test_process.event_handler.assert_called_once_with(event, None)
def test_fire_class_handlers(self):
""" Test fire_class_handlers method """
@@ -175,10 +176,10 @@ class EventHandlingTest(unittest.TestCase):
bb.event.fire_class_handlers(event1, None)
bb.event.fire_class_handlers(event2, None)
bb.event.fire_class_handlers(event2, None)
- expected_event_handler1 = [call(event1)]
- expected_event_handler2 = [call(event1),
- call(event2),
- call(event2)]
+ expected_event_handler1 = [call(event1, None)]
+ expected_event_handler2 = [call(event1, None),
+ call(event2, None),
+ call(event2, None)]
self.assertEqual(self._test_process.event_handler1.call_args_list,
expected_event_handler1)
self.assertEqual(self._test_process.event_handler2.call_args_list,
@@ -205,7 +206,7 @@ class EventHandlingTest(unittest.TestCase):
bb.event.fire_class_handlers(event2, None)
bb.event.fire_class_handlers(event2, None)
expected_event_handler1 = []
- expected_event_handler2 = [call(event1)]
+ expected_event_handler2 = [call(event1, None)]
self.assertEqual(self._test_process.event_handler1.call_args_list,
expected_event_handler1)
self.assertEqual(self._test_process.event_handler2.call_args_list,
@@ -223,7 +224,7 @@ class EventHandlingTest(unittest.TestCase):
self.assertEqual(result, bb.event.Registered)
bb.event.fire_class_handlers(event1, None)
bb.event.fire_class_handlers(event2, None)
- expected = [call(event1), call(event2)]
+ expected = [call(event1, None), call(event2, None)]
self.assertEqual(self._test_process.event_handler1.call_args_list,
expected)
@@ -237,7 +238,7 @@ class EventHandlingTest(unittest.TestCase):
self.assertEqual(result, bb.event.Registered)
bb.event.fire_class_handlers(event1, None)
bb.event.fire_class_handlers(event2, None)
- expected = [call(event1), call(event2), call(event1)]
+ expected = [call(event1, None), call(event2, None), call(event1, None)]
self.assertEqual(self._test_process.event_handler1.call_args_list,
expected)
@@ -251,7 +252,7 @@ class EventHandlingTest(unittest.TestCase):
self.assertEqual(result, bb.event.Registered)
bb.event.fire_class_handlers(event1, None)
bb.event.fire_class_handlers(event2, None)
- expected = [call(event1), call(event2), call(event1), call(event2)]
+ expected = [call(event1, None), call(event2, None), call(event1, None), call(event2, None)]
self.assertEqual(self._test_process.event_handler1.call_args_list,
expected)
@@ -359,9 +360,10 @@ class EventHandlingTest(unittest.TestCase):
event1 = bb.event.ConfigParsed()
bb.event.fire(event1, None)
- expected = [call(event1)]
+ expected = [call(event1, None)]
self.assertEqual(self._test_process.event_handler1.call_args_list,
expected)
+ expected = [call(event1)]
self.assertEqual(self._test_ui1.event.send.call_args_list,
expected)
@@ -450,10 +452,9 @@ class EventHandlingTest(unittest.TestCase):
and disable threadlocks tests """
bb.event.fire(bb.event.OperationStarted(), None)
- def test_enable_threadlock(self):
+ def test_event_threadlock(self):
""" Test enable_threadlock method """
self._set_threadlock_test_mockups()
- bb.event.enable_threadlock()
self._set_and_run_threadlock_test_workers()
# Calls to UI handlers should be in order as all the registered
# handlers for the event coming from the first worker should be
@@ -461,20 +462,6 @@ class EventHandlingTest(unittest.TestCase):
self.assertEqual(self._threadlock_test_calls,
["w1_ui1", "w1_ui2", "w2_ui1", "w2_ui2"])
-
- def test_disable_threadlock(self):
- """ Test disable_threadlock method """
- self._set_threadlock_test_mockups()
- bb.event.disable_threadlock()
- self._set_and_run_threadlock_test_workers()
- # Calls to UI handlers should be intertwined together. Thanks to the
- # delay in the registered handlers for the event coming from the first
- # worker, the event coming from the second worker starts being
- # processed before finishing handling the first worker event.
- self.assertEqual(self._threadlock_test_calls,
- ["w1_ui1", "w2_ui1", "w1_ui2", "w2_ui2"])
-
-
class EventClassesTest(unittest.TestCase):
""" Event classes test class """
@@ -482,6 +469,8 @@ class EventClassesTest(unittest.TestCase):
def setUp(self):
bb.event.worker_pid = EventClassesTest._worker_pid
+ self.d = bb.data.init()
+ bb.parse.siggen = bb.siggen.init(self.d)
def test_Event(self):
""" Test the Event base class """
@@ -964,3 +953,24 @@ class EventClassesTest(unittest.TestCase):
event = bb.event.FindSigInfoResult(result)
self.assertEqual(event.result, result)
self.assertEqual(event.pid, EventClassesTest._worker_pid)
+
+ def test_lineno_in_eventhandler(self):
+ # The error lineno is 5, not 4, since the first line is '\n'
+ error_line = """
+# Comment line1
+# Comment line2
+python test_lineno_in_eventhandler() {
+ This is an error line
+}
+addhandler test_lineno_in_eventhandler
+test_lineno_in_eventhandler[eventmask] = "bb.event.ConfigParsed"
+"""
+
+ with self.assertLogs() as logs:
+ f = tempfile.NamedTemporaryFile(suffix = '.bb')
+ f.write(bytes(error_line, "utf-8"))
+ f.flush()
+ d = bb.parse.handle(f.name, self.d)['']
+
+ output = "".join(logs.output)
+ self.assertTrue(" line 5\n" in output)
diff --git a/lib/bb/tests/fetch-testdata/debian/pool/main/m/minicom/index.html b/lib/bb/tests/fetch-testdata/debian/pool/main/m/minicom/index.html
new file mode 100644
index 000000000..4a1eb4de1
--- /dev/null
+++ b/lib/bb/tests/fetch-testdata/debian/pool/main/m/minicom/index.html
@@ -0,0 +1,59 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+<html>
+ <head>
+ <title>Index of /debian/pool/main/m/minicom</title>
+ </head>
+ <body>
+<h1>Index of /debian/pool/main/m/minicom</h1>
+ <table>
+ <tr><th valign="top"><img src="/icons/blank.gif" alt="[ICO]"></th><th><a href="?C=N;O=D">Name</a></th><th><a href="?C=M;O=A">Last modified</a></th><th><a href="?C=S;O=A">Size</a></th></tr>
+ <tr><th colspan="4"><hr></th></tr>
+<tr><td valign="top"><img src="/icons/back.gif" alt="[PARENTDIR]"></td><td><a href="/debian/pool/main/m/">Parent Directory</a></td><td>&nbsp;</td><td align="right"> - </td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1+deb8u1.debian.tar.xz">minicom_2.7-1+deb8u1.debian.tar.xz</a></td><td align="right">2017-04-24 08:22 </td><td align="right"> 14K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1+deb8u1.dsc">minicom_2.7-1+deb8u1.dsc</a></td><td align="right">2017-04-24 08:22 </td><td align="right">1.9K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1+deb8u1_amd64.deb">minicom_2.7-1+deb8u1_amd64.deb</a></td><td align="right">2017-04-25 21:10 </td><td align="right">257K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1+deb8u1_armel.deb">minicom_2.7-1+deb8u1_armel.deb</a></td><td align="right">2017-04-26 00:58 </td><td align="right">246K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1+deb8u1_armhf.deb">minicom_2.7-1+deb8u1_armhf.deb</a></td><td align="right">2017-04-26 00:58 </td><td align="right">245K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1+deb8u1_i386.deb">minicom_2.7-1+deb8u1_i386.deb</a></td><td align="right">2017-04-25 21:41 </td><td align="right">258K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1.debian.tar.xz">minicom_2.7-1.1.debian.tar.xz</a></td><td align="right">2017-04-22 09:34 </td><td align="right"> 14K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1.dsc">minicom_2.7-1.1.dsc</a></td><td align="right">2017-04-22 09:34 </td><td align="right">1.9K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_amd64.deb">minicom_2.7-1.1_amd64.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">261K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_arm64.deb">minicom_2.7-1.1_arm64.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">250K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_armel.deb">minicom_2.7-1.1_armel.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">255K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_armhf.deb">minicom_2.7-1.1_armhf.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">254K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_i386.deb">minicom_2.7-1.1_i386.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">266K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_mips.deb">minicom_2.7-1.1_mips.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">258K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_mips64el.deb">minicom_2.7-1.1_mips64el.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">259K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_mipsel.deb">minicom_2.7-1.1_mipsel.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">259K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_ppc64el.deb">minicom_2.7-1.1_ppc64el.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">253K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_s390x.deb">minicom_2.7-1.1_s390x.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">261K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_amd64.deb">minicom_2.7.1-1+b1_amd64.deb</a></td><td align="right">2018-05-06 08:14 </td><td align="right">262K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_arm64.deb">minicom_2.7.1-1+b1_arm64.deb</a></td><td align="right">2018-05-06 07:58 </td><td align="right">250K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_armel.deb">minicom_2.7.1-1+b1_armel.deb</a></td><td align="right">2018-05-06 08:45 </td><td align="right">253K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_armhf.deb">minicom_2.7.1-1+b1_armhf.deb</a></td><td align="right">2018-05-06 10:42 </td><td align="right">253K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_i386.deb">minicom_2.7.1-1+b1_i386.deb</a></td><td align="right">2018-05-06 08:55 </td><td align="right">266K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_mips.deb">minicom_2.7.1-1+b1_mips.deb</a></td><td align="right">2018-05-06 08:14 </td><td align="right">258K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_mipsel.deb">minicom_2.7.1-1+b1_mipsel.deb</a></td><td align="right">2018-05-06 12:13 </td><td align="right">259K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_ppc64el.deb">minicom_2.7.1-1+b1_ppc64el.deb</a></td><td align="right">2018-05-06 09:10 </td><td align="right">260K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_s390x.deb">minicom_2.7.1-1+b1_s390x.deb</a></td><td align="right">2018-05-06 08:14 </td><td align="right">257K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b2_mips64el.deb">minicom_2.7.1-1+b2_mips64el.deb</a></td><td align="right">2018-05-06 09:41 </td><td align="right">260K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1.debian.tar.xz">minicom_2.7.1-1.debian.tar.xz</a></td><td align="right">2017-08-13 15:40 </td><td align="right"> 14K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1.dsc">minicom_2.7.1-1.dsc</a></td><td align="right">2017-08-13 15:40 </td><td align="right">1.8K</td></tr>
+<tr><td valign="top"><img src="/icons/compressed.gif" alt="[ ]"></td><td><a href="minicom_2.7.1.orig.tar.gz">minicom_2.7.1.orig.tar.gz</a></td><td align="right">2017-08-13 15:40 </td><td align="right">855K</td></tr>
+<tr><td valign="top"><img src="/icons/compressed.gif" alt="[ ]"></td><td><a href="minicom_2.7.orig.tar.gz">minicom_2.7.orig.tar.gz</a></td><td align="right">2014-01-01 09:36 </td><td align="right">843K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2.debian.tar.xz">minicom_2.8-2.debian.tar.xz</a></td><td align="right">2021-06-15 03:47 </td><td align="right"> 14K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2.dsc">minicom_2.8-2.dsc</a></td><td align="right">2021-06-15 03:47 </td><td align="right">1.8K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_amd64.deb">minicom_2.8-2_amd64.deb</a></td><td align="right">2021-06-15 03:58 </td><td align="right">280K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_arm64.deb">minicom_2.8-2_arm64.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">275K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_armel.deb">minicom_2.8-2_armel.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">271K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_armhf.deb">minicom_2.8-2_armhf.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">272K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_i386.deb">minicom_2.8-2_i386.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">285K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_mips64el.deb">minicom_2.8-2_mips64el.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">277K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_mipsel.deb">minicom_2.8-2_mipsel.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">278K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_ppc64el.deb">minicom_2.8-2_ppc64el.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">286K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_s390x.deb">minicom_2.8-2_s390x.deb</a></td><td align="right">2021-06-15 03:58 </td><td align="right">275K</td></tr>
+<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8.orig.tar.bz2">minicom_2.8.orig.tar.bz2</a></td><td align="right">2021-01-03 12:44 </td><td align="right">598K</td></tr>
+ <tr><th colspan="4"><hr></th></tr>
+</table>
+<address>Apache Server at ftp.debian.org Port 80</address>
+</body></html>
diff --git a/lib/bb/tests/fetch-testdata/npm-local-link-sources/inner/npm-shrinkwrap-v1.json b/lib/bb/tests/fetch-testdata/npm-local-link-sources/inner/npm-shrinkwrap-v1.json
new file mode 100644
index 000000000..bcdca7de9
--- /dev/null
+++ b/lib/bb/tests/fetch-testdata/npm-local-link-sources/inner/npm-shrinkwrap-v1.json
@@ -0,0 +1,11 @@
+{
+ "name": "@npm-local-link-sources/inner",
+ "version": "1.0.0",
+ "lockfileVersion": 1,
+ "requires": true,
+ "dependencies": {
+ "@npm-local-link-sources/upper": {
+ "version": "file:.."
+ }
+ }
+}
diff --git a/lib/bb/tests/fetch-testdata/npm-local-link-sources/inner/npm-shrinkwrap-v2.json b/lib/bb/tests/fetch-testdata/npm-local-link-sources/inner/npm-shrinkwrap-v2.json
new file mode 100644
index 000000000..2427acf2d
--- /dev/null
+++ b/lib/bb/tests/fetch-testdata/npm-local-link-sources/inner/npm-shrinkwrap-v2.json
@@ -0,0 +1,28 @@
+{
+ "name": "@npm-local-link-sources/inner",
+ "version": "1.0.0",
+ "lockfileVersion": 2,
+ "requires": true,
+ "packages": {
+ "": {
+ "name": "@npm-local-link-sources/inner",
+ "version": "1.0.0",
+ "dependencies": {
+ "@npm-local-link-sources/upper": "file:../"
+ }
+ },
+ "..": {
+ "name": "@npm-local-link-sources/upper",
+ "version": "1.0.0"
+ },
+ "node_modules/@npm-local-link-sources/upper": {
+ "resolved": "..",
+ "link": true
+ }
+ },
+ "dependencies": {
+ "@npm-local-link-sources/upper": {
+ "version": "file:.."
+ }
+ }
+}
diff --git a/lib/bb/tests/fetch-testdata/npm-local-link-sources/inner/npm-shrinkwrap-v3.json b/lib/bb/tests/fetch-testdata/npm-local-link-sources/inner/npm-shrinkwrap-v3.json
new file mode 100644
index 000000000..99e0e8f6b
--- /dev/null
+++ b/lib/bb/tests/fetch-testdata/npm-local-link-sources/inner/npm-shrinkwrap-v3.json
@@ -0,0 +1,23 @@
+{
+ "name": "@npm-local-link-sources/inner",
+ "version": "1.0.0",
+ "lockfileVersion": 3,
+ "requires": true,
+ "packages": {
+ "": {
+ "name": "@npm-local-link-sources/inner",
+ "version": "1.0.0",
+ "dependencies": {
+ "@npm-local-link-sources/upper": "file:../"
+ }
+ },
+ "..": {
+ "name": "@npm-local-link-sources/upper",
+ "version": "1.0.0"
+ },
+ "node_modules/@npm-local-link-sources/upper": {
+ "resolved": "..",
+ "link": true
+ }
+ }
+}
diff --git a/lib/bb/tests/fetch-testdata/npm-local-link-sources/inner/package.json b/lib/bb/tests/fetch-testdata/npm-local-link-sources/inner/package.json
new file mode 100644
index 000000000..a578771b9
--- /dev/null
+++ b/lib/bb/tests/fetch-testdata/npm-local-link-sources/inner/package.json
@@ -0,0 +1,8 @@
+{
+ "name": "@npm-local-link-sources/inner",
+ "version": "1.0.0",
+ "description": "Inner project in npm-local-link-sources example",
+ "dependencies": {
+ "@npm-local-link-sources/upper": "file:../"
+ }
+}
diff --git a/lib/bb/tests/fetch-testdata/npm-local-link-sources/package.json b/lib/bb/tests/fetch-testdata/npm-local-link-sources/package.json
new file mode 100644
index 000000000..42176c3ed
--- /dev/null
+++ b/lib/bb/tests/fetch-testdata/npm-local-link-sources/package.json
@@ -0,0 +1,5 @@
+{
+ "name": "@npm-local-link-sources/upper",
+ "version": "1.0.0",
+ "description": "Upper project in npm-local-link-sources example"
+}
diff --git a/lib/bb/tests/fetch-testdata/software/libxml2/2.10/index.html b/lib/bb/tests/fetch-testdata/software/libxml2/2.10/index.html
new file mode 100644
index 000000000..4e41af6d6
--- /dev/null
+++ b/lib/bb/tests/fetch-testdata/software/libxml2/2.10/index.html
@@ -0,0 +1,20 @@
+<!DOCTYPE html><html><head><meta http-equiv="content-type" content="text/html; charset=utf-8"><meta name="viewport" content="width=device-width"><style type="text/css">body,html {background:#fff;font-family:"Bitstream Vera Sans","Lucida Grande","Lucida Sans Unicode",Lucidux,Verdana,Lucida,sans-serif;}tr:nth-child(even) {background:#f4f4f4;}th,td {padding:0.1em 0.5em;}th {text-align:left;font-weight:bold;background:#eee;border-bottom:1px solid #aaa;}#list {border:1px solid #aaa;width:100%;}a {color:#a33;}a:hover {color:#e33;}</style>
+
+<title>Index of /sources/libxml2/2.10/</title>
+</head><body><h1>Index of /sources/libxml2/2.10/</h1>
+<table id="list"><thead><tr><th style="width:55%"><a href="?C=N&amp;O=A">File Name</a>&nbsp;<a href="?C=N&amp;O=D">&nbsp;&darr;&nbsp;</a></th><th style="width:20%"><a href="?C=S&amp;O=A">File Size</a>&nbsp;<a href="?C=S&amp;O=D">&nbsp;&darr;&nbsp;</a></th><th style="width:25%"><a href="?C=M&amp;O=A">Date</a>&nbsp;<a href="?C=M&amp;O=D">&nbsp;&darr;&nbsp;</a></th></tr></thead>
+<tbody><tr><td class="link"><a href="../">Parent directory/</a></td><td class="size">-</td><td class="date">-</td></tr>
+<tr><td class="link"><a href="LATEST-IS-2.10.3" title="LATEST-IS-2.10.3">LATEST-IS-2.10.3</a></td><td class="size">2.5 MiB</td><td class="date">2022-Oct-14 12:55</td></tr>
+<tr><td class="link"><a href="libxml2-2.10.0.news" title="libxml2-2.10.0.news">libxml2-2.10.0.news</a></td><td class="size">7.1 KiB</td><td class="date">2022-Aug-17 11:55</td></tr>
+<tr><td class="link"><a href="libxml2-2.10.0.sha256sum" title="libxml2-2.10.0.sha256sum">libxml2-2.10.0.sha256sum</a></td><td class="size">174 B</td><td class="date">2022-Aug-17 11:55</td></tr>
+<tr><td class="link"><a href="libxml2-2.10.0.tar.xz" title="libxml2-2.10.0.tar.xz">libxml2-2.10.0.tar.xz</a></td><td class="size">2.6 MiB</td><td class="date">2022-Aug-17 11:55</td></tr>
+<tr><td class="link"><a href="libxml2-2.10.1.news" title="libxml2-2.10.1.news">libxml2-2.10.1.news</a></td><td class="size">455 B</td><td class="date">2022-Aug-25 11:33</td></tr>
+<tr><td class="link"><a href="libxml2-2.10.1.sha256sum" title="libxml2-2.10.1.sha256sum">libxml2-2.10.1.sha256sum</a></td><td class="size">174 B</td><td class="date">2022-Aug-25 11:33</td></tr>
+<tr><td class="link"><a href="libxml2-2.10.1.tar.xz" title="libxml2-2.10.1.tar.xz">libxml2-2.10.1.tar.xz</a></td><td class="size">2.6 MiB</td><td class="date">2022-Aug-25 11:33</td></tr>
+<tr><td class="link"><a href="libxml2-2.10.2.news" title="libxml2-2.10.2.news">libxml2-2.10.2.news</a></td><td class="size">309 B</td><td class="date">2022-Aug-29 14:56</td></tr>
+<tr><td class="link"><a href="libxml2-2.10.2.sha256sum" title="libxml2-2.10.2.sha256sum">libxml2-2.10.2.sha256sum</a></td><td class="size">174 B</td><td class="date">2022-Aug-29 14:56</td></tr>
+<tr><td class="link"><a href="libxml2-2.10.2.tar.xz" title="libxml2-2.10.2.tar.xz">libxml2-2.10.2.tar.xz</a></td><td class="size">2.5 MiB</td><td class="date">2022-Aug-29 14:56</td></tr>
+<tr><td class="link"><a href="libxml2-2.10.3.news" title="libxml2-2.10.3.news">libxml2-2.10.3.news</a></td><td class="size">294 B</td><td class="date">2022-Oct-14 12:55</td></tr>
+<tr><td class="link"><a href="libxml2-2.10.3.sha256sum" title="libxml2-2.10.3.sha256sum">libxml2-2.10.3.sha256sum</a></td><td class="size">174 B</td><td class="date">2022-Oct-14 12:55</td></tr>
+<tr><td class="link"><a href="libxml2-2.10.3.tar.xz" title="libxml2-2.10.3.tar.xz">libxml2-2.10.3.tar.xz</a></td><td class="size">2.5 MiB</td><td class="date">2022-Oct-14 12:55</td></tr>
+</tbody></table></body></html>
diff --git a/lib/bb/tests/fetch-testdata/software/libxml2/2.9/index.html b/lib/bb/tests/fetch-testdata/software/libxml2/2.9/index.html
new file mode 100644
index 000000000..abdfdd0fa
--- /dev/null
+++ b/lib/bb/tests/fetch-testdata/software/libxml2/2.9/index.html
@@ -0,0 +1,40 @@
+<!DOCTYPE html><html><head><meta http-equiv="content-type" content="text/html; charset=utf-8"><meta name="viewport" content="width=device-width"><style type="text/css">body,html {background:#fff;font-family:"Bitstream Vera Sans","Lucida Grande","Lucida Sans Unicode",Lucidux,Verdana,Lucida,sans-serif;}tr:nth-child(even) {background:#f4f4f4;}th,td {padding:0.1em 0.5em;}th {text-align:left;font-weight:bold;background:#eee;border-bottom:1px solid #aaa;}#list {border:1px solid #aaa;width:100%;}a {color:#a33;}a:hover {color:#e33;}</style>
+
+<title>Index of /sources/libxml2/2.9/</title>
+</head><body><h1>Index of /sources/libxml2/2.9/</h1>
+<table id="list"><thead><tr><th style="width:55%"><a href="?C=N&amp;O=A">File Name</a>&nbsp;<a href="?C=N&amp;O=D">&nbsp;&darr;&nbsp;</a></th><th style="width:20%"><a href="?C=S&amp;O=A">File Size</a>&nbsp;<a href="?C=S&amp;O=D">&nbsp;&darr;&nbsp;</a></th><th style="width:25%"><a href="?C=M&amp;O=A">Date</a>&nbsp;<a href="?C=M&amp;O=D">&nbsp;&darr;&nbsp;</a></th></tr></thead>
+<tbody><tr><td class="link"><a href="../">Parent directory/</a></td><td class="size">-</td><td class="date">-</td></tr>
+<tr><td class="link"><a href="LATEST-IS-2.9.14" title="LATEST-IS-2.9.14">LATEST-IS-2.9.14</a></td><td class="size">3.0 MiB</td><td class="date">2022-May-02 12:03</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.0.sha256sum" title="libxml2-2.9.0.sha256sum">libxml2-2.9.0.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:27</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.0.tar.xz" title="libxml2-2.9.0.tar.xz">libxml2-2.9.0.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:27</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.1.sha256sum" title="libxml2-2.9.1.sha256sum">libxml2-2.9.1.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:28</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.1.tar.xz" title="libxml2-2.9.1.tar.xz">libxml2-2.9.1.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:28</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.10.sha256sum" title="libxml2-2.9.10.sha256sum">libxml2-2.9.10.sha256sum</a></td><td class="size">88 B</td><td class="date">2022-Feb-14 18:42</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.10.tar.xz" title="libxml2-2.9.10.tar.xz">libxml2-2.9.10.tar.xz</a></td><td class="size">3.2 MiB</td><td class="date">2022-Feb-14 18:42</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.11.sha256sum" title="libxml2-2.9.11.sha256sum">libxml2-2.9.11.sha256sum</a></td><td class="size">88 B</td><td class="date">2022-Feb-14 18:43</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.11.tar.xz" title="libxml2-2.9.11.tar.xz">libxml2-2.9.11.tar.xz</a></td><td class="size">3.2 MiB</td><td class="date">2022-Feb-14 18:43</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.12.sha256sum" title="libxml2-2.9.12.sha256sum">libxml2-2.9.12.sha256sum</a></td><td class="size">88 B</td><td class="date">2022-Feb-14 18:45</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.12.tar.xz" title="libxml2-2.9.12.tar.xz">libxml2-2.9.12.tar.xz</a></td><td class="size">3.2 MiB</td><td class="date">2022-Feb-14 18:45</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.13.news" title="libxml2-2.9.13.news">libxml2-2.9.13.news</a></td><td class="size">26.6 KiB</td><td class="date">2022-Feb-20 12:42</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.13.sha256sum" title="libxml2-2.9.13.sha256sum">libxml2-2.9.13.sha256sum</a></td><td class="size">174 B</td><td class="date">2022-Feb-20 12:42</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.13.tar.xz" title="libxml2-2.9.13.tar.xz">libxml2-2.9.13.tar.xz</a></td><td class="size">3.1 MiB</td><td class="date">2022-Feb-20 12:42</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.14.news" title="libxml2-2.9.14.news">libxml2-2.9.14.news</a></td><td class="size">1.0 KiB</td><td class="date">2022-May-02 12:03</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.14.sha256sum" title="libxml2-2.9.14.sha256sum">libxml2-2.9.14.sha256sum</a></td><td class="size">174 B</td><td class="date">2022-May-02 12:03</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.14.tar.xz" title="libxml2-2.9.14.tar.xz">libxml2-2.9.14.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-May-02 12:03</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.2.sha256sum" title="libxml2-2.9.2.sha256sum">libxml2-2.9.2.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:30</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.2.tar.xz" title="libxml2-2.9.2.tar.xz">libxml2-2.9.2.tar.xz</a></td><td class="size">3.2 MiB</td><td class="date">2022-Feb-14 18:30</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.3.sha256sum" title="libxml2-2.9.3.sha256sum">libxml2-2.9.3.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:31</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.3.tar.xz" title="libxml2-2.9.3.tar.xz">libxml2-2.9.3.tar.xz</a></td><td class="size">3.2 MiB</td><td class="date">2022-Feb-14 18:31</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.4.sha256sum" title="libxml2-2.9.4.sha256sum">libxml2-2.9.4.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:33</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.4.tar.xz" title="libxml2-2.9.4.tar.xz">libxml2-2.9.4.tar.xz</a></td><td class="size">2.9 MiB</td><td class="date">2022-Feb-14 18:33</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.5.sha256sum" title="libxml2-2.9.5.sha256sum">libxml2-2.9.5.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:35</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.5.tar.xz" title="libxml2-2.9.5.tar.xz">libxml2-2.9.5.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:35</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.6.sha256sum" title="libxml2-2.9.6.sha256sum">libxml2-2.9.6.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:36</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.6.tar.xz" title="libxml2-2.9.6.tar.xz">libxml2-2.9.6.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:36</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.7.sha256sum" title="libxml2-2.9.7.sha256sum">libxml2-2.9.7.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:37</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.7.tar.xz" title="libxml2-2.9.7.tar.xz">libxml2-2.9.7.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:37</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.8.sha256sum" title="libxml2-2.9.8.sha256sum">libxml2-2.9.8.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:39</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.8.tar.xz" title="libxml2-2.9.8.tar.xz">libxml2-2.9.8.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:39</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.9.sha256sum" title="libxml2-2.9.9.sha256sum">libxml2-2.9.9.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:40</td></tr>
+<tr><td class="link"><a href="libxml2-2.9.9.tar.xz" title="libxml2-2.9.9.tar.xz">libxml2-2.9.9.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:40</td></tr>
+</tbody></table></body></html>
diff --git a/lib/bb/tests/fetch-testdata/software/libxml2/index.html b/lib/bb/tests/fetch-testdata/software/libxml2/index.html
new file mode 100644
index 000000000..c183e06a5
--- /dev/null
+++ b/lib/bb/tests/fetch-testdata/software/libxml2/index.html
@@ -0,0 +1,19 @@
+<!DOCTYPE html><html><head><meta http-equiv="content-type" content="text/html; charset=utf-8"><meta name="viewport" content="width=device-width"><style type="text/css">body,html {background:#fff;font-family:"Bitstream Vera Sans","Lucida Grande","Lucida Sans Unicode",Lucidux,Verdana,Lucida,sans-serif;}tr:nth-child(even) {background:#f4f4f4;}th,td {padding:0.1em 0.5em;}th {text-align:left;font-weight:bold;background:#eee;border-bottom:1px solid #aaa;}#list {border:1px solid #aaa;width:100%;}a {color:#a33;}a:hover {color:#e33;}</style>
+
+<title>Index of /sources/libxml2/</title>
+</head><body><h1>Index of /sources/libxml2/</h1>
+<table id="list"><thead><tr><th style="width:55%"><a href="?C=N&amp;O=A">File Name</a>&nbsp;<a href="?C=N&amp;O=D">&nbsp;&darr;&nbsp;</a></th><th style="width:20%"><a href="?C=S&amp;O=A">File Size</a>&nbsp;<a href="?C=S&amp;O=D">&nbsp;&darr;&nbsp;</a></th><th style="width:25%"><a href="?C=M&amp;O=A">Date</a>&nbsp;<a href="?C=M&amp;O=D">&nbsp;&darr;&nbsp;</a></th></tr></thead>
+<tbody><tr><td class="link"><a href="../">Parent directory/</a></td><td class="size">-</td><td class="date">-</td></tr>
+<tr><td class="link"><a href="2.0/" title="2.0">2.0/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:04</td></tr>
+<tr><td class="link"><a href="2.1/" title="2.1">2.1/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:04</td></tr>
+<tr><td class="link"><a href="2.10/" title="2.10">2.10/</a></td><td class="size">-</td><td class="date">2022-Oct-14 12:55</td></tr>
+<tr><td class="link"><a href="2.2/" title="2.2">2.2/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:04</td></tr>
+<tr><td class="link"><a href="2.3/" title="2.3">2.3/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:05</td></tr>
+<tr><td class="link"><a href="2.4/" title="2.4">2.4/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:05</td></tr>
+<tr><td class="link"><a href="2.5/" title="2.5">2.5/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:05</td></tr>
+<tr><td class="link"><a href="2.6/" title="2.6">2.6/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:05</td></tr>
+<tr><td class="link"><a href="2.7/" title="2.7">2.7/</a></td><td class="size">-</td><td class="date">2022-Feb-14 18:24</td></tr>
+<tr><td class="link"><a href="2.8/" title="2.8">2.8/</a></td><td class="size">-</td><td class="date">2022-Feb-14 18:26</td></tr>
+<tr><td class="link"><a href="2.9/" title="2.9">2.9/</a></td><td class="size">-</td><td class="date">2022-May-02 12:04</td></tr>
+<tr><td class="link"><a href="cache.json" title="cache.json">cache.json</a></td><td class="size">22.8 KiB</td><td class="date">2022-Oct-14 12:55</td></tr>
+</tbody></table></body></html>
diff --git a/lib/bb/tests/fetch.py b/lib/bb/tests/fetch.py
index 9291ce4a0..5233ab5c8 100644
--- a/lib/bb/tests/fetch.py
+++ b/lib/bb/tests/fetch.py
@@ -6,11 +6,14 @@
# SPDX-License-Identifier: GPL-2.0-only
#
+import contextlib
import unittest
import hashlib
import tempfile
import collections
import os
+import signal
+import tarfile
from bb.fetch2 import URI
from bb.fetch2 import FetchMethod
import bb
@@ -18,9 +21,28 @@ from bb.tests.support.httpserver import HTTPService
def skipIfNoNetwork():
if os.environ.get("BB_SKIP_NETTESTS") == "yes":
- return unittest.skip("Network tests being skipped")
+ return unittest.skip("network test")
return lambda f: f
+class TestTimeout(Exception):
+ # Indicate to pytest that this is not a test suite
+ __test__ = False
+
+class Timeout():
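+ # Context manager: arms SIGALRM so that `with Timeout(seconds): ...`
+ # raises TestTimeout if the body runs longer than the given time.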
+
+ def __init__(self, seconds):
+ self.seconds = seconds
+
+ def handle_timeout(self, signum, frame):
+ raise TestTimeout("Test failed: timeout reached")
+
+ def __enter__(self):
+ signal.signal(signal.SIGALRM, self.handle_timeout)
+ signal.alarm(self.seconds)
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ signal.alarm(0)
+
class URITest(unittest.TestCase):
test_uris = {
"http://www.google.com/index.html" : {
@@ -286,6 +308,21 @@ class URITest(unittest.TestCase):
'params': {"someparam" : "1"},
'query': {},
'relative': True
+ },
+ "https://www.innodisk.com/Download_file?9BE0BF6657;downloadfilename=EGPL-T101.zip": {
+ 'uri': 'https://www.innodisk.com/Download_file?9BE0BF6657;downloadfilename=EGPL-T101.zip',
+ 'scheme': 'https',
+ 'hostname': 'www.innodisk.com',
+ 'port': None,
+ 'hostport': 'www.innodisk.com',
+ 'path': '/Download_file',
+ 'userinfo': '',
+ 'username': '',
+ 'password': '',
+ 'params': {"downloadfilename" : "EGPL-T101.zip"},
+ 'query': {"9BE0BF6657": None},
+ 'relative': False
}
}
@@ -376,7 +413,7 @@ class FetcherTest(unittest.TestCase):
def setUp(self):
self.origdir = os.getcwd()
self.d = bb.data.init()
- self.tempdir = tempfile.mkdtemp()
+ self.tempdir = tempfile.mkdtemp(prefix="bitbake-fetch-")
self.dldir = os.path.join(self.tempdir, "download")
os.mkdir(self.dldir)
self.d.setVar("DL_DIR", self.dldir)
@@ -393,55 +430,91 @@ class FetcherTest(unittest.TestCase):
bb.process.run('chmod u+rw -R %s' % self.tempdir)
bb.utils.prunedir(self.tempdir)
+ def git(self, cmd, cwd=None):
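+ # Run a git command (string or argv list) with safe.bareRepository=all,
+ # defaulting to self.gitdir as the working directory.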
+ if isinstance(cmd, str):
+ cmd = 'git -c safe.bareRepository=all ' + cmd
+ else:
+ cmd = ['git', '-c', 'safe.bareRepository=all'] + cmd
+ if cwd is None:
+ cwd = self.gitdir
+ return bb.process.run(cmd, cwd=cwd)[0]
+
+ def git_init(self, cwd=None):
+ self.git('init', cwd=cwd)
+ # Explicitly set the initial branch to master, as a common
+ # setup is to use a default branch other than master.
+ self.git(['checkout', '-b', 'master'], cwd=cwd)
+
+ try:
+ self.git(['config', 'user.email'], cwd=cwd)
+ except bb.process.ExecutionError:
+ self.git(['config', 'user.email', 'you@example.com'], cwd=cwd)
+
+ try:
+ self.git(['config', 'user.name'], cwd=cwd)
+ except bb.process.ExecutionError:
+ self.git(['config', 'user.name', 'Your Name'], cwd=cwd)
+
class MirrorUriTest(FetcherTest):
replaceuris = {
- ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "http://somewhere.org/somedir/")
+ ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "http://somewhere.org/somedir/")
: "http://somewhere.org/somedir/git2_git.invalid.infradead.org.mtd-utils.git.tar.gz",
- ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http")
- : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
- ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http")
- : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
- ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/\\2;protocol=http")
- : "git://somewhere.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
+ ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http")
+ : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
+ ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http")
+ : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
+ ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/\\2;protocol=http")
+ : "git://somewhere.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
("git://someserver.org/bitbake;tag=1234567890123456789012345678901234567890", "git://someserver.org/bitbake", "git://git.openembedded.org/bitbake")
: "git://git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890",
- ("file://sstate-xyz.tgz", "file://.*", "file:///somewhere/1234/sstate-cache")
+ ("file://sstate-xyz.tgz", "file://.*", "file:///somewhere/1234/sstate-cache")
: "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz",
- ("file://sstate-xyz.tgz", "file://.*", "file:///somewhere/1234/sstate-cache/")
+ ("file://sstate-xyz.tgz", "file://.*", "file:///somewhere/1234/sstate-cache/")
: "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz",
- ("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org/somedir3")
+ ("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org/somedir3")
: "http://somewhere2.org/somedir3/somefile_1.2.3.tar.gz",
- ("http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere2.org/somedir3/somefile_1.2.3.tar.gz")
+ ("http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere2.org/somedir3/somefile_1.2.3.tar.gz")
: "http://somewhere2.org/somedir3/somefile_1.2.3.tar.gz",
("http://www.apache.org/dist/subversion/subversion-1.7.1.tar.bz2", "http://www.apache.org/dist", "http://archive.apache.org/dist")
: "http://archive.apache.org/dist/subversion/subversion-1.7.1.tar.bz2",
("http://www.apache.org/dist/subversion/subversion-1.7.1.tar.bz2", "http://.*/.*", "file:///somepath/downloads/")
: "file:///somepath/downloads/subversion-1.7.1.tar.bz2",
- ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http")
- : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
- ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http")
- : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
- ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/MIRRORNAME;protocol=http")
- : "git://somewhere.org/somedir/git.invalid.infradead.org.foo.mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
+ ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http")
+ : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
+ ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http")
+ : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
+ ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/MIRRORNAME;protocol=http")
+ : "git://somewhere.org/somedir/git.invalid.infradead.org.foo.mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org")
: "http://somewhere2.org/somefile_1.2.3.tar.gz",
("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org/")
: "http://somewhere2.org/somefile_1.2.3.tar.gz",
("git://someserver.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master", "git://someserver.org/bitbake;branch=master", "git://git.openembedded.org/bitbake;protocol=http")
: "git://git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master;protocol=http",
+ ("git://user1@someserver.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master", "git://someserver.org/bitbake;branch=master", "git://user2@git.openembedded.org/bitbake;protocol=http")
+ : "git://user2@git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master;protocol=http",
+ ("git://someserver.org/bitbake;tag=1234567890123456789012345678901234567890;protocol=git;branch=master", "git://someserver.org/bitbake", "git://someotherserver.org/bitbake;protocol=https")
+ : "git://someotherserver.org/bitbake;tag=1234567890123456789012345678901234567890;protocol=https;branch=master",
+ ("gitsm://git.qemu.org/git/seabios.git/;protocol=https;name=roms/seabios;subpath=roms/seabios;bareclone=1;nobranch=1;rev=1234567890123456789012345678901234567890", "gitsm://.*/.*", "http://petalinux.xilinx.com/sswreleases/rel-v${XILINX_VER_MAIN}/downloads") : "http://petalinux.xilinx.com/sswreleases/rel-v%24%7BXILINX_VER_MAIN%7D/downloads/git2_git.qemu.org.git.seabios.git..tar.gz",
+ ("https://somewhere.org/example/1.0.0/example;downloadfilename=some-example-1.0.0.tgz", "https://.*/.*", "file:///mirror/PATH")
+ : "file:///mirror/example/1.0.0/some-example-1.0.0.tgz;downloadfilename=some-example-1.0.0.tgz",
+ ("https://somewhere.org/example-1.0.0.tgz;downloadfilename=some-example-1.0.0.tgz", "https://.*/.*", "file:///mirror/some-example-1.0.0.tgz")
+ : "file:///mirror/some-example-1.0.0.tgz;downloadfilename=some-example-1.0.0.tgz",
#Renaming files doesn't work
#("http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz") : "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz"
#("file://sstate-xyz.tgz", "file://.*/.*", "file:///somewhere/1234/sstate-cache") : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz",
}
- mirrorvar = "http://.*/.* file:///somepath/downloads/ \n" \
- "git://someserver.org/bitbake git://git.openembedded.org/bitbake \n" \
- "https://.*/.* file:///someotherpath/downloads/ \n" \
- "http://.*/.* file:///someotherpath/downloads/ \n"
+ mirrorvar = "http://.*/.* file:///somepath/downloads/ " \
+ "git://someserver.org/bitbake git://git.openembedded.org/bitbake " \
+ "https://.*/.* file:///someotherpath/downloads/ " \
+ "http://.*/.* file:///someotherpath/downloads/"
def test_urireplace(self):
+ self.d.setVar("FILESPATH", ".")
for k, v in self.replaceuris.items():
ud = bb.fetch.FetchData(k[0], self.d)
ud.setup_localpath(self.d)
@@ -464,8 +537,8 @@ class MirrorUriTest(FetcherTest):
def test_mirror_of_mirror(self):
# Test if mirror of a mirror works
- mirrorvar = self.mirrorvar + " http://.*/.* http://otherdownloads.yoctoproject.org/downloads/ \n"
- mirrorvar = mirrorvar + " http://otherdownloads.yoctoproject.org/.* http://downloads2.yoctoproject.org/downloads/ \n"
+ mirrorvar = self.mirrorvar + " http://.*/.* http://otherdownloads.yoctoproject.org/downloads/"
+ mirrorvar = mirrorvar + " http://otherdownloads.yoctoproject.org/.* http://downloads2.yoctoproject.org/downloads/"
fetcher = bb.fetch.FetchData("http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d)
mirrors = bb.fetch2.mirror_from_string(mirrorvar)
uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
@@ -474,8 +547,8 @@ class MirrorUriTest(FetcherTest):
'http://otherdownloads.yoctoproject.org/downloads/bitbake-1.0.tar.gz',
'http://downloads2.yoctoproject.org/downloads/bitbake-1.0.tar.gz'])
- recmirrorvar = "https://.*/[^/]* http://AAAA/A/A/A/ \n" \
- "https://.*/[^/]* https://BBBB/B/B/B/ \n"
+ recmirrorvar = "https://.*/[^/]* http://AAAA/A/A/A/ " \
+ "https://.*/[^/]* https://BBBB/B/B/B/"
def test_recursive(self):
fetcher = bb.fetch.FetchData("https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d)
@@ -489,15 +562,15 @@ class MirrorUriTest(FetcherTest):
class GitDownloadDirectoryNamingTest(FetcherTest):
def setUp(self):
super(GitDownloadDirectoryNamingTest, self).setUp()
- self.recipe_url = "git://git.openembedded.org/bitbake"
+ self.recipe_url = "git://git.openembedded.org/bitbake;branch=master;protocol=https"
self.recipe_dir = "git.openembedded.org.bitbake"
- self.mirror_url = "git://github.com/openembedded/bitbake.git"
+ self.mirror_url = "git://github.com/openembedded/bitbake.git;protocol=https;branch=master"
self.mirror_dir = "github.com.openembedded.bitbake.git"
self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40')
def setup_mirror_rewrite(self):
- self.d.setVar("PREMIRRORS", self.recipe_url + " " + self.mirror_url + " \n")
+ self.d.setVar("PREMIRRORS", self.recipe_url + " " + self.mirror_url)
@skipIfNoNetwork()
def test_that_directory_is_named_after_recipe_url_when_no_mirroring_is_used(self):
@@ -537,16 +610,16 @@ class GitDownloadDirectoryNamingTest(FetcherTest):
class TarballNamingTest(FetcherTest):
def setUp(self):
super(TarballNamingTest, self).setUp()
- self.recipe_url = "git://git.openembedded.org/bitbake"
+ self.recipe_url = "git://git.openembedded.org/bitbake;branch=master;protocol=https"
self.recipe_tarball = "git2_git.openembedded.org.bitbake.tar.gz"
- self.mirror_url = "git://github.com/openembedded/bitbake.git"
+ self.mirror_url = "git://github.com/openembedded/bitbake.git;protocol=https;branch=master"
self.mirror_tarball = "git2_github.com.openembedded.bitbake.git.tar.gz"
self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '1')
self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40')
def setup_mirror_rewrite(self):
- self.d.setVar("PREMIRRORS", self.recipe_url + " " + self.mirror_url + " \n")
+ self.d.setVar("PREMIRRORS", self.recipe_url + " " + self.mirror_url)
@skipIfNoNetwork()
def test_that_the_recipe_tarball_is_created_when_no_mirroring_is_used(self):
@@ -571,9 +644,9 @@ class TarballNamingTest(FetcherTest):
class GitShallowTarballNamingTest(FetcherTest):
def setUp(self):
super(GitShallowTarballNamingTest, self).setUp()
- self.recipe_url = "git://git.openembedded.org/bitbake"
+ self.recipe_url = "git://git.openembedded.org/bitbake;branch=master;protocol=https"
self.recipe_tarball = "gitshallow_git.openembedded.org.bitbake_82ea737-1_master.tar.gz"
- self.mirror_url = "git://github.com/openembedded/bitbake.git"
+ self.mirror_url = "git://github.com/openembedded/bitbake.git;protocol=https;branch=master"
self.mirror_tarball = "gitshallow_github.com.openembedded.bitbake.git_82ea737-1_master.tar.gz"
self.d.setVar('BB_GIT_SHALLOW', '1')
@@ -581,7 +654,7 @@ class GitShallowTarballNamingTest(FetcherTest):
self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40')
def setup_mirror_rewrite(self):
- self.d.setVar("PREMIRRORS", self.recipe_url + " " + self.mirror_url + " \n")
+ self.d.setVar("PREMIRRORS", self.recipe_url + " " + self.mirror_url)
@skipIfNoNetwork()
def test_that_the_tarball_is_named_after_recipe_url_when_no_mirroring_is_used(self):
@@ -603,6 +676,39 @@ class GitShallowTarballNamingTest(FetcherTest):
self.assertIn(self.mirror_tarball, dir)
+class CleanTarballTest(FetcherTest):
+ def setUp(self):
+ super(CleanTarballTest, self).setUp()
+ self.recipe_url = "git://git.openembedded.org/bitbake;protocol=https"
+ self.recipe_tarball = "git2_git.openembedded.org.bitbake.tar.gz"
+
+ self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '1')
+ self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40')
+
+ @skipIfNoNetwork()
+ def test_that_the_tarball_contents_does_not_leak_info(self):
+ fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+
+ fetcher.download()
+
+ fetcher.unpack(self.unpackdir)
+ mtime = bb.process.run('git log --all -1 --format=%ct',
+ cwd=os.path.join(self.unpackdir, 'git'))
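+ # bb.process.run() returns a (stdout, stderr) tuple; the timestamp is in stdout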
+ self.assertEqual(len(mtime), 2)
+ mtime = int(mtime[0])
+
+ archive = tarfile.open(os.path.join(self.dldir, self.recipe_tarball))
+ self.assertNotEqual(len(archive.members), 0)
+ for member in archive.members:
+ if member.name == ".":
+ continue
+ self.assertEqual(member.uname, 'oe', "user name for %s differs" % member.name)
+ self.assertEqual(member.uid, 0, "uid for %s differs" % member.name)
+ self.assertEqual(member.gname, 'oe', "group name for %s differs" % member.name)
+ self.assertEqual(member.gid, 0, "gid for %s differs" % member.name)
+ self.assertEqual(member.mtime, mtime, "mtime for %s differs" % member.name)
+
+
class FetcherLocalTest(FetcherTest):
def setUp(self):
def touch(fn):
@@ -620,6 +726,9 @@ class FetcherLocalTest(FetcherTest):
os.makedirs(os.path.join(self.localsrcdir, 'dir', 'subdir'))
touch(os.path.join(self.localsrcdir, 'dir', 'subdir', 'e'))
touch(os.path.join(self.localsrcdir, r'backslash\x2dsystemd-unit.device'))
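+ # Create tar, tar.gz and tar.bz2 archives of 'dir' for the striplevel unpack tests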
+ bb.process.run('tar cf archive.tar -C dir .', cwd=self.localsrcdir)
+ bb.process.run('tar czf archive.tar.gz -C dir .', cwd=self.localsrcdir)
+ bb.process.run('tar cjf archive.tar.bz2 -C dir .', cwd=self.localsrcdir)
self.d.setVar("FILESPATH", self.localsrcdir)
def fetchUnpack(self, uris):
@@ -633,6 +742,11 @@ class FetcherLocalTest(FetcherTest):
flst.sort()
return flst
+ def test_local_checksum_fails_no_file(self):
+ self.d.setVar("SRC_URI", "file://404")
+ with self.assertRaises(bb.BBHandledException):
+ bb.fetch.get_checksum_file_list(self.d)
+
def test_local(self):
tree = self.fetchUnpack(['file://a', 'file://dir/c'])
self.assertEqual(tree, ['a', 'dir/c'])
@@ -674,32 +788,39 @@ class FetcherLocalTest(FetcherTest):
with self.assertRaises(bb.fetch2.UnpackError):
self.fetchUnpack(['file://a;subdir=/bin/sh'])
+ def test_local_striplevel(self):
+ tree = self.fetchUnpack(['file://archive.tar;subdir=bar;striplevel=1'])
+ self.assertEqual(tree, ['bar/c', 'bar/d', 'bar/subdir/e'])
+
+ def test_local_striplevel_gzip(self):
+ tree = self.fetchUnpack(['file://archive.tar.gz;subdir=bar;striplevel=1'])
+ self.assertEqual(tree, ['bar/c', 'bar/d', 'bar/subdir/e'])
+
+ def test_local_striplevel_bzip2(self):
+ tree = self.fetchUnpack(['file://archive.tar.bz2;subdir=bar;striplevel=1'])
+ self.assertEqual(tree, ['bar/c', 'bar/d', 'bar/subdir/e'])
+
def dummyGitTest(self, suffix):
# Create dummy local Git repo
src_dir = tempfile.mkdtemp(dir=self.tempdir,
prefix='gitfetch_localusehead_')
- src_dir = os.path.abspath(src_dir)
- bb.process.run("git init", cwd=src_dir)
- bb.process.run("git config user.email 'you@example.com'", cwd=src_dir)
- bb.process.run("git config user.name 'Your Name'", cwd=src_dir)
- bb.process.run("git commit --allow-empty -m'Dummy commit'",
- cwd=src_dir)
+ self.gitdir = os.path.abspath(src_dir)
+ self.git_init()
+ self.git(['commit', '--allow-empty', '-m', 'Dummy commit'])
# Use other branch than master
- bb.process.run("git checkout -b my-devel", cwd=src_dir)
- bb.process.run("git commit --allow-empty -m'Dummy commit 2'",
- cwd=src_dir)
- stdout = bb.process.run("git rev-parse HEAD", cwd=src_dir)
- orig_rev = stdout[0].strip()
+ self.git(['checkout', '-b', 'my-devel'])
+ self.git(['commit', '--allow-empty', '-m', 'Dummy commit 2'])
+ orig_rev = self.git(['rev-parse', 'HEAD']).strip()
# Fetch and check revision
self.d.setVar("SRCREV", "AUTOINC")
- url = "git://" + src_dir + ";protocol=file;" + suffix
+ self.d.setVar("__BBSRCREV_SEEN", "1")
+ url = "git://" + self.gitdir + ";branch=master;protocol=file;" + suffix
fetcher = bb.fetch.Fetch([url], self.d)
fetcher.download()
fetcher.unpack(self.unpackdir)
- stdout = bb.process.run("git rev-parse HEAD",
- cwd=os.path.join(self.unpackdir, 'git'))
- unpack_rev = stdout[0].strip()
+ unpack_rev = self.git(['rev-parse', 'HEAD'],
+ cwd=os.path.join(self.unpackdir, 'git')).strip()
self.assertEqual(orig_rev, unpack_rev)
def test_local_gitfetch_usehead(self):
@@ -826,12 +947,12 @@ class FetcherNoNetworkTest(FetcherTest):
class FetcherNetworkTest(FetcherTest):
@skipIfNoNetwork()
def test_fetch(self):
- fetcher = bb.fetch.Fetch(["http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", "http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"], self.d)
+ fetcher = bb.fetch.Fetch(["https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", "https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"], self.d)
fetcher.download()
self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.1.tar.gz"), 57892)
self.d.setVar("BB_NO_NETWORK", "1")
- fetcher = bb.fetch.Fetch(["http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", "http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"], self.d)
+ fetcher = bb.fetch.Fetch(["https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", "https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"], self.d)
fetcher.download()
fetcher.unpack(self.unpackdir)
self.assertEqual(len(os.listdir(self.unpackdir + "/bitbake-1.0/")), 9)
@@ -839,21 +960,22 @@ class FetcherNetworkTest(FetcherTest):
@skipIfNoNetwork()
def test_fetch_mirror(self):
- self.d.setVar("MIRRORS", "http://.*/.* http://downloads.yoctoproject.org/releases/bitbake")
+ self.d.setVar("MIRRORS", "http://.*/.* https://downloads.yoctoproject.org/releases/bitbake")
fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
fetcher.download()
self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
@skipIfNoNetwork()
def test_fetch_mirror_of_mirror(self):
- self.d.setVar("MIRRORS", "http://.*/.* http://invalid2.yoctoproject.org/ \n http://invalid2.yoctoproject.org/.* http://downloads.yoctoproject.org/releases/bitbake")
+ self.d.setVar("MIRRORS", "http://.*/.* http://invalid2.yoctoproject.org/ http://invalid2.yoctoproject.org/.* https://downloads.yoctoproject.org/releases/bitbake")
fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
fetcher.download()
self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
@skipIfNoNetwork()
def test_fetch_file_mirror_of_mirror(self):
- self.d.setVar("MIRRORS", "http://.*/.* file:///some1where/ \n file:///some1where/.* file://some2where/ \n file://some2where/.* http://downloads.yoctoproject.org/releases/bitbake")
+ self.d.setVar("FILESPATH", ".")
+ self.d.setVar("MIRRORS", "http://.*/.* file:///some1where/ file:///some1where/.* file://some2where/ file://some2where/.* https://downloads.yoctoproject.org/releases/bitbake")
fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
os.mkdir(self.dldir + "/some2where")
fetcher.download()
@@ -861,16 +983,46 @@ class FetcherNetworkTest(FetcherTest):
@skipIfNoNetwork()
def test_fetch_premirror(self):
- self.d.setVar("PREMIRRORS", "http://.*/.* http://downloads.yoctoproject.org/releases/bitbake")
+ self.d.setVar("PREMIRRORS", "http://.*/.* https://downloads.yoctoproject.org/releases/bitbake")
fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
fetcher.download()
self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
@skipIfNoNetwork()
+ def test_fetch_specify_downloadfilename(self):
+ fetcher = bb.fetch.Fetch(["https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz;downloadfilename=bitbake-v1.0.0.tar.gz"], self.d)
+ fetcher.download()
+ self.assertEqual(os.path.getsize(self.dldir + "/bitbake-v1.0.0.tar.gz"), 57749)
+
+ @skipIfNoNetwork()
+ def test_fetch_premirror_specify_downloadfilename_regex_uri(self):
+ self.d.setVar("PREMIRRORS", "http://.*/.* https://downloads.yoctoproject.org/releases/bitbake/")
+ fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/1.0.tar.gz;downloadfilename=bitbake-1.0.tar.gz"], self.d)
+ fetcher.download()
+ self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
+
+ @skipIfNoNetwork()
+ # BZ13039
+ def test_fetch_premirror_specify_downloadfilename_specific_uri(self):
+ self.d.setVar("PREMIRRORS", "http://invalid.yoctoproject.org/releases/bitbake https://downloads.yoctoproject.org/releases/bitbake")
+ fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/1.0.tar.gz;downloadfilename=bitbake-1.0.tar.gz"], self.d)
+ fetcher.download()
+ self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
+
+ @skipIfNoNetwork()
+ def test_fetch_premirror_use_downloadfilename_to_fetch(self):
+ # Ensure downloadfilename is used when fetching from premirror.
+ self.d.setVar("PREMIRRORS", "http://.*/.* https://downloads.yoctoproject.org/releases/bitbake")
+ fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz;downloadfilename=bitbake-1.0.tar.gz"], self.d)
+ fetcher.download()
+ self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
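+
+ # Taken together, the downloadfilename tests above pin down that both the
+ # stored file name and the name used for (pre)mirror lookups come from
+ # downloadfilename when it is present, not from the URL's basename.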
+
+ @skipIfNoNetwork()
def gitfetcher(self, url1, url2):
def checkrevision(self, fetcher):
fetcher.unpack(self.unpackdir)
- revision = bb.process.run("git rev-parse HEAD", shell=True, cwd=self.unpackdir + "/git")[0].strip()
+ revision = self.git(['rev-parse', 'HEAD'],
+ cwd=os.path.join(self.unpackdir, 'git')).strip()
self.assertEqual(revision, "270a05b0b4ba0959fe0624d2a4885d7b70426da5")
self.d.setVar("BB_GENERATE_MIRROR_TARBALLS", "1")
@@ -888,25 +1040,25 @@ class FetcherNetworkTest(FetcherTest):
@skipIfNoNetwork()
def test_gitfetch(self):
- url1 = url2 = "git://git.openembedded.org/bitbake"
+ url1 = url2 = "git://git.openembedded.org/bitbake;branch=master;protocol=https"
self.gitfetcher(url1, url2)
@skipIfNoNetwork()
def test_gitfetch_goodsrcrev(self):
# SRCREV is set but matches rev= parameter
- url1 = url2 = "git://git.openembedded.org/bitbake;rev=270a05b0b4ba0959fe0624d2a4885d7b70426da5"
+ url1 = url2 = "git://git.openembedded.org/bitbake;rev=270a05b0b4ba0959fe0624d2a4885d7b70426da5;branch=master;protocol=https"
self.gitfetcher(url1, url2)
@skipIfNoNetwork()
def test_gitfetch_badsrcrev(self):
# SRCREV is set but does not match rev= parameter
- url1 = url2 = "git://git.openembedded.org/bitbake;rev=dead05b0b4ba0959fe0624d2a4885d7b70426da5"
+ url1 = url2 = "git://git.openembedded.org/bitbake;rev=dead05b0b4ba0959fe0624d2a4885d7b70426da5;branch=master;protocol=https"
self.assertRaises(bb.fetch.FetchError, self.gitfetcher, url1, url2)
@skipIfNoNetwork()
def test_gitfetch_tagandrev(self):
# Both rev= and tag= parameters are set; the fetcher rejects this combination
- url1 = url2 = "git://git.openembedded.org/bitbake;rev=270a05b0b4ba0959fe0624d2a4885d7b70426da5;tag=270a05b0b4ba0959fe0624d2a4885d7b70426da5"
+ url1 = url2 = "git://git.openembedded.org/bitbake;rev=270a05b0b4ba0959fe0624d2a4885d7b70426da5;tag=270a05b0b4ba0959fe0624d2a4885d7b70426da5;protocol=https"
self.assertRaises(bb.fetch.FetchError, self.gitfetcher, url1, url2)
@skipIfNoNetwork()
@@ -915,7 +1067,7 @@ class FetcherNetworkTest(FetcherTest):
# `usehead=1' and instead fetch the specified SRCREV. See
# test_local_gitfetch_usehead() for a positive use of the usehead
# feature.
- url = "git://git.openembedded.org/bitbake;usehead=1"
+ url = "git://git.openembedded.org/bitbake;usehead=1;branch=master;protocol=https"
self.assertRaises(bb.fetch.ParameterError, self.gitfetcher, url, url)
@skipIfNoNetwork()
@@ -924,38 +1076,38 @@ class FetcherNetworkTest(FetcherTest):
# `usehead=1' and instead fetch the specified SRCREV. See
# test_local_gitfetch_usehead() for a positive use of the usehead
# feature.
- url = "git://git.openembedded.org/bitbake;usehead=1;name=newName"
+ url = "git://git.openembedded.org/bitbake;usehead=1;name=newName;branch=master;protocol=https"
self.assertRaises(bb.fetch.ParameterError, self.gitfetcher, url, url)
@skipIfNoNetwork()
def test_gitfetch_finds_local_tarball_for_mirrored_url_when_previous_downloaded_by_the_recipe_url(self):
- recipeurl = "git://git.openembedded.org/bitbake"
- mirrorurl = "git://someserver.org/bitbake"
- self.d.setVar("PREMIRRORS", "git://someserver.org/bitbake git://git.openembedded.org/bitbake \n")
+ recipeurl = "git://git.openembedded.org/bitbake;branch=master;protocol=https"
+ mirrorurl = "git://someserver.org/bitbake;branch=master;protocol=https"
+ self.d.setVar("PREMIRRORS", "git://someserver.org/bitbake git://git.openembedded.org/bitbake")
self.gitfetcher(recipeurl, mirrorurl)
@skipIfNoNetwork()
def test_gitfetch_finds_local_tarball_when_previous_downloaded_from_a_premirror(self):
- recipeurl = "git://someserver.org/bitbake"
- self.d.setVar("PREMIRRORS", "git://someserver.org/bitbake git://git.openembedded.org/bitbake \n")
+ recipeurl = "git://someserver.org/bitbake;branch=master;protocol=https"
+ self.d.setVar("PREMIRRORS", "git://someserver.org/bitbake git://git.openembedded.org/bitbake")
self.gitfetcher(recipeurl, recipeurl)
@skipIfNoNetwork()
def test_gitfetch_finds_local_repository_when_premirror_rewrites_the_recipe_url(self):
- realurl = "git://git.openembedded.org/bitbake"
- recipeurl = "git://someserver.org/bitbake"
+ realurl = "https://git.openembedded.org/bitbake"
+ recipeurl = "git://someserver.org/bitbake;protocol=https"
self.sourcedir = self.unpackdir.replace("unpacked", "sourcemirror.git")
os.chdir(self.tempdir)
- bb.process.run("git clone %s %s 2> /dev/null" % (realurl, self.sourcedir), shell=True)
- self.d.setVar("PREMIRRORS", "%s git://%s;protocol=file \n" % (recipeurl, self.sourcedir))
+ self.git(['clone', realurl, self.sourcedir], cwd=self.tempdir)
+ self.d.setVar("PREMIRRORS", "%s git://%s;protocol=file" % (recipeurl, self.sourcedir))
self.gitfetcher(recipeurl, recipeurl)
@skipIfNoNetwork()
def test_git_submodule(self):
# URL with ssh submodules
- url = "gitsm://git.yoctoproject.org/git-submodule-test;branch=ssh-gitsm-tests;rev=049da4a6cb198d7c0302e9e8b243a1443cb809a7"
+ url = "gitsm://git.yoctoproject.org/git-submodule-test;branch=ssh-gitsm-tests;rev=049da4a6cb198d7c0302e9e8b243a1443cb809a7;branch=master;protocol=https"
# Original URL (comment this if you have ssh access to git.yoctoproject.org)
- url = "gitsm://git.yoctoproject.org/git-submodule-test;branch=master;rev=a2885dd7d25380d23627e7544b7bbb55014b16ee"
+ url = "gitsm://git.yoctoproject.org/git-submodule-test;branch=master;rev=a2885dd7d25380d23627e7544b7bbb55014b16ee;branch=master;protocol=https"
fetcher = bb.fetch.Fetch([url], self.d)
fetcher.download()
# Previous cwd has been deleted
@@ -972,10 +1124,29 @@ class FetcherNetworkTest(FetcherTest):
self.assertTrue(os.path.exists(os.path.join(repo_path, 'bitbake-gitsm-test1', 'bitbake')), msg='submodule of submodule missing')
@skipIfNoNetwork()
+ def test_git_submodule_restricted_network_premirrors(self):
+ # This test ensures that premirrors are tried even on a restricted network,
+ # i.e. when BB_ALLOWED_NETWORKS does not contain the domain the URL uses.
+ url = "gitsm://github.com/grpc/grpc.git;protocol=https;name=grpc;branch=v1.60.x;rev=0ef13a7555dbaadd4633399242524129eef5e231"
+ # create a download directory to be used as premirror later
+ tempdir = tempfile.mkdtemp(prefix="bitbake-fetch-")
+ dl_premirror = os.path.join(tempdir, "download-premirror")
+ os.mkdir(dl_premirror)
+ self.d.setVar("DL_DIR", dl_premirror)
+ fetcher = bb.fetch.Fetch([url], self.d)
+ fetcher.download()
+ # now use the premirror on a restricted network
+ self.d.setVar("DL_DIR", self.dldir)
+ self.d.setVar("PREMIRRORS", "gitsm://.*/.* gitsm://%s/git2/MIRRORNAME;protocol=file" % dl_premirror)
+ self.d.setVar("BB_ALLOWED_NETWORKS", "*.some.domain")
+ fetcher = bb.fetch.Fetch([url], self.d)
+ fetcher.download()
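+
+ # (MIRRORNAME above is a placeholder that the fetcher's mirror URI
+ # rewriting expands to the encoded repository name, so the premirror entry
+ # resolves to the matching clone under <premirror>/git2/; this reading is
+ # inferred from the test, not from fetcher documentation.)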
+
+ @skipIfNoNetwork()
def test_git_submodule_dbus_broker(self):
# The following external repositories have shown failures in fetch and unpack operations
# We want to avoid regressions!
- url = "gitsm://github.com/bus1/dbus-broker;protocol=git;rev=fc874afa0992d0c75ec25acb43d344679f0ee7d2;branch=main"
+ url = "gitsm://github.com/bus1/dbus-broker;protocol=https;rev=fc874afa0992d0c75ec25acb43d344679f0ee7d2;branch=main"
fetcher = bb.fetch.Fetch([url], self.d)
fetcher.download()
# Previous cwd has been deleted
@@ -991,7 +1162,7 @@ class FetcherNetworkTest(FetcherTest):
@skipIfNoNetwork()
def test_git_submodule_CLI11(self):
- url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=bd4dc911847d0cde7a6b41dfa626a85aab213baf"
+ url = "gitsm://github.com/CLIUtils/CLI11;protocol=https;rev=bd4dc911847d0cde7a6b41dfa626a85aab213baf;branch=main"
fetcher = bb.fetch.Fetch([url], self.d)
fetcher.download()
# Previous cwd has been deleted
@@ -1006,12 +1177,12 @@ class FetcherNetworkTest(FetcherTest):
@skipIfNoNetwork()
def test_git_submodule_update_CLI11(self):
""" Prevent regression on update detection not finding missing submodule, or modules without needed commits """
- url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=cf6a99fa69aaefe477cc52e3ef4a7d2d7fa40714"
+ url = "gitsm://github.com/CLIUtils/CLI11;protocol=https;rev=cf6a99fa69aaefe477cc52e3ef4a7d2d7fa40714;branch=main"
fetcher = bb.fetch.Fetch([url], self.d)
fetcher.download()
# CLI11 that pulls in a newer nlohmann-json
- url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=49ac989a9527ee9bb496de9ded7b4872c2e0e5ca"
+ url = "gitsm://github.com/CLIUtils/CLI11;protocol=https;rev=49ac989a9527ee9bb496de9ded7b4872c2e0e5ca;branch=main"
fetcher = bb.fetch.Fetch([url], self.d)
fetcher.download()
# Previous cwd has been deleted
@@ -1025,7 +1196,7 @@ class FetcherNetworkTest(FetcherTest):
@skipIfNoNetwork()
def test_git_submodule_aktualizr(self):
- url = "gitsm://github.com/advancedtelematic/aktualizr;branch=master;protocol=git;rev=d00d1a04cc2366d1a5f143b84b9f507f8bd32c44"
+ url = "gitsm://github.com/advancedtelematic/aktualizr;branch=master;protocol=https;rev=d00d1a04cc2366d1a5f143b84b9f507f8bd32c44"
fetcher = bb.fetch.Fetch([url], self.d)
fetcher.download()
# Previous cwd has been deleted
@@ -1045,7 +1216,7 @@ class FetcherNetworkTest(FetcherTest):
""" Prevent regression on deeply nested submodules not being checked out properly, even though they were fetched. """
# This repository also has submodules where the module (name), path and url do not align
- url = "gitsm://github.com/azure/iotedge.git;protocol=git;rev=d76e0316c6f324345d77c48a83ce836d09392699"
+ url = "gitsm://github.com/azure/iotedge.git;protocol=https;rev=d76e0316c6f324345d77c48a83ce836d09392699;branch=main"
fetcher = bb.fetch.Fetch([url], self.d)
fetcher.download()
# Previous cwd has been deleted
@@ -1068,6 +1239,15 @@ class FetcherNetworkTest(FetcherTest):
self.assertTrue(os.path.exists(os.path.join(repo_path, 'edgelet/hsm-sys/azure-iot-hsm-c/deps/utpm/deps/c-utility/testtools/umock-c/deps/ctest/README.md')), msg='Missing submodule checkout')
self.assertTrue(os.path.exists(os.path.join(repo_path, 'edgelet/hsm-sys/azure-iot-hsm-c/deps/utpm/deps/c-utility/testtools/umock-c/deps/testrunner/readme.md')), msg='Missing submodule checkout')
+ @skipIfNoNetwork()
+ def test_git_submodule_reference_to_parent(self):
+ self.recipe_url = "gitsm://github.com/gflags/gflags.git;protocol=https;branch=master"
+ self.d.setVar("SRCREV", "14e1138441bbbb584160cb1c0a0426ec1bac35f1")
+ with Timeout(60):
+ fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+ with self.assertRaises(bb.fetch2.FetchError):
+ fetcher.download()
+
class SVNTest(FetcherTest):
def skipIfNoSvn():
import shutil
@@ -1102,8 +1282,9 @@ class SVNTest(FetcherTest):
cwd=repo_dir)
bb.process.run("svn co %s svnfetch_co" % self.repo_url, cwd=self.tempdir)
- # Github will emulate SVN. Use this to check if we're downloding...
- bb.process.run("svn propset svn:externals 'bitbake svn://vcs.pcre.org/pcre2/code' .",
+ # Github won't emulate SVN anymore (see https://github.blog/2023-01-20-sunsetting-subversion-support/)
+ # Use a still-accessible svn repo instead (trunk only, to keep the download small)
+ bb.process.run("svn propset svn:externals 'bitbake https://svn.apache.org/repos/asf/serf/trunk' .",
cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk'))
bb.process.run("svn commit --non-interactive -m 'Add external'",
cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk'))
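+ # (svn:externals entries take the form "<subdir> <URL>", so the property
+ # set above pulls the serf trunk into trunk/bitbake; the tests below
+ # assert on trunk/bitbake/protocols accordingly.)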
@@ -1131,8 +1312,8 @@ class SVNTest(FetcherTest):
self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk')), msg="Missing trunk")
self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk', 'README.md')), msg="Missing contents")
- self.assertFalse(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/trunk')), msg="External dir should NOT exist")
- self.assertFalse(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/trunk', 'README')), msg="External README should NOT exit")
+ self.assertFalse(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/protocols')), msg="External dir should NOT exist")
+ self.assertFalse(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/protocols', 'fcgi_buckets.h')), msg="External fcgi_buckets.h should NOT exist")
@skipIfNoSvn()
def test_external_svn(self):
@@ -1145,49 +1326,49 @@ class SVNTest(FetcherTest):
self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk')), msg="Missing trunk")
self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk', 'README.md')), msg="Missing contents")
- self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/trunk')), msg="External dir should exist")
- self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/trunk', 'README')), msg="External README should exit")
+ self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/protocols')), msg="External dir should exist")
+ self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/protocols', 'fcgi_buckets.h')), msg="External fcgi_buckets.h should exist")
class TrustedNetworksTest(FetcherTest):
def test_trusted_network(self):
# Ensure trusted_network returns True when the host IS in the list.
- url = "git://Someserver.org/foo;rev=1"
+ url = "git://Someserver.org/foo;rev=1;branch=master"
self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org someserver.org server2.org server3.org")
self.assertTrue(bb.fetch.trusted_network(self.d, url))
def test_wild_trusted_network(self):
# Ensure trusted_network returns true when the *.host IS in the list.
- url = "git://Someserver.org/foo;rev=1"
+ url = "git://Someserver.org/foo;rev=1;branch=master"
self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org")
self.assertTrue(bb.fetch.trusted_network(self.d, url))
def test_prefix_wild_trusted_network(self):
# Ensure trusted_network returns true when the prefix matches *.host.
- url = "git://git.Someserver.org/foo;rev=1"
+ url = "git://git.Someserver.org/foo;rev=1;branch=master"
self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org")
self.assertTrue(bb.fetch.trusted_network(self.d, url))
def test_two_prefix_wild_trusted_network(self):
# Ensure trusted_network returns true when the prefix matches *.host.
- url = "git://something.git.Someserver.org/foo;rev=1"
+ url = "git://something.git.Someserver.org/foo;rev=1;branch=master"
self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org")
self.assertTrue(bb.fetch.trusted_network(self.d, url))
def test_port_trusted_network(self):
# Ensure trusted_network returns True, even if the url specifies a port.
- url = "git://someserver.org:8080/foo;rev=1"
+ url = "git://someserver.org:8080/foo;rev=1;branch=master"
self.d.setVar("BB_ALLOWED_NETWORKS", "someserver.org")
self.assertTrue(bb.fetch.trusted_network(self.d, url))
def test_untrusted_network(self):
# Ensure trusted_network returns False when the host is NOT in the list.
- url = "git://someserver.org/foo;rev=1"
+ url = "git://someserver.org/foo;rev=1;branch=master"
self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org server2.org server3.org")
self.assertFalse(bb.fetch.trusted_network(self.d, url))
def test_wild_untrusted_network(self):
# Ensure trusted_network returns False when the host is NOT in the list.
- url = "git://*.someserver.org/foo;rev=1"
+ url = "git://*.someserver.org/foo;rev=1;branch=master"
self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org server2.org server3.org")
self.assertFalse(bb.fetch.trusted_network(self.d, url))
@@ -1197,14 +1378,17 @@ class URLHandle(unittest.TestCase):
"http://www.google.com/index.html" : ('http', 'www.google.com', '/index.html', '', '', {}),
"cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', '', {'module': 'familiar/dist/ipkg'}),
"cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;tag=V0-99-81;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', collections.OrderedDict([('tag', 'V0-99-81'), ('module', 'familiar/dist/ipkg')])),
- "git://git.openembedded.org/bitbake;branch=@foo" : ('git', 'git.openembedded.org', '/bitbake', '', '', {'branch': '@foo'}),
+ "git://git.openembedded.org/bitbake;branch=@foo;protocol=https" : ('git', 'git.openembedded.org', '/bitbake', '', '', {'branch': '@foo', 'protocol' : 'https'}),
"file://somelocation;someparam=1": ('file', '', 'somelocation', '', '', {'someparam': '1'}),
+ "https://somesite.com/somerepo.git;user=anyUser:idtoken=1234" : ('https', 'somesite.com', '/somerepo.git', '', '', {'user': 'anyUser:idtoken=1234'}),
+ r'git://s.o-me_ONE:!#$%^&*()-_={}[]\|:?,.<>~`@git.openembedded.org/bitbake;branch=main;protocol=https': ('git', 'git.openembedded.org', '/bitbake', 's.o-me_ONE', r'!#$%^&*()-_={}[]\|:?,.<>~`', {'branch': 'main', 'protocol' : 'https'}),
}
# we require a pathname to encodeurl but users can still pass such urls to
# decodeurl and we need to handle them
decodedata = datatable.copy()
decodedata.update({
"http://somesite.net;someparam=1": ('http', 'somesite.net', '/', '', '', {'someparam': '1'}),
+ "npmsw://some.registry.url;package=@pkg;version=latest": ('npmsw', 'some.registry.url', '/', '', '', {'package': '@pkg', 'version': 'latest'}),
})
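+ # For orientation: these 6-tuples are exactly what decodeurl returns,
+ # i.e. scheme, host, path, user, password, params = bb.fetch.decodeurl(url),
+ # so params['branch'] == '@foo' for the git URL in the table above.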
def test_decodeurl(self):
@@ -1221,37 +1405,39 @@ class FetchLatestVersionTest(FetcherTest):
test_git_uris = {
# version pattern "X.Y.Z"
- ("mx-1.0", "git://github.com/clutter-project/mx.git;branch=mx-1.4", "9b1db6b8060bd00b121a692f942404a24ae2960f", "")
+ ("mx-1.0", "git://github.com/clutter-project/mx.git;branch=mx-1.4;protocol=https", "9b1db6b8060bd00b121a692f942404a24ae2960f", "", "")
: "1.99.4",
# version pattern "vX.Y"
# mirror of git.infradead.org since network issues interfered with testing
- ("mtd-utils", "git://git.yoctoproject.org/mtd-utils.git", "ca39eb1d98e736109c64ff9c1aa2a6ecca222d8f", "")
+ ("mtd-utils", "git://git.yoctoproject.org/mtd-utils.git;branch=master;protocol=https", "ca39eb1d98e736109c64ff9c1aa2a6ecca222d8f", "", "")
: "1.5.0",
# version pattern "pkg_name-X.Y"
# mirror of git://anongit.freedesktop.org/git/xorg/proto/presentproto since network issues interfered with testing
- ("presentproto", "git://git.yoctoproject.org/bbfetchtests-presentproto", "24f3a56e541b0a9e6c6ee76081f441221a120ef9", "")
+ ("presentproto", "git://git.yoctoproject.org/bbfetchtests-presentproto;branch=master;protocol=https", "24f3a56e541b0a9e6c6ee76081f441221a120ef9", "", "")
: "1.0",
# version pattern "pkg_name-vX.Y.Z"
- ("dtc", "git://git.qemu.org/dtc.git", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "")
+ ("dtc", "git://git.yoctoproject.org/bbfetchtests-dtc.git;branch=master;protocol=https", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "", "")
: "1.4.0",
# combination version pattern
- ("sysprof", "git://gitlab.gnome.org/GNOME/sysprof.git;protocol=https", "cd44ee6644c3641507fb53b8a2a69137f2971219", "")
+ ("sysprof", "git://gitlab.gnome.org/GNOME/sysprof.git;protocol=https;branch=master", "cd44ee6644c3641507fb53b8a2a69137f2971219", "", "")
: "1.2.0",
- ("u-boot-mkimage", "git://git.denx.de/u-boot.git;branch=master;protocol=git", "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c", "")
+ ("u-boot-mkimage", "git://git.denx.de/u-boot.git;branch=master;protocol=git", "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c", "", "")
: "2014.01",
# version pattern "yyyymmdd"
- ("mobile-broadband-provider-info", "git://gitlab.gnome.org/GNOME/mobile-broadband-provider-info.git;protocol=https", "4ed19e11c2975105b71b956440acdb25d46a347d", "")
+ ("mobile-broadband-provider-info", "git://gitlab.gnome.org/GNOME/mobile-broadband-provider-info.git;protocol=https;branch=master", "4ed19e11c2975105b71b956440acdb25d46a347d", "", "")
: "20120614",
# packages with a valid UPSTREAM_CHECK_GITTAGREGEX
# mirror of git://anongit.freedesktop.org/xorg/driver/xf86-video-omap since network issues interfered with testing
- ("xf86-video-omap", "git://git.yoctoproject.org/bbfetchtests-xf86-video-omap", "ae0394e687f1a77e966cf72f895da91840dffb8f", "(?P<pver>(\d+\.(\d\.?)*))")
+ ("xf86-video-omap", "git://git.yoctoproject.org/bbfetchtests-xf86-video-omap;branch=master;protocol=https", "ae0394e687f1a77e966cf72f895da91840dffb8f", r"(?P<pver>(\d+\.(\d\.?)*))", "")
: "0.4.3",
- ("build-appliance-image", "git://git.yoctoproject.org/poky", "b37dd451a52622d5b570183a81583cc34c2ff555", "(?P<pver>(([0-9][\.|_]?)+[0-9]))")
+ ("build-appliance-image", "git://git.yoctoproject.org/poky;branch=master;protocol=https", "b37dd451a52622d5b570183a81583cc34c2ff555", r"(?P<pver>(([0-9][\.|_]?)+[0-9]))", "")
: "11.0.0",
- ("chkconfig-alternatives-native", "git://github.com/kergoth/chkconfig;branch=sysroot", "cd437ecbd8986c894442f8fce1e0061e20f04dee", "chkconfig\-(?P<pver>((\d+[\.\-_]*)+))")
+ ("chkconfig-alternatives-native", "git://github.com/kergoth/chkconfig;branch=sysroot;protocol=https", "cd437ecbd8986c894442f8fce1e0061e20f04dee", r"chkconfig\-(?P<pver>((\d+[\.\-_]*)+))", "")
: "1.3.59",
- ("remake", "git://github.com/rocky/remake.git", "f05508e521987c8494c92d9c2871aec46307d51d", "(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\d+(\.\d+)*)*))")
+ ("remake", "git://github.com/rocky/remake.git;protocol=https;branch=master", "f05508e521987c8494c92d9c2871aec46307d51d", r"(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\d+(\.\d+)*)*))", "")
: "3.82+dbg0.9",
+ ("sysdig", "git://github.com/draios/sysdig.git;branch=dev;protocol=https", "4fb6288275f567f63515df0ff0a6518043ecfa9b", r"^(?P<pver>\d+(\.\d+)+)", "10.0.0")
+ : "0.28.0",
}
test_wget_uris = {
@@ -1267,13 +1453,16 @@ class FetchLatestVersionTest(FetcherTest):
# http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz
("cmake", "/files/v2.8/cmake-2.8.12.1.tar.gz", "", "")
: "2.8.12.1",
+ # https://download.gnome.org/sources/libxml2/2.9/libxml2-2.9.14.tar.xz
+ ("libxml2", "/software/libxml2/2.9/libxml2-2.9.14.tar.xz", "", "")
+ : "2.10.3",
#
# packages with versions only in current directory
#
- # http://downloads.yoctoproject.org/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2
+ # https://downloads.yoctoproject.org/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2
("eglic", "/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2", "", "")
: "2.19",
- # http://downloads.yoctoproject.org/releases/gnu-config/gnu-config-20120814.tar.bz2
+ # https://downloads.yoctoproject.org/releases/gnu-config/gnu-config-20120814.tar.bz2
("gnu-config", "/releases/gnu-config/gnu-config-20120814.tar.bz2", "", "")
: "20120814",
#
@@ -1290,12 +1479,18 @@ class FetchLatestVersionTest(FetcherTest):
#
# http://www.cups.org/software/1.7.2/cups-1.7.2-source.tar.bz2
# https://github.com/apple/cups/releases
- ("cups", "/software/1.7.2/cups-1.7.2-source.tar.bz2", "/apple/cups/releases", "(?P<name>cups\-)(?P<pver>((\d+[\.\-_]*)+))\-source\.tar\.gz")
+ ("cups", "/software/1.7.2/cups-1.7.2-source.tar.bz2", "/apple/cups/releases", r"(?P<name>cups\-)(?P<pver>((\d+[\.\-_]*)+))\-source\.tar\.gz")
: "2.0.0",
# http://download.oracle.com/berkeley-db/db-5.3.21.tar.gz
# http://ftp.debian.org/debian/pool/main/d/db5.3/
- ("db", "/berkeley-db/db-5.3.21.tar.gz", "/debian/pool/main/d/db5.3/", "(?P<name>db5\.3_)(?P<pver>\d+(\.\d+)+).+\.orig\.tar\.xz")
+ ("db", "/berkeley-db/db-5.3.21.tar.gz", "/debian/pool/main/d/db5.3/", r"(?P<name>db5\.3_)(?P<pver>\d+(\.\d+)+).+\.orig\.tar\.xz")
: "5.3.10",
+ #
+ # packages where the tarball compression changed in the new version
+ #
+ # http://ftp.debian.org/debian/pool/main/m/minicom/minicom_2.7.1.orig.tar.gz
+ ("minicom", "/debian/pool/main/m/minicom/minicom_2.7.1.orig.tar.gz", "", "")
+ : "2.8",
}
@skipIfNoNetwork()
@@ -1310,6 +1505,9 @@ class FetchLatestVersionTest(FetcherTest):
self.assertTrue(verstring, msg="Could not find upstream version for %s" % k[0])
r = bb.utils.vercmp_string(v, verstring)
self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring))
+ if k[4]:
+ r = bb.utils.vercmp_string(verstring, k[4])
+ self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], verstring, k[4]))
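+ # k[4], when set, is an inclusive upper bound on the reported upstream
+ # version; it lets an entry such as sysdig's reject implausibly new tags.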
def test_wget_latest_versionstring(self):
testdata = os.path.dirname(os.path.abspath(__file__)) + "/fetch-testdata"
@@ -1336,17 +1534,14 @@ class FetchLatestVersionTest(FetcherTest):
class FetchCheckStatusTest(FetcherTest):
- test_wget_uris = ["http://downloads.yoctoproject.org/releases/sato/sato-engine-0.1.tar.gz",
- "http://downloads.yoctoproject.org/releases/sato/sato-engine-0.2.tar.gz",
- "http://downloads.yoctoproject.org/releases/sato/sato-engine-0.3.tar.gz",
+ test_wget_uris = ["https://downloads.yoctoproject.org/releases/sato/sato-engine-0.1.tar.gz",
+ "https://downloads.yoctoproject.org/releases/sato/sato-engine-0.2.tar.gz",
+ "https://downloads.yoctoproject.org/releases/sato/sato-engine-0.3.tar.gz",
"https://yoctoproject.org/",
"https://docs.yoctoproject.org",
- "http://downloads.yoctoproject.org/releases/opkg/opkg-0.1.7.tar.gz",
- "http://downloads.yoctoproject.org/releases/opkg/opkg-0.3.0.tar.gz",
+ "https://downloads.yoctoproject.org/releases/opkg/opkg-0.1.7.tar.gz",
+ "https://downloads.yoctoproject.org/releases/opkg/opkg-0.3.0.tar.gz",
"ftp://sourceware.org/pub/libffi/libffi-1.20.tar.gz",
- "http://ftp.gnu.org/gnu/autoconf/autoconf-2.60.tar.gz",
- "https://ftp.gnu.org/gnu/chess/gnuchess-5.08.tar.gz",
- "https://ftp.gnu.org/gnu/gmp/gmp-4.0.tar.gz",
# GitHub releases are hosted on Amazon S3, which doesn't support HEAD
"https://github.com/kergoth/tslib/releases/download/1.1/tslib-1.1.tar.xz"
]
@@ -1384,9 +1579,7 @@ class GitMakeShallowTest(FetcherTest):
FetcherTest.setUp(self)
self.gitdir = os.path.join(self.tempdir, 'gitshallow')
bb.utils.mkdirhier(self.gitdir)
- bb.process.run('git init', cwd=self.gitdir)
- bb.process.run('git config user.email "you@example.com"', cwd=self.gitdir)
- bb.process.run('git config user.name "Your Name"', cwd=self.gitdir)
+ self.git_init()
def assertRefs(self, expected_refs):
actual_refs = self.git(['for-each-ref', '--format=%(refname)']).splitlines()
@@ -1400,13 +1593,6 @@ class GitMakeShallowTest(FetcherTest):
actual_count = len(revs.splitlines())
self.assertEqual(expected_count, actual_count, msg='Object count `%d` is not the expected `%d`' % (actual_count, expected_count))
- def git(self, cmd):
- if isinstance(cmd, str):
- cmd = 'git ' + cmd
- else:
- cmd = ['git'] + cmd
- return bb.process.run(cmd, cwd=self.gitdir)[0]
-
def make_shallow(self, args=None):
if args is None:
args = ['HEAD']
@@ -1509,15 +1695,13 @@ class GitShallowTest(FetcherTest):
self.srcdir = os.path.join(self.tempdir, 'gitsource')
bb.utils.mkdirhier(self.srcdir)
- self.git('init', cwd=self.srcdir)
- self.git('config user.email "you@example.com"', cwd=self.srcdir)
- self.git('config user.name "Your Name"', cwd=self.srcdir)
+ self.git_init(cwd=self.srcdir)
self.d.setVar('WORKDIR', self.tempdir)
self.d.setVar('S', self.gitdir)
self.d.delVar('PREMIRRORS')
self.d.delVar('MIRRORS')
- uri = 'git://%s;protocol=file;subdir=${S}' % self.srcdir
+ uri = 'git://%s;protocol=file;subdir=${S};branch=master' % self.srcdir
self.d.setVar('SRC_URI', uri)
self.d.setVar('SRCREV', '${AUTOREV}')
self.d.setVar('AUTOREV', '${@bb.fetch2.get_autorev(d)}')
@@ -1525,6 +1709,7 @@ class GitShallowTest(FetcherTest):
self.d.setVar('BB_GIT_SHALLOW', '1')
self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '0')
self.d.setVar('BB_GENERATE_SHALLOW_TARBALLS', '1')
+ self.d.setVar("__BBSRCREV_SEEN", "1")
def assertRefs(self, expected_refs, cwd=None):
if cwd is None:
@@ -1542,15 +1727,6 @@ class GitShallowTest(FetcherTest):
actual_count = len(revs.splitlines())
self.assertEqual(expected_count, actual_count, msg='Object count `%d` is not the expected `%d`' % (actual_count, expected_count))
- def git(self, cmd, cwd=None):
- if isinstance(cmd, str):
- cmd = 'git ' + cmd
- else:
- cmd = ['git'] + cmd
- if cwd is None:
- cwd = self.gitdir
- return bb.process.run(cmd, cwd=cwd)[0]
-
def add_empty_file(self, path, cwd=None, msg=None):
if msg is None:
msg = path
@@ -1745,9 +1921,7 @@ class GitShallowTest(FetcherTest):
smdir = os.path.join(self.tempdir, 'gitsubmodule')
bb.utils.mkdirhier(smdir)
- self.git('init', cwd=smdir)
- self.git('config user.email "you@example.com"', cwd=smdir)
- self.git('config user.name "Your Name"', cwd=smdir)
+ self.git_init(cwd=smdir)
# Make this look like it was cloned from a remote...
self.git('config --add remote.origin.url "%s"' % smdir, cwd=smdir)
self.git('config --add remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*"', cwd=smdir)
@@ -1755,11 +1929,11 @@ class GitShallowTest(FetcherTest):
self.add_empty_file('bsub', cwd=smdir)
self.git('submodule init', cwd=self.srcdir)
- self.git('submodule add file://%s' % smdir, cwd=self.srcdir)
+ self.git('-c protocol.file.allow=always submodule add file://%s' % smdir, cwd=self.srcdir)
self.git('submodule update', cwd=self.srcdir)
self.git('commit -m submodule -a', cwd=self.srcdir)
- uri = 'gitsm://%s;protocol=file;subdir=${S}' % self.srcdir
+ uri = 'gitsm://%s;protocol=file;subdir=${S};branch=master' % self.srcdir
fetcher, ud = self.fetch_shallow(uri)
# Verify the main repository is shallow
@@ -1777,9 +1951,7 @@ class GitShallowTest(FetcherTest):
smdir = os.path.join(self.tempdir, 'gitsubmodule')
bb.utils.mkdirhier(smdir)
- self.git('init', cwd=smdir)
- self.git('config user.email "you@example.com"', cwd=smdir)
- self.git('config user.name "Your Name"', cwd=smdir)
+ self.git_init(cwd=smdir)
# Make this look like it was cloned from a remote...
self.git('config --add remote.origin.url "%s"' % smdir, cwd=smdir)
self.git('config --add remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*"', cwd=smdir)
@@ -1787,7 +1959,7 @@ class GitShallowTest(FetcherTest):
self.add_empty_file('bsub', cwd=smdir)
self.git('submodule init', cwd=self.srcdir)
- self.git('submodule add file://%s' % smdir, cwd=self.srcdir)
+ self.git('-c protocol.file.allow=always submodule add file://%s' % smdir, cwd=self.srcdir)
self.git('submodule update', cwd=self.srcdir)
self.git('commit -m submodule -a', cwd=self.srcdir)
@@ -1799,7 +1971,7 @@ class GitShallowTest(FetcherTest):
# Set up the mirror
mirrordir = os.path.join(self.tempdir, 'mirror')
bb.utils.rename(self.dldir, mirrordir)
- self.d.setVar('PREMIRRORS', 'gitsm://.*/.* file://%s/\n' % mirrordir)
+ self.d.setVar('PREMIRRORS', 'gitsm://.*/.* file://%s/' % mirrordir)
# Fetch from the mirror
bb.utils.remove(self.dldir, recurse=True)
@@ -1825,7 +1997,7 @@ class GitShallowTest(FetcherTest):
self.git('commit --author "Foo Bar <foo@bar>" -m annex-c -a', cwd=self.srcdir)
bb.process.run('chmod u+w -R %s' % self.srcdir)
- uri = 'gitannex://%s;protocol=file;subdir=${S}' % self.srcdir
+ uri = 'gitannex://%s;protocol=file;subdir=${S};branch=master' % self.srcdir
fetcher, ud = self.fetch_shallow(uri)
self.assertRevCount(1)
@@ -1914,7 +2086,7 @@ class GitShallowTest(FetcherTest):
# Set up the mirror
mirrordir = os.path.join(self.tempdir, 'mirror')
bb.utils.mkdirhier(mirrordir)
- self.d.setVar('PREMIRRORS', 'git://.*/.* file://%s/\n' % mirrordir)
+ self.d.setVar('PREMIRRORS', 'git://.*/.* file://%s/' % mirrordir)
bb.utils.rename(os.path.join(self.dldir, mirrortarball),
os.path.join(mirrordir, mirrortarball))
@@ -2037,7 +2209,7 @@ class GitShallowTest(FetcherTest):
@skipIfNoNetwork()
def test_bitbake(self):
- self.git('remote add --mirror=fetch origin git://github.com/openembedded/bitbake', cwd=self.srcdir)
+ self.git('remote add --mirror=fetch origin https://github.com/openembedded/bitbake', cwd=self.srcdir)
self.git('config core.bare true', cwd=self.srcdir)
self.git('fetch', cwd=self.srcdir)
@@ -2072,7 +2244,7 @@ class GitShallowTest(FetcherTest):
self.d.setVar('SRCREV', 'e5939ff608b95cdd4d0ab0e1935781ab9a276ac0')
self.d.setVar('BB_GIT_SHALLOW', '1')
self.d.setVar('BB_GENERATE_SHALLOW_TARBALLS', '1')
- fetcher = bb.fetch.Fetch(["git://git.yoctoproject.org/fstests"], self.d)
+ fetcher = bb.fetch.Fetch(["git://git.yoctoproject.org/fstests;branch=master;protocol=https"], self.d)
fetcher.download()
bb.utils.remove(self.dldir + "/*.tar.gz")
@@ -2082,12 +2254,18 @@ class GitShallowTest(FetcherTest):
self.assertIn("fstests.doap", dir)
class GitLfsTest(FetcherTest):
+ def skipIfNoGitLFS():
+ import shutil
+ if not shutil.which('git-lfs'):
+ return unittest.skip('git-lfs not installed')
+ return lambda f: f
+
def setUp(self):
FetcherTest.setUp(self)
self.gitdir = os.path.join(self.tempdir, 'git')
self.srcdir = os.path.join(self.tempdir, 'gitsource')
-
+
self.d.setVar('WORKDIR', self.tempdir)
self.d.setVar('S', self.gitdir)
self.d.delVar('PREMIRRORS')
@@ -2095,24 +2273,18 @@ class GitLfsTest(FetcherTest):
self.d.setVar('SRCREV', '${AUTOREV}')
self.d.setVar('AUTOREV', '${@bb.fetch2.get_autorev(d)}')
+ self.d.setVar("__BBSRCREV_SEEN", "1")
bb.utils.mkdirhier(self.srcdir)
- self.git('init', cwd=self.srcdir)
- self.git('config user.email "you@example.com"', cwd=self.srcdir)
- self.git('config user.name "Your Name"', cwd=self.srcdir)
- with open(os.path.join(self.srcdir, '.gitattributes'), 'wt') as attrs:
- attrs.write('*.mp3 filter=lfs -text')
- self.git(['add', '.gitattributes'], cwd=self.srcdir)
- self.git(['commit', '-m', "attributes", '.gitattributes'], cwd=self.srcdir)
+ self.git_init(cwd=self.srcdir)
+ self.commit_file('.gitattributes', '*.mp3 filter=lfs -text')
- def git(self, cmd, cwd=None):
- if isinstance(cmd, str):
- cmd = 'git ' + cmd
- else:
- cmd = ['git'] + cmd
- if cwd is None:
- cwd = self.gitdir
- return bb.process.run(cmd, cwd=cwd)[0]
+ def commit_file(self, filename, content):
+ with open(os.path.join(self.srcdir, filename), "w") as f:
+ f.write(content)
+ self.git(["add", filename], cwd=self.srcdir)
+ self.git(["commit", "-m", "Change"], cwd=self.srcdir)
+ return self.git(["rev-parse", "HEAD"], cwd=self.srcdir).strip()
def fetch(self, uri=None, download=True):
uris = self.d.getVar('SRC_URI').split()
@@ -2125,65 +2297,158 @@ class GitLfsTest(FetcherTest):
ud = fetcher.ud[uri]
return fetcher, ud
+ def get_real_git_lfs_file(self):
+ self.d.setVar('PATH', os.environ.get('PATH'))
+ fetcher, ud = self.fetch()
+ fetcher.unpack(self.d.getVar('WORKDIR'))
+ unpacked_lfs_file = os.path.join(self.d.getVar('WORKDIR'), 'git', "Cat_poster_1.jpg")
+ return unpacked_lfs_file
+
+ @skipIfNoGitLFS()
+ def test_fetch_lfs_on_srcrev_change(self):
+ """Test if fetch downloads missing LFS objects when a different revision within an existing repository is requested"""
+ self.git(["lfs", "install", "--local"], cwd=self.srcdir)
+
+ @contextlib.contextmanager
+ def hide_upstream_repository():
+ """Hide the upstream repository to make sure that git lfs cannot pull from it"""
+ temp_name = self.srcdir + ".bak"
+ os.rename(self.srcdir, temp_name)
+ try:
+ yield
+ finally:
+ os.rename(temp_name, self.srcdir)
+
+ def fetch_and_verify(revision, filename, content):
+ self.d.setVar('SRCREV', revision)
+ fetcher, ud = self.fetch()
+
+ with hide_upstream_repository():
+ workdir = self.d.getVar('WORKDIR')
+ fetcher.unpack(workdir)
+
+ with open(os.path.join(workdir, "git", filename)) as f:
+ self.assertEqual(f.read(), content)
+
+ commit_1 = self.commit_file("a.mp3", "version 1")
+ commit_2 = self.commit_file("a.mp3", "version 2")
+
+ self.d.setVar('SRC_URI', "git://%s;protocol=file;lfs=1;branch=master" % self.srcdir)
+
+ # Seed the local download folder by fetching the latest commit and verifying that the LFS contents are
+ # available even when the upstream repository disappears.
+ fetch_and_verify(commit_2, "a.mp3", "version 2")
+ # Verify that even when an older revision is fetched, the needed LFS objects are fetched into the download
+ # folder.
+ fetch_and_verify(commit_1, "a.mp3", "version 1")
+
+ @skipIfNoGitLFS()
+ @skipIfNoNetwork()
+ def test_real_git_lfs_repo_succeeds_without_lfs_param(self):
+ self.d.setVar('SRC_URI', "git://gitlab.com/gitlab-examples/lfs.git;protocol=https;branch=master")
+ f = self.get_real_git_lfs_file()
+ self.assertTrue(os.path.exists(f))
+ self.assertEqual("c0baab607a97839c9a328b4310713307", bb.utils.md5_file(f))
+
+ @skipIfNoGitLFS()
+ @skipIfNoNetwork()
+ def test_real_git_lfs_repo_succeeds(self):
+ self.d.setVar('SRC_URI', "git://gitlab.com/gitlab-examples/lfs.git;protocol=https;branch=master;lfs=1")
+ f = self.get_real_git_lfs_file()
+ self.assertTrue(os.path.exists(f))
+ self.assertEqual("c0baab607a97839c9a328b4310713307", bb.utils.md5_file(f))
+
+ @skipIfNoGitLFS()
+ @skipIfNoNetwork()
+ def test_real_git_lfs_repo_skips(self):
+ self.d.setVar('SRC_URI', "git://gitlab.com/gitlab-examples/lfs.git;protocol=https;branch=master;lfs=0")
+ f = self.get_real_git_lfs_file()
+ # This is the actual non-smudged placeholder file on the repo if git-lfs does not run
+ lfs_file = (
+ 'version https://git-lfs.github.com/spec/v1\n'
+ 'oid sha256:34be66b1a39a1955b46a12588df9d5f6fc1da790e05cf01f3c7422f4bbbdc26b\n'
+ 'size 11423554\n'
+ )
+
+ with open(f) as fh:
+ self.assertEqual(lfs_file, fh.read())
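+
+ # The version/oid/size lines above are the standard Git LFS pointer
+ # format; matching the file byte-for-byte proves the smudge filter never
+ # ran, i.e. lfs=0 really did leave the placeholder in place.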
+
+ @skipIfNoGitLFS()
def test_lfs_enabled(self):
import shutil
- uri = 'git://%s;protocol=file;subdir=${S};lfs=1' % self.srcdir
+ uri = 'git://%s;protocol=file;lfs=1;branch=master' % self.srcdir
self.d.setVar('SRC_URI', uri)
- # Careful: suppress initial attempt at downloading until
- # we know whether git-lfs is installed.
- fetcher, ud = self.fetch(uri=None, download=False)
- self.assertIsNotNone(ud.method._find_git_lfs)
-
- # If git-lfs can be found, the unpack should be successful. Only
- # attempt this with the real live copy of git-lfs installed.
- if ud.method._find_git_lfs(self.d):
- fetcher.download()
- shutil.rmtree(self.gitdir, ignore_errors=True)
- fetcher.unpack(self.d.getVar('WORKDIR'))
-
- # If git-lfs cannot be found, the unpack should throw an error
- with self.assertRaises(bb.fetch2.FetchError):
- fetcher.download()
- ud.method._find_git_lfs = lambda d: False
- shutil.rmtree(self.gitdir, ignore_errors=True)
- fetcher.unpack(self.d.getVar('WORKDIR'))
+ # With git-lfs installed, test that we can fetch and unpack
+ fetcher, ud = self.fetch()
+ shutil.rmtree(self.gitdir, ignore_errors=True)
+ fetcher.unpack(self.d.getVar('WORKDIR'))
+ @skipIfNoGitLFS()
def test_lfs_disabled(self):
import shutil
- uri = 'git://%s;protocol=file;subdir=${S};lfs=0' % self.srcdir
+ uri = 'git://%s;protocol=file;lfs=0;branch=master' % self.srcdir
self.d.setVar('SRC_URI', uri)
- # In contrast to test_lfs_enabled(), allow the implicit download
- # done by self.fetch() to occur here. The point of this test case
- # is to verify that the fetcher can survive even if the source
+ # Verify that the fetcher can survive even if the source
# repository has Git LFS usage configured.
fetcher, ud = self.fetch()
- self.assertIsNotNone(ud.method._find_git_lfs)
-
- # If git-lfs can be found, the unpack should be successful. A
- # live copy of git-lfs is not required for this case, so
- # unconditionally forge its presence.
- ud.method._find_git_lfs = lambda d: True
- shutil.rmtree(self.gitdir, ignore_errors=True)
fetcher.unpack(self.d.getVar('WORKDIR'))
- # If git-lfs cannot be found, the unpack should be successful
- ud.method._find_git_lfs = lambda d: False
- shutil.rmtree(self.gitdir, ignore_errors=True)
- fetcher.unpack(self.d.getVar('WORKDIR'))
+ def test_lfs_enabled_not_installed(self):
+ import shutil
+
+ uri = 'git://%s;protocol=file;lfs=1;branch=master' % self.srcdir
+ self.d.setVar('SRC_URI', uri)
+
+ # Careful: suppress initial attempt at downloading
+ fetcher, ud = self.fetch(uri=None, download=False)
+
+ # Artificially assert that git-lfs is not installed, so
+ # we can verify a failure to unpack in its absence.
+ old_find_git_lfs = ud.method._find_git_lfs
+ try:
+ # If git-lfs cannot be found, the unpack should throw an error
+ with self.assertRaises(bb.fetch2.FetchError):
+ fetcher.download()
+ ud.method._find_git_lfs = lambda d: False
+ shutil.rmtree(self.gitdir, ignore_errors=True)
+ fetcher.unpack(self.d.getVar('WORKDIR'))
+ finally:
+ ud.method._find_git_lfs = old_find_git_lfs
+
+ def test_lfs_disabled_not_installed(self):
+ import shutil
+
+ uri = 'git://%s;protocol=file;lfs=0;branch=master' % self.srcdir
+ self.d.setVar('SRC_URI', uri)
+
+ # Careful: suppress initial attempt at downloading
+ fetcher, ud = self.fetch(uri=None, download=False)
+
+ # Artificially assert that git-lfs is not installed, so
+ # we can verify a failure to unpack in its absence.
+ old_find_git_lfs = ud.method._find_git_lfs
+ try:
+ # Even if git-lfs cannot be found, the unpack should be successful
+ fetcher.download()
+ ud.method._find_git_lfs = lambda d: False
+ shutil.rmtree(self.gitdir, ignore_errors=True)
+ fetcher.unpack(self.d.getVar('WORKDIR'))
+ finally:
+ ud.method._find_git_lfs = old_find_git_lfs
class GitURLWithSpacesTest(FetcherTest):
test_git_urls = {
- "git://tfs-example.org:22/tfs/example%20path/example.git" : {
- 'url': 'git://tfs-example.org:22/tfs/example%20path/example.git',
+ "git://tfs-example.org:22/tfs/example%20path/example.git;branch=master" : {
+ 'url': 'git://tfs-example.org:22/tfs/example%20path/example.git;branch=master',
'gitsrcname': 'tfs-example.org.22.tfs.example_path.example.git',
'path': '/tfs/example path/example.git'
},
- "git://tfs-example.org:22/tfs/example%20path/example%20repo.git" : {
- 'url': 'git://tfs-example.org:22/tfs/example%20path/example%20repo.git',
+ "git://tfs-example.org:22/tfs/example%20path/example%20repo.git;branch=master" : {
+ 'url': 'git://tfs-example.org:22/tfs/example%20path/example%20repo.git;branch=master',
'gitsrcname': 'tfs-example.org.22.tfs.example_path.example_repo.git',
'path': '/tfs/example path/example repo.git'
}
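+ # The expected gitsrcname values show how the fetcher flattens a URL into
+ # a directory name: host, port and path separators become '.', while the
+ # decoded "%20" spaces become '_'.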
@@ -2207,11 +2472,129 @@ class GitURLWithSpacesTest(FetcherTest):
self.assertEqual(ud.clonedir, os.path.join(self.dldir, "git2", ref['gitsrcname']))
self.assertEqual(ud.fullmirror, os.path.join(self.dldir, "git2_" + ref['gitsrcname'] + '.tar.gz'))
+class CrateTest(FetcherTest):
+ @skipIfNoNetwork()
+ def test_crate_url(self):
+
+ uri = "crate://crates.io/glob/0.2.11"
+ self.d.setVar('SRC_URI', uri)
+
+ uris = self.d.getVar('SRC_URI').split()
+ d = self.d
+
+ fetcher = bb.fetch2.Fetch(uris, self.d)
+ ud = fetcher.ud[fetcher.urls[0]]
+
+ self.assertIn("name", ud.parm)
+ self.assertEqual(ud.parm["name"], "glob-0.2.11")
+ self.assertIn("downloadfilename", ud.parm)
+ self.assertEqual(ud.parm["downloadfilename"], "glob-0.2.11.crate")
+
+ fetcher.download()
+ fetcher.unpack(self.tempdir)
+ self.assertEqual(sorted(os.listdir(self.tempdir)), ['cargo_home', 'download', 'unpacked'])
+ self.assertEqual(sorted(os.listdir(self.tempdir + "/download")), ['glob-0.2.11.crate', 'glob-0.2.11.crate.done'])
+ self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/glob-0.2.11/.cargo-checksum.json"))
+ self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/glob-0.2.11/src/lib.rs"))
+
+ @skipIfNoNetwork()
+ def test_crate_url_matching_recipe(self):
+
+ self.d.setVar('BP', 'glob-0.2.11')
+
+ uri = "crate://crates.io/glob/0.2.11"
+ self.d.setVar('SRC_URI', uri)
+
+ uris = self.d.getVar('SRC_URI').split()
+ d = self.d
+
+ fetcher = bb.fetch2.Fetch(uris, self.d)
+ ud = fetcher.ud[fetcher.urls[0]]
+
+ self.assertIn("name", ud.parm)
+ self.assertEqual(ud.parm["name"], "glob-0.2.11")
+ self.assertIn("downloadfilename", ud.parm)
+ self.assertEqual(ud.parm["downloadfilename"], "glob-0.2.11.crate")
+
+ fetcher.download()
+ fetcher.unpack(self.tempdir)
+ self.assertEqual(sorted(os.listdir(self.tempdir)), ['download', 'glob-0.2.11', 'unpacked'])
+ self.assertEqual(sorted(os.listdir(self.tempdir + "/download")), ['glob-0.2.11.crate', 'glob-0.2.11.crate.done'])
+ self.assertTrue(os.path.exists(self.tempdir + "/glob-0.2.11/src/lib.rs"))
+
+ @skipIfNoNetwork()
+ def test_crate_url_params(self):
+
+ uri = "crate://crates.io/aho-corasick/0.7.20;name=aho-corasick-renamed"
+ self.d.setVar('SRC_URI', uri)
+
+ uris = self.d.getVar('SRC_URI').split()
+ d = self.d
+
+ fetcher = bb.fetch2.Fetch(uris, self.d)
+ ud = fetcher.ud[fetcher.urls[0]]
+
+ self.assertIn("name", ud.parm)
+ self.assertEqual(ud.parm["name"], "aho-corasick-renamed")
+ self.assertIn("downloadfilename", ud.parm)
+ self.assertEqual(ud.parm["downloadfilename"], "aho-corasick-0.7.20.crate")
+
+ fetcher.download()
+ fetcher.unpack(self.tempdir)
+ self.assertEqual(sorted(os.listdir(self.tempdir)), ['cargo_home', 'download', 'unpacked'])
+ self.assertEqual(sorted(os.listdir(self.tempdir + "/download")), ['aho-corasick-0.7.20.crate', 'aho-corasick-0.7.20.crate.done'])
+ self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/aho-corasick-0.7.20/.cargo-checksum.json"))
+ self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/aho-corasick-0.7.20/src/lib.rs"))
+
+ @skipIfNoNetwork()
+ def test_crate_url_multi(self):
+
+ uri = "crate://crates.io/glob/0.2.11 crate://crates.io/time/0.1.35"
+ self.d.setVar('SRC_URI', uri)
+
+ uris = self.d.getVar('SRC_URI').split()
+ d = self.d
+
+ fetcher = bb.fetch2.Fetch(uris, self.d)
+ ud = fetcher.ud[fetcher.urls[0]]
+
+ self.assertIn("name", ud.parm)
+ self.assertEqual(ud.parm["name"], "glob-0.2.11")
+ self.assertIn("downloadfilename", ud.parm)
+ self.assertEqual(ud.parm["downloadfilename"], "glob-0.2.11.crate")
+
+ ud = fetcher.ud[fetcher.urls[1]]
+ self.assertIn("name", ud.parm)
+ self.assertEqual(ud.parm["name"], "time-0.1.35")
+ self.assertIn("downloadfilename", ud.parm)
+ self.assertEqual(ud.parm["downloadfilename"], "time-0.1.35.crate")
+
+ fetcher.download()
+ fetcher.unpack(self.tempdir)
+ self.assertEqual(sorted(os.listdir(self.tempdir)), ['cargo_home', 'download', 'unpacked'])
+ self.assertEqual(sorted(os.listdir(self.tempdir + "/download")), ['glob-0.2.11.crate', 'glob-0.2.11.crate.done', 'time-0.1.35.crate', 'time-0.1.35.crate.done'])
+ self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/glob-0.2.11/.cargo-checksum.json"))
+ self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/glob-0.2.11/src/lib.rs"))
+ self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/time-0.1.35/.cargo-checksum.json"))
+ self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/time-0.1.35/src/lib.rs"))
+
+ @skipIfNoNetwork()
+ def test_crate_incorrect_cksum(self):
+ uri = "crate://crates.io/aho-corasick/0.7.20"
+ self.d.setVar('SRC_URI', uri)
+ self.d.setVarFlag("SRC_URI", "aho-corasick-0.7.20.sha256sum", hashlib.sha256("Invalid".encode("utf-8")).hexdigest())
+
+ uris = self.d.getVar('SRC_URI').split()
+
+ fetcher = bb.fetch2.Fetch(uris, self.d)
+ with self.assertRaisesRegex(bb.fetch2.FetchError, "Fetcher failure for URL"):
+ fetcher.download()
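+
+ # The varflag follows the usual SRC_URI checksum convention: the URI's
+ # name parameter ("aho-corasick-0.7.20") plus ".sha256sum". Supplying the
+ # crate's real digest here would let the same fetch succeed.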
+
class NPMTest(FetcherTest):
def skipIfNoNpm():
import shutil
if not shutil.which('npm'):
- return unittest.skip('npm not installed, tests being skipped')
+ return unittest.skip('npm not installed')
return lambda f: f
@skipIfNoNpm()
@@ -2256,11 +2639,42 @@ class NPMTest(FetcherTest):
ud = fetcher.ud[fetcher.urls[0]]
fetcher.download()
self.assertTrue(os.path.exists(ud.localpath))
+
+ # Setup the mirror by renaming the download directory
+ mirrordir = os.path.join(self.tempdir, 'mirror')
+ bb.utils.rename(self.dldir, mirrordir)
+ os.mkdir(self.dldir)
+
+ # Configure the premirror to be used
+ self.d.setVar('PREMIRRORS', 'https?$://.*/.* file://%s/npm2' % mirrordir)
+ self.d.setVar('BB_FETCH_PREMIRRORONLY', '1')
+
+ # Fetch again
+ self.assertFalse(os.path.exists(ud.localpath))
+ # The npm fetcher doesn't cope with the .resolved file disappearing while
+ # the fetcher object is alive, which happened when the download directory
+ # was renamed to "mirror" above. Use a fresh fetcher to go with the
+ # now-empty download directory.
+ fetcher = bb.fetch.Fetch([url], self.d)
+ ud = fetcher.ud[fetcher.urls[0]]
+ fetcher.download()
+ self.assertTrue(os.path.exists(ud.localpath))
+
+ @skipIfNoNpm()
+ @skipIfNoNetwork()
+ def test_npm_premirrors_with_specified_filename(self):
+ url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0'
+ # Fetch once to get a tarball
+ fetcher = bb.fetch.Fetch([url], self.d)
+ ud = fetcher.ud[fetcher.urls[0]]
+ fetcher.download()
+ self.assertTrue(os.path.exists(ud.localpath))
# Setup the mirror
mirrordir = os.path.join(self.tempdir, 'mirror')
bb.utils.mkdirhier(mirrordir)
- os.replace(ud.localpath, os.path.join(mirrordir, os.path.basename(ud.localpath)))
- self.d.setVar('PREMIRRORS', 'https?$://.*/.* file://%s/\n' % mirrordir)
+ mirrorfilename = os.path.join(mirrordir, os.path.basename(ud.localpath))
+ os.replace(ud.localpath, mirrorfilename)
+ self.d.setVar('PREMIRRORS', 'https?$://.*/.* file://%s' % mirrorfilename)
self.d.setVar('BB_FETCH_PREMIRRORONLY', '1')
# Fetch again
self.assertFalse(os.path.exists(ud.localpath))
@@ -2280,7 +2694,7 @@ class NPMTest(FetcherTest):
mirrordir = os.path.join(self.tempdir, 'mirror')
bb.utils.mkdirhier(mirrordir)
os.replace(ud.localpath, os.path.join(mirrordir, os.path.basename(ud.localpath)))
- self.d.setVar('MIRRORS', 'https?$://.*/.* file://%s/\n' % mirrordir)
+ self.d.setVar('MIRRORS', 'https?$://.*/.* file://%s/' % mirrordir)
# Update the resolved url to an invalid url
with open(ud.resolvefile, 'r') as f:
url = f.read()
@@ -2299,7 +2713,7 @@ class NPMTest(FetcherTest):
url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0;destsuffix=foo/bar;downloadfilename=foo-bar.tgz'
fetcher = bb.fetch.Fetch([url], self.d)
fetcher.download()
- self.assertTrue(os.path.exists(os.path.join(self.dldir, 'foo-bar.tgz')))
+ self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'foo-bar.tgz')))
fetcher.unpack(self.unpackdir)
unpackdir = os.path.join(self.unpackdir, 'foo', 'bar')
self.assertTrue(os.path.exists(os.path.join(unpackdir, 'package.json')))
@@ -2329,7 +2743,7 @@ class NPMTest(FetcherTest):
@skipIfNoNpm()
@skipIfNoNetwork()
def test_npm_registry_alternate(self):
- url = 'npm://registry.freajs.org;package=@savoirfairelinux/node-server-example;version=1.0.0'
+ url = 'npm://skimdb.npmjs.com;package=@savoirfairelinux/node-server-example;version=1.0.0'
fetcher = bb.fetch.Fetch([url], self.d)
fetcher.download()
fetcher.unpack(self.unpackdir)
@@ -2439,6 +2853,63 @@ class NPMTest(FetcherTest):
@skipIfNoNpm()
@skipIfNoNetwork()
+ def test_npmsw_git(self):
+ swfile = self.create_shrinkwrap_file({
+ 'dependencies': {
+ 'cookie': {
+ 'version': 'github:jshttp/cookie.git#aec1177c7da67e3b3273df96cf476824dbc9ae09',
+ 'from': 'github:jshttp/cookie.git'
+ }
+ }
+ })
+ fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d)
+ fetcher.download()
+ self.assertTrue(os.path.exists(os.path.join(self.dldir, 'git2', 'github.com.jshttp.cookie.git')))
+
+ swfile = self.create_shrinkwrap_file({
+ 'dependencies': {
+ 'cookie': {
+ 'version': 'jshttp/cookie.git#aec1177c7da67e3b3273df96cf476824dbc9ae09',
+ 'from': 'jshttp/cookie.git'
+ }
+ }
+ })
+ fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d)
+ fetcher.download()
+ self.assertTrue(os.path.exists(os.path.join(self.dldir, 'git2', 'github.com.jshttp.cookie.git')))
+
+ swfile = self.create_shrinkwrap_file({
+ 'dependencies': {
+ 'nodejs': {
+ 'version': 'gitlab:gitlab-examples/nodejs.git#892a1f16725e56cc3a2cb0d677be42935c8fc262',
+ 'from': 'gitlab:gitlab-examples/nodejs'
+ }
+ }
+ })
+ fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d)
+ fetcher.download()
+ self.assertTrue(os.path.exists(os.path.join(self.dldir, 'git2', 'gitlab.com.gitlab-examples.nodejs.git')))
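+
+ # All three shrinkwrap "version" spellings (github:, bare org/repo and
+ # gitlab:) resolve to plain git fetches, hence the clones land under
+ # DL_DIR/git2/<host>.<org>.<repo>.git as asserted above.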
+
+ @skipIfNoNpm()
+ def test_npmsw_local_link_sources(self):
+ # generated with npm-10 in:
+ # lib/bb/tests/fetch-testdata/npm-local-link-sources/inner/
+ testdata = os.path.dirname(os.path.abspath(__file__)) + "/fetch-testdata"
+ testdir = os.path.join(testdata, 'npm-local-link-sources')
+
+ self.sdir = os.path.join(self.unpackdir, testdata[1:], 'npm-local-link-sources', 'inner')
+ self.d.setVar('S', self.sdir)
+
+ fetcher = bb.fetch.Fetch(['file://%s/' % testdir, 'npmsw://%s/inner/npm-shrinkwrap-v3.json' % testdir], self.d)
+ fetcher.download()
+ fetcher.unpack(self.unpackdir)
+
+ swf = os.path.join(self.sdir, 'npm-shrinkwrap-v3.json')
+ pkgf = os.path.join(self.sdir, 'node_modules', '@npm-local-link-sources/upper', 'package.json')
+ self.assertTrue(os.path.exists(swf), msg="%s doesn't exist" % swf)
+ self.assertTrue(os.path.exists(pkgf), msg="%s doesn't exist" % pkgf)
+
+ @skipIfNoNetwork()
def test_npmsw_dev(self):
swfile = self.create_shrinkwrap_file({
'dependencies': {
@@ -2596,7 +3067,7 @@ class NPMTest(FetcherTest):
mirrordir = os.path.join(self.tempdir, 'mirror')
bb.utils.mkdirhier(mirrordir)
os.replace(ud.localpath, os.path.join(mirrordir, os.path.basename(ud.localpath)))
- self.d.setVar('PREMIRRORS', 'https?$://.*/.* file://%s/\n' % mirrordir)
+ self.d.setVar('PREMIRRORS', 'https?$://.*/.* file://%s/' % mirrordir)
self.d.setVar('BB_FETCH_PREMIRRORONLY', '1')
# Fetch again
self.assertFalse(os.path.exists(ud.localpath))
@@ -2625,7 +3096,7 @@ class NPMTest(FetcherTest):
mirrordir = os.path.join(self.tempdir, 'mirror')
bb.utils.mkdirhier(mirrordir)
os.replace(ud.localpath, os.path.join(mirrordir, os.path.basename(ud.localpath)))
- self.d.setVar('MIRRORS', 'https?$://.*/.* file://%s/\n' % mirrordir)
+ self.d.setVar('MIRRORS', 'https?$://.*/.* file://%s/' % mirrordir)
# Fetch again with invalid url
self.assertFalse(os.path.exists(ud.localpath))
swfile = self.create_shrinkwrap_file({
@@ -2644,8 +3115,9 @@ class NPMTest(FetcherTest):
class GitSharedTest(FetcherTest):
def setUp(self):
super(GitSharedTest, self).setUp()
- self.recipe_url = "git://git.openembedded.org/bitbake"
+ self.recipe_url = "git://git.openembedded.org/bitbake;branch=master;protocol=https"
self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40')
+ self.d.setVar("__BBSRCREV_SEEN", "1")
@skipIfNoNetwork()
def test_shared_unpack(self):
@@ -2666,3 +3138,246 @@ class GitSharedTest(FetcherTest):
fetcher.unpack(self.unpackdir)
alt = os.path.join(self.unpackdir, 'git/.git/objects/info/alternates')
self.assertFalse(os.path.exists(alt))
+
+
+class FetchPremirroronlyLocalTest(FetcherTest):
+
+ def setUp(self):
+ super(FetchPremirroronlyLocalTest, self).setUp()
+ self.mirrordir = os.path.join(self.tempdir, "mirrors")
+ os.mkdir(self.mirrordir)
+ self.reponame = "bitbake"
+ self.gitdir = os.path.join(self.tempdir, "git", self.reponame)
+ self.recipe_url = "git://git.fake.repo/bitbake;branch=master;protocol=https"
+ self.d.setVar("BB_FETCH_PREMIRRORONLY", "1")
+ self.d.setVar("BB_NO_NETWORK", "1")
+ self.d.setVar("PREMIRRORS", self.recipe_url + " " + "file://{}".format(self.mirrordir) + " \n")
+ self.mirrorname = "git2_git.fake.repo.bitbake.tar.gz"
+ self.mirrorfile = os.path.join(self.mirrordir, self.mirrorname)
+ self.testfilename = "bitbake-fetch.test"
+
+ def make_git_repo(self):
+ recipeurl = "git:/git.fake.repo/bitbake"
+ os.makedirs(self.gitdir)
+ self.git_init(cwd=self.gitdir)
+ for i in range(0):
+ self.git_new_commit()
+ bb.process.run('tar -czvf {} .'.format(os.path.join(self.mirrordir, self.mirrorname)), cwd = self.gitdir)
+
+ def git_new_commit(self):
+ import random
+ os.unlink(os.path.join(self.mirrordir, self.mirrorname))
+ branch = self.git("branch --show-current", self.gitdir).split()
+ with open(os.path.join(self.gitdir, self.testfilename), "w") as testfile:
+ testfile.write("File {} from branch {}; Useless random data {}".format(self.testfilename, branch, random.random()))
+ self.git("add {}".format(self.testfilename), self.gitdir)
+ self.git("commit -a -m \"This random commit {} in branch {}. I'm useless.\"".format(random.random(), branch), self.gitdir)
+ bb.process.run('tar -czvf {} .'.format(os.path.join(self.mirrordir, self.mirrorname)), cwd = self.gitdir)
+ return self.git("rev-parse HEAD", self.gitdir).strip()
+
+ def git_new_branch(self, name):
+ self.git_new_commit()
+ head = self.git("rev-parse HEAD", self.gitdir).strip()
+ self.git("checkout -b {}".format(name), self.gitdir)
+ newrev = self.git_new_commit()
+ self.git("checkout {}".format(head), self.gitdir)
+ return newrev
+
+ def test_mirror_multiple_fetches(self):
+ self.make_git_repo()
+ self.d.setVar("SRCREV", self.git_new_commit())
+ fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+ fetcher.download()
+ fetcher.unpack(self.unpackdir)
+ ## New commit in the premirror; it's not in the download dir yet
+ self.d.setVar("SRCREV", self.git_new_commit())
+ fetcher2 = bb.fetch.Fetch([self.recipe_url], self.d)
+ fetcher2.download()
+ fetcher2.unpack(self.unpackdir)
+ ## New commit in the premirror; it's not in the download dir yet
+ self.d.setVar("SRCREV", self.git_new_commit())
+ fetcher3 = bb.fetch.Fetch([self.recipe_url], self.d)
+ fetcher3.download()
+ fetcher3.unpack(self.unpackdir)
+
+
+ def test_mirror_commit_nonexistent(self):
+ self.make_git_repo()
+ self.d.setVar("SRCREV", "0"*40)
+ fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+ with self.assertRaises(bb.fetch2.NetworkAccess):
+ fetcher.download()
+
+ def test_mirror_commit_exists(self):
+ self.make_git_repo()
+ self.d.setVar("SRCREV", self.git_new_commit())
+ fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+ fetcher.download()
+ fetcher.unpack(self.unpackdir)
+
+ def test_mirror_tarball_nonexistent(self):
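+ ## No mirror tarball was ever created, so the premirror-only download must
+ ## fail with NetworkAccess.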
+ self.d.setVar("SRCREV", "0"*40)
+ fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+ with self.assertRaises(bb.fetch2.NetworkAccess):
+ fetcher.download()
+
+ def test_mirror_tarball_multiple_branches(self):
+ """
+ Test that PREMIRRORS can handle multiple names/branches correctly
+ when both branches contain the required revisions.
+ """
+ self.make_git_repo()
+ branch1rev = self.git_new_branch("testbranch1")
+ branch2rev = self.git_new_branch("testbranch2")
+ self.recipe_url = "git://git.fake.repo/bitbake;branch=testbranch1,testbranch2;protocol=https;name=branch1,branch2"
+ self.d.setVar("SRCREV_branch1", branch1rev)
+ self.d.setVar("SRCREV_branch2", branch2rev)
+ fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+ self.assertTrue(os.path.exists(self.mirrorfile), "Mirror file doesn't exist")
+ fetcher.download()
+ fetcher.unpack(os.path.join(self.tempdir, "unpacked"))
+ unpacked = os.path.join(self.tempdir, "unpacked", "git", self.testfilename)
+ self.assertTrue(os.path.exists(unpacked), "Repo has not been unpacked properly!")
+ with open(unpacked, 'r') as f:
+ content = f.read()
+ ## We expect to see testbranch1 in the file, not master, not testbranch2
+ self.assertIn("testbranch1", content, "Wrong branch has been checked out!")
+
+ def test_mirror_tarball_multiple_branches_nobranch(self):
+ """
+ Test that PREMIRRORS can handle multiple names/branches correctly:
+ an unbalanced name/branch list raises ParameterError.
+ """
+ self.make_git_repo()
+ branch1rev = self.git_new_branch("testbranch1")
+ branch2rev = self.git_new_branch("testbranch2")
+ self.recipe_url = "git://git.fake.repo/bitbake;branch=testbranch1;protocol=https;name=branch1,branch2"
+ self.d.setVar("SRCREV_branch1", branch1rev)
+ self.d.setVar("SRCREV_branch2", branch2rev)
+ with self.assertRaises(bb.fetch2.ParameterError):
+ fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+
+ def test_mirror_tarball_multiple_branches_norev(self):
+ """
+ Test that PREMIRRORS can handle multiple names/branches correctly
+ when one of the branches specifies a non-existent SRCREV.
+ """
+ self.make_git_repo()
+ branch1rev = self.git_new_branch("testbranch1")
+ branch2rev = self.git_new_branch("testbranch2")
+ self.recipe_url = "git://git.fake.repo/bitbake;branch=testbranch1,testbranch2;protocol=https;name=branch1,branch2"
+ self.d.setVar("SRCREV_branch1", branch1rev)
+ self.d.setVar("SRCREV_branch2", "0"*40)
+ fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+ self.assertTrue(os.path.exists(self.mirrorfile), "Mirror file doesn't exist")
+ with self.assertRaises(bb.fetch2.NetworkAccess):
+ fetcher.download()
+
+
+class FetchPremirroronlyNetworkTest(FetcherTest):
+
+ def setUp(self):
+ super(FetchPremirroronlyNetworkTest, self).setUp()
+ self.mirrordir = os.path.join(self.tempdir, "mirrors")
+ os.mkdir(self.mirrordir)
+ self.reponame = "fstests"
+ self.clonedir = os.path.join(self.tempdir, "git")
+ self.gitdir = os.path.join(self.tempdir, "git", "{}.git".format(self.reponame))
+ self.recipe_url = "git://git.yoctoproject.org/fstests;protocol=https"
+ self.d.setVar("BB_FETCH_PREMIRRORONLY", "1")
+ self.d.setVar("BB_NO_NETWORK", "0")
+ self.d.setVar("PREMIRRORS", self.recipe_url + " " + "file://{}".format(self.mirrordir) + " \n")
+
+ def make_git_repo(self):
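+ ## Pack a shallow clone (history since 2013 only) as the mirror tarball;
+ ## older commits are deliberately absent from the mirror.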
+ import shutil
+ self.mirrorname = "git2_git.yoctoproject.org.fstests.tar.gz"
+ os.makedirs(self.clonedir)
+ self.git("clone --bare --shallow-since=\"01.01.2013\" {}".format(self.recipe_url), self.clonedir)
+ bb.process.run('tar -czvf {} .'.format(os.path.join(self.mirrordir, self.mirrorname)), cwd = self.gitdir)
+ shutil.rmtree(self.clonedir)
+
+ @skipIfNoNetwork()
+ def test_mirror_tarball_updated(self):
+ self.make_git_repo()
+ ## Upstream commit is in the mirror
+ self.d.setVar("SRCREV", "49d65d53c2bf558ae6e9185af0f3af7b79d255ec")
+ fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+ fetcher.download()
+
+ @skipIfNoNetwork()
+ def test_mirror_tarball_outdated(self):
+ self.make_git_repo()
+ ## Upstream commit not in the mirror
+ self.d.setVar("SRCREV", "15413486df1f5a5b5af699b6f3ba5f0984e52a9f")
+ fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+ with self.assertRaises(bb.fetch2.NetworkAccess):
+ fetcher.download()
+
+class FetchPremirroronlyMercurialTest(FetcherTest):
+ """ Test for premirrors with mercurial repos
+ the test covers also basic hg:// clone (see fetch_and_create_tarball
+ """
+ def skipIfNoHg():
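+ ## Decorator factory evaluated at class-definition time; skips the test
+ ## when no hg client is installed.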
+ import shutil
+ if not shutil.which('hg'):
+ return unittest.skip('Mercurial not installed')
+ return lambda f: f
+
+ def setUp(self):
+ super(FetchPremirroronlyMercurialTest, self).setUp()
+ self.mirrordir = os.path.join(self.tempdir, "mirrors")
+ os.mkdir(self.mirrordir)
+ self.reponame = "libgnt"
+ self.clonedir = os.path.join(self.tempdir, "hg")
+ self.recipe_url = "hg://keep.imfreedom.org/libgnt;module=libgnt"
+ self.d.setVar("SRCREV", "53e8b422faaf")
+ self.mirrorname = "hg_libgnt_keep.imfreedom.org_.libgnt.tar.gz"
+
+ def fetch_and_create_tarball(self):
+ """
+ Ask bitbake to download repo and prepare mirror tarball for us
+ """
+ self.d.setVar("BB_GENERATE_MIRROR_TARBALLS", "1")
+ fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+ fetcher.download()
+ mirrorfile = os.path.join(self.d.getVar("DL_DIR"), self.mirrorname)
+ self.assertTrue(os.path.exists(mirrorfile), "Mirror tarball {} has not been created".format(mirrorfile))
+ ## Move the tarball into the mirror directory
+ os.rename(mirrorfile, os.path.join(self.mirrordir, self.mirrorname))
+ self.d.setVar("BB_GENERATE_MIRROR_TARBALLS", "0")
+
+
+ @skipIfNoNetwork()
+ @skipIfNoHg()
+ def test_premirror_mercurial(self):
+ self.fetch_and_create_tarball()
+ self.d.setVar("PREMIRRORS", self.recipe_url + " " + "file://{}".format(self.mirrordir) + " \n")
+ self.d.setVar("BB_FETCH_PREMIRRORONLY", "1")
+ self.d.setVar("BB_NO_NETWORK", "1")
+ fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+ fetcher.download()
+
+class FetchPremirroronlyBrokenTarball(FetcherTest):
+
+ def setUp(self):
+ super(FetchPremirroronlyBrokenTarball, self).setUp()
+ self.mirrordir = os.path.join(self.tempdir, "mirrors")
+ os.mkdir(self.mirrordir)
+ self.reponame = "bitbake"
+ self.gitdir = os.path.join(self.tempdir, "git", self.reponame)
+ self.recipe_url = "git://git.fake.repo/bitbake;protocol=https"
+ self.d.setVar("BB_FETCH_PREMIRRORONLY", "1")
+ self.d.setVar("BB_NO_NETWORK", "1")
+ self.d.setVar("PREMIRRORS", self.recipe_url + " " + "file://{}".format(self.mirrordir) + " \n")
+ self.mirrorname = "git2_git.fake.repo.bitbake.tar.gz"
+ with open(os.path.join(self.mirrordir, self.mirrorname), 'w') as targz:
+ targz.write("This is not a tar.gz file!")
+
+ def test_mirror_broken_download(self):
+ self.d.setVar("SRCREV", "0"*40)
+ fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+ with self.assertRaises(bb.fetch2.FetchError), self.assertLogs() as logs:
+ fetcher.download()
+ output = "".join(logs.output)
+ self.assertNotIn(" not a git repository (or any parent up to mount point /)", output)
diff --git a/lib/bb/tests/parse.py b/lib/bb/tests/parse.py
index 9e21e1842..72d1962e7 100644
--- a/lib/bb/tests/parse.py
+++ b/lib/bb/tests/parse.py
@@ -98,8 +98,8 @@ exportD = "d"
overridetest = """
-RRECOMMENDS_${PN} = "a"
-RRECOMMENDS_${PN}_libc = "b"
+RRECOMMENDS:${PN} = "a"
+RRECOMMENDS:${PN}:libc = "b"
OVERRIDES = "libc:${PN}"
PN = "gtk+"
"""
@@ -110,16 +110,16 @@ PN = "gtk+"
self.assertEqual(d.getVar("RRECOMMENDS"), "b")
bb.data.expandKeys(d)
self.assertEqual(d.getVar("RRECOMMENDS"), "b")
- d.setVar("RRECOMMENDS_gtk+", "c")
+ d.setVar("RRECOMMENDS:gtk+", "c")
self.assertEqual(d.getVar("RRECOMMENDS"), "c")
overridetest2 = """
EXTRA_OECONF = ""
-EXTRA_OECONF_class-target = "b"
-EXTRA_OECONF_append = " c"
+EXTRA_OECONF:class-target = "b"
+EXTRA_OECONF:append = " c"
"""
- def test_parse_overrides(self):
+ def test_parse_overrides2(self):
f = self.parsehelper(self.overridetest2)
d = bb.parse.handle(f.name, self.d)['']
d.appendVar("EXTRA_OECONF", " d")
@@ -128,7 +128,7 @@ EXTRA_OECONF_append = " c"
overridetest3 = """
DESCRIPTION = "A"
-DESCRIPTION_${PN}-dev = "${DESCRIPTION} B"
+DESCRIPTION:${PN}-dev = "${DESCRIPTION} B"
PN = "bc"
"""
@@ -136,15 +136,15 @@ PN = "bc"
f = self.parsehelper(self.overridetest3)
d = bb.parse.handle(f.name, self.d)['']
bb.data.expandKeys(d)
- self.assertEqual(d.getVar("DESCRIPTION_bc-dev"), "A B")
+ self.assertEqual(d.getVar("DESCRIPTION:bc-dev"), "A B")
d.setVar("DESCRIPTION", "E")
- d.setVar("DESCRIPTION_bc-dev", "C D")
+ d.setVar("DESCRIPTION:bc-dev", "C D")
d.setVar("OVERRIDES", "bc-dev")
self.assertEqual(d.getVar("DESCRIPTION"), "C D")
classextend = """
-VAR_var_override1 = "B"
+VAR_var:override1 = "B"
EXTRA = ":override1"
OVERRIDES = "nothing${EXTRA}"
@@ -164,6 +164,7 @@ python () {
# become unset/disappear.
#
def test_parse_classextend_contamination(self):
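+ # Parsing a .bbclass now requires a class context; mark this datastore
+ # as recipe context so the class parses.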
+ self.d.setVar("__bbclasstype", "recipe")
cls = self.parsehelper(self.classextend_bbclass, suffix=".bbclass")
#clsname = os.path.basename(cls.name).replace(".bbclass", "")
self.classextend = self.classextend.replace("###CLASS###", cls.name)
@@ -185,12 +186,158 @@ deltask ${EMPTYVAR}
"""
def test_parse_addtask_deltask(self):
import sys
- f = self.parsehelper(self.addtask_deltask)
+
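+ # addtask/deltask warnings are now emitted via logging rather than stdout,
+ # so capture them with assertLogs.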
+ with self.assertLogs() as logs:
+ f = self.parsehelper(self.addtask_deltask)
+ d = bb.parse.handle(f.name, self.d)['']
+
+ output = "".join(logs.output)
+ self.assertTrue("addtask contained multiple 'before' keywords" in output)
+ self.assertTrue("addtask contained multiple 'after' keywords" in output)
+ self.assertTrue('addtask ignored: " do_patch"' in output)
+ #self.assertTrue('dependent task do_foo for do_patch does not exist' in output)
+
+ broken_multiline_comment = """
+# First line of comment \\
+# Second line of comment \\
+
+"""
+ def test_parse_broken_multiline_comment(self):
+ f = self.parsehelper(self.broken_multiline_comment)
+ with self.assertRaises(bb.BBHandledException):
+ d = bb.parse.handle(f.name, self.d)['']
+
+
+ comment_in_var = """
+VAR = " \\
+ SOMEVAL \\
+# some comment \\
+ SOMEOTHERVAL \\
+"
+"""
+ def test_parse_comment_in_var(self):
+ f = self.parsehelper(self.comment_in_var)
+ with self.assertRaises(bb.BBHandledException):
+ d = bb.parse.handle(f.name, self.d)['']
+
+
+ at_sign_in_var_flag = """
+A[flag@.service] = "nonet"
+B[flag@.target] = "ntb"
+C[f] = "flag"
+
+unset A[flag@.service]
+"""
+ def test_parse_at_sign_in_var_flag(self):
+ f = self.parsehelper(self.at_sign_in_var_flag)
d = bb.parse.handle(f.name, self.d)['']
+ self.assertEqual(d.getVar("A"), None)
+ self.assertEqual(d.getVar("B"), None)
+ self.assertEqual(d.getVarFlag("A","flag@.service"), None)
+ self.assertEqual(d.getVarFlag("B","flag@.target"), "ntb")
+ self.assertEqual(d.getVarFlag("C","f"), "flag")
+
+ def test_parse_invalid_at_sign_in_var_flag(self):
+ invalid_at_sign = self.at_sign_in_var_flag.replace("B[f", "B[@f")
+ f = self.parsehelper(invalid_at_sign)
+ with self.assertRaises(bb.parse.ParseError):
+ d = bb.parse.handle(f.name, self.d)['']
+
+ export_function_recipe = """
+inherit someclass
+"""
+
+ export_function_recipe2 = """
+inherit someclass
+
+do_compile () {
+ false
+}
+
+python do_compilepython () {
+ bb.note("Something else")
+}
+
+"""
+ export_function_class = """
+someclass_do_compile() {
+ true
+}
+
+python someclass_do_compilepython () {
+ bb.note("Something")
+}
+
+EXPORT_FUNCTIONS do_compile do_compilepython
+"""
+
+ export_function_class2 = """
+secondclass_do_compile() {
+ true
+}
+
+python secondclass_do_compilepython () {
+ bb.note("Something")
+}
+
+EXPORT_FUNCTIONS do_compile do_compilepython
+"""
- stdout = sys.stdout.getvalue()
- self.assertTrue("addtask contained multiple 'before' keywords" in stdout)
- self.assertTrue("addtask contained multiple 'after' keywords" in stdout)
- self.assertTrue('addtask ignored: " do_patch"' in stdout)
- #self.assertTrue('dependent task do_foo for do_patch does not exist' in stdout)
+ def test_parse_export_functions(self):
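+ # EXPORT_FUNCTIONS publishes someclass_do_compile as the default do_compile:
+ # a plain inherit picks up the class version, a recipe-local definition wins,
+ # and inheriting a second class replaces only functions the recipe did not define.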
+ def check_function_flags(d):
+ self.assertEqual(d.getVarFlag("do_compile", "func"), 1)
+ self.assertEqual(d.getVarFlag("do_compilepython", "func"), 1)
+ self.assertEqual(d.getVarFlag("do_compile", "python"), None)
+ self.assertEqual(d.getVarFlag("do_compilepython", "python"), "1")
+
+ with tempfile.TemporaryDirectory() as tempdir:
+ self.d.setVar("__bbclasstype", "recipe")
+ recipename = tempdir + "/recipe.bb"
+ os.makedirs(tempdir + "/classes")
+ with open(tempdir + "/classes/someclass.bbclass", "w") as f:
+ f.write(self.export_function_class)
+ f.flush()
+ with open(tempdir + "/classes/secondclass.bbclass", "w") as f:
+ f.write(self.export_function_class2)
+ f.flush()
+
+ with open(recipename, "w") as f:
+ f.write(self.export_function_recipe)
+ f.flush()
+ os.chdir(tempdir)
+ d = bb.parse.handle(recipename, bb.data.createCopy(self.d))['']
+ self.assertIn("someclass_do_compile", d.getVar("do_compile"))
+ self.assertIn("someclass_do_compilepython", d.getVar("do_compilepython"))
+ check_function_flags(d)
+
+ recipename2 = tempdir + "/recipe2.bb"
+ with open(recipename2, "w") as f:
+ f.write(self.export_function_recipe2)
+ f.flush()
+
+ d = bb.parse.handle(recipename2, bb.data.createCopy(self.d))['']
+ self.assertNotIn("someclass_do_compile", d.getVar("do_compile"))
+ self.assertNotIn("someclass_do_compilepython", d.getVar("do_compilepython"))
+ self.assertIn("false", d.getVar("do_compile"))
+ self.assertIn("else", d.getVar("do_compilepython"))
+ check_function_flags(d)
+
+ with open(recipename, "a+") as f:
+ f.write("\ninherit secondclass\n")
+ f.flush()
+ with open(recipename2, "a+") as f:
+ f.write("\ninherit secondclass\n")
+ f.flush()
+
+ d = bb.parse.handle(recipename, bb.data.createCopy(self.d))['']
+ self.assertIn("secondclass_do_compile", d.getVar("do_compile"))
+ self.assertIn("secondclass_do_compilepython", d.getVar("do_compilepython"))
+ check_function_flags(d)
+
+ d = bb.parse.handle(recipename2, bb.data.createCopy(self.d))['']
+ self.assertNotIn("someclass_do_compile", d.getVar("do_compile"))
+ self.assertNotIn("someclass_do_compilepython", d.getVar("do_compilepython"))
+ self.assertIn("false", d.getVar("do_compile"))
+ self.assertIn("else", d.getVar("do_compilepython"))
+ check_function_flags(d)
diff --git a/lib/bb/tests/runqueue-tests/conf/bitbake.conf b/lib/bb/tests/runqueue-tests/conf/bitbake.conf
index efebf001a..05d7fd07d 100644
--- a/lib/bb/tests/runqueue-tests/conf/bitbake.conf
+++ b/lib/bb/tests/runqueue-tests/conf/bitbake.conf
@@ -12,6 +12,6 @@ STAMP = "${TMPDIR}/stamps/${PN}"
T = "${TMPDIR}/workdir/${PN}/temp"
BB_NUMBER_THREADS = "4"
-BB_HASHBASE_WHITELIST = "BB_CURRENT_MC BB_HASHSERVE TMPDIR TOPDIR SLOWTASKS SSTATEVALID FILE"
+BB_BASEHASH_IGNORE_VARS = "BB_CURRENT_MC BB_HASHSERVE TMPDIR TOPDIR SLOWTASKS SSTATEVALID FILE BB_CURRENTTASK"
include conf/multiconfig/${BB_CURRENT_MC}.conf
diff --git a/lib/bb/tests/runqueue.py b/lib/bb/tests/runqueue.py
index 3d51779d6..cc87e8d6a 100644
--- a/lib/bb/tests/runqueue.py
+++ b/lib/bb/tests/runqueue.py
@@ -29,13 +29,14 @@ class RunQueueTests(unittest.TestCase):
def run_bitbakecmd(self, cmd, builddir, sstatevalid="", slowtasks="", extraenv=None, cleanup=False):
env = os.environ.copy()
env["BBPATH"] = os.path.realpath(os.path.join(os.path.dirname(__file__), "runqueue-tests"))
- env["BB_ENV_EXTRAWHITE"] = "SSTATEVALID SLOWTASKS"
+ env["BB_ENV_PASSTHROUGH_ADDITIONS"] = "SSTATEVALID SLOWTASKS TOPDIR"
env["SSTATEVALID"] = sstatevalid
env["SLOWTASKS"] = slowtasks
+ env["TOPDIR"] = builddir
if extraenv:
for k in extraenv:
env[k] = extraenv[k]
- env["BB_ENV_EXTRAWHITE"] = env["BB_ENV_EXTRAWHITE"] + " " + k
+ env["BB_ENV_PASSTHROUGH_ADDITIONS"] = env["BB_ENV_PASSTHROUGH_ADDITIONS"] + " " + k
try:
output = subprocess.check_output(cmd, env=env, stderr=subprocess.STDOUT,universal_newlines=True, cwd=builddir)
print(output)
@@ -58,6 +59,8 @@ class RunQueueTests(unittest.TestCase):
expected = ['a1:' + x for x in self.alltasks]
self.assertEqual(set(tasks), set(expected))
+ self.shutdown(tempdir)
+
def test_single_setscenevalid(self):
with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
cmd = ["bitbake", "a1"]
@@ -68,6 +71,8 @@ class RunQueueTests(unittest.TestCase):
'a1:populate_sysroot', 'a1:build']
self.assertEqual(set(tasks), set(expected))
+ self.shutdown(tempdir)
+
def test_intermediate_setscenevalid(self):
with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
cmd = ["bitbake", "a1"]
@@ -77,6 +82,8 @@ class RunQueueTests(unittest.TestCase):
'a1:populate_sysroot_setscene', 'a1:build']
self.assertEqual(set(tasks), set(expected))
+ self.shutdown(tempdir)
+
def test_intermediate_notcovered(self):
with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
cmd = ["bitbake", "a1"]
@@ -86,6 +93,8 @@ class RunQueueTests(unittest.TestCase):
'a1:package_qa_setscene', 'a1:build', 'a1:populate_sysroot_setscene']
self.assertEqual(set(tasks), set(expected))
+ self.shutdown(tempdir)
+
def test_all_setscenevalid(self):
with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
cmd = ["bitbake", "a1"]
@@ -95,6 +104,8 @@ class RunQueueTests(unittest.TestCase):
'a1:package_qa_setscene', 'a1:build', 'a1:populate_sysroot_setscene']
self.assertEqual(set(tasks), set(expected))
+ self.shutdown(tempdir)
+
def test_no_settasks(self):
with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
cmd = ["bitbake", "a1", "-c", "patch"]
@@ -103,6 +114,8 @@ class RunQueueTests(unittest.TestCase):
expected = ['a1:fetch', 'a1:unpack', 'a1:patch']
self.assertEqual(set(tasks), set(expected))
+ self.shutdown(tempdir)
+
def test_mix_covered_notcovered(self):
with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
cmd = ["bitbake", "a1:do_patch", "a1:do_populate_sysroot"]
@@ -111,6 +124,7 @@ class RunQueueTests(unittest.TestCase):
expected = ['a1:fetch', 'a1:unpack', 'a1:patch', 'a1:populate_sysroot_setscene']
self.assertEqual(set(tasks), set(expected))
+ self.shutdown(tempdir)
# Test targets with intermediate setscene tasks alongside a target with no intermediate setscene tasks
def test_mixed_direct_tasks_setscene_tasks(self):
@@ -122,6 +136,8 @@ class RunQueueTests(unittest.TestCase):
'a1:package_qa_setscene', 'a1:build', 'a1:populate_sysroot_setscene']
self.assertEqual(set(tasks), set(expected))
+ self.shutdown(tempdir)
+
# This test slows down the execution of do_package_setscene until after other real tasks have
# started running which tests for a bug where tasks were being lost from the buildable list of real
# tasks if they weren't in tasks_covered or tasks_notcovered
@@ -136,12 +152,14 @@ class RunQueueTests(unittest.TestCase):
'a1:populate_sysroot', 'a1:build']
self.assertEqual(set(tasks), set(expected))
- def test_setscenewhitelist(self):
+ self.shutdown(tempdir)
+
+ def test_setscene_ignore_tasks(self):
with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
cmd = ["bitbake", "a1"]
extraenv = {
"BB_SETSCENE_ENFORCE" : "1",
- "BB_SETSCENE_ENFORCE_WHITELIST" : "a1:do_package_write_rpm a1:do_build"
+ "BB_SETSCENE_ENFORCE_IGNORE_TASKS" : "a1:do_package_write_rpm a1:do_build"
}
sstatevalid = "a1:do_package a1:do_package_qa a1:do_packagedata a1:do_package_write_ipk a1:do_populate_lic a1:do_populate_sysroot"
tasks = self.run_bitbakecmd(cmd, tempdir, sstatevalid, extraenv=extraenv)
@@ -149,6 +167,8 @@ class RunQueueTests(unittest.TestCase):
'a1:populate_sysroot_setscene', 'a1:package_setscene']
self.assertEqual(set(tasks), set(expected))
+ self.shutdown(tempdir)
+
# Tests for problems with dependencies between setscene tasks
def test_no_setscenevalid_harddeps(self):
with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
@@ -162,6 +182,8 @@ class RunQueueTests(unittest.TestCase):
'd1:populate_sysroot', 'd1:build']
self.assertEqual(set(tasks), set(expected))
+ self.shutdown(tempdir)
+
def test_no_setscenevalid_withdeps(self):
with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
cmd = ["bitbake", "b1"]
@@ -172,6 +194,8 @@ class RunQueueTests(unittest.TestCase):
expected.remove('a1:package_qa')
self.assertEqual(set(tasks), set(expected))
+ self.shutdown(tempdir)
+
def test_single_a1_setscenevalid_withdeps(self):
with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
cmd = ["bitbake", "b1"]
@@ -182,6 +206,8 @@ class RunQueueTests(unittest.TestCase):
'a1:populate_sysroot'] + ['b1:' + x for x in self.alltasks]
self.assertEqual(set(tasks), set(expected))
+ self.shutdown(tempdir)
+
def test_single_b1_setscenevalid_withdeps(self):
with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
cmd = ["bitbake", "b1"]
@@ -193,6 +219,8 @@ class RunQueueTests(unittest.TestCase):
expected.remove('b1:package')
self.assertEqual(set(tasks), set(expected))
+ self.shutdown(tempdir)
+
def test_intermediate_setscenevalid_withdeps(self):
with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
cmd = ["bitbake", "b1"]
@@ -203,6 +231,8 @@ class RunQueueTests(unittest.TestCase):
expected.remove('b1:package')
self.assertEqual(set(tasks), set(expected))
+ self.shutdown(tempdir)
+
def test_all_setscenevalid_withdeps(self):
with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
cmd = ["bitbake", "b1"]
@@ -213,6 +243,8 @@ class RunQueueTests(unittest.TestCase):
'b1:packagedata_setscene', 'b1:package_qa_setscene', 'b1:populate_sysroot_setscene']
self.assertEqual(set(tasks), set(expected))
+ self.shutdown(tempdir)
+
def test_multiconfig_setscene_optimise(self):
with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
extraenv = {
@@ -232,6 +264,8 @@ class RunQueueTests(unittest.TestCase):
expected.remove(x)
self.assertEqual(set(tasks), set(expected))
+ self.shutdown(tempdir)
+
def test_multiconfig_bbmask(self):
# This test validates that multiconfigs can independently mask off
# recipes they do not want with BBMASK. It works by having recipes
@@ -248,11 +282,13 @@ class RunQueueTests(unittest.TestCase):
cmd = ["bitbake", "mc:mc-1:fails-mc2", "mc:mc_2:fails-mc1"]
self.run_bitbakecmd(cmd, tempdir, "", extraenv=extraenv)
+ self.shutdown(tempdir)
+
def test_multiconfig_mcdepends(self):
with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
extraenv = {
"BBMULTICONFIG" : "mc-1 mc_2",
- "BB_SIGNATURE_HANDLER" : "TestMulticonfigDepends",
+ "BB_SIGNATURE_HANDLER" : "basichash",
"EXTRA_BBFILES": "${COREBASE}/recipes/fails-mc/*.bb",
}
tasks = self.run_bitbakecmd(["bitbake", "mc:mc-1:f1"], tempdir, "", extraenv=extraenv, cleanup=True)
@@ -278,7 +314,8 @@ class RunQueueTests(unittest.TestCase):
["mc_2:a1:%s" % t for t in rerun_tasks]
self.assertEqual(set(tasks), set(expected))
- @unittest.skipIf(sys.version_info < (3, 5, 0), 'Python 3.5 or later required')
+ self.shutdown(tempdir)
+
def test_hashserv_single(self):
with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
extraenv = {
@@ -304,7 +341,6 @@ class RunQueueTests(unittest.TestCase):
self.shutdown(tempdir)
- @unittest.skipIf(sys.version_info < (3, 5, 0), 'Python 3.5 or later required')
def test_hashserv_double(self):
with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
extraenv = {
@@ -329,7 +365,6 @@ class RunQueueTests(unittest.TestCase):
self.shutdown(tempdir)
- @unittest.skipIf(sys.version_info < (3, 5, 0), 'Python 3.5 or later required')
def test_hashserv_multiple_setscene(self):
# Runs e1:do_package_setscene twice
with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
@@ -361,7 +396,6 @@ class RunQueueTests(unittest.TestCase):
def shutdown(self, tempdir):
# Wait for the hashserve socket to disappear else we'll see races with the tempdir cleanup
- while os.path.exists(tempdir + "/hashserve.sock"):
+ while (os.path.exists(tempdir + "/hashserve.sock") or os.path.exists(tempdir + "/cache/hashserv.db-wal") or os.path.exists(tempdir + "/bitbake.lock")):
time.sleep(0.5)
-
diff --git a/lib/bb/tests/siggen.py b/lib/bb/tests/siggen.py
index c21ab4e4f..0dc67e6cc 100644
--- a/lib/bb/tests/siggen.py
+++ b/lib/bb/tests/siggen.py
@@ -17,75 +17,12 @@ import bb.siggen
class SiggenTest(unittest.TestCase):
- def test_clean_basepath_simple_target_basepath(self):
- basepath = '/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask'
- expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask'
+ def test_build_pnid(self):
+ tests = {
+ ('', 'helloworld', 'do_sometask') : 'helloworld:do_sometask',
+ ('XX', 'helloworld', 'do_sometask') : 'mc:XX:helloworld:do_sometask',
+ }
- actual_cleaned = bb.siggen.clean_basepath(basepath)
+ for t in tests:
+ self.assertEqual(bb.siggen.build_pnid(*t), tests[t])
- self.assertEqual(actual_cleaned, expected_cleaned)
-
- def test_clean_basepath_basic_virtual_basepath(self):
- basepath = 'virtual:something:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask'
- expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask:virtual:something'
-
- actual_cleaned = bb.siggen.clean_basepath(basepath)
-
- self.assertEqual(actual_cleaned, expected_cleaned)
-
- def test_clean_basepath_mc_basepath(self):
- basepath = 'mc:somemachine:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask'
- expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask:mc:somemachine'
-
- actual_cleaned = bb.siggen.clean_basepath(basepath)
-
- self.assertEqual(actual_cleaned, expected_cleaned)
-
- def test_clean_basepath_virtual_long_prefix_basepath(self):
- basepath = 'virtual:something:A:B:C:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask'
- expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask:virtual:something:A:B:C'
-
- actual_cleaned = bb.siggen.clean_basepath(basepath)
-
- self.assertEqual(actual_cleaned, expected_cleaned)
-
- def test_clean_basepath_mc_virtual_basepath(self):
- basepath = 'mc:somemachine:virtual:something:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask'
- expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask:virtual:something:mc:somemachine'
-
- actual_cleaned = bb.siggen.clean_basepath(basepath)
-
- self.assertEqual(actual_cleaned, expected_cleaned)
-
- def test_clean_basepath_mc_virtual_long_prefix_basepath(self):
- basepath = 'mc:X:virtual:something:C:B:A:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask'
- expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask:virtual:something:C:B:A:mc:X'
-
- actual_cleaned = bb.siggen.clean_basepath(basepath)
-
- self.assertEqual(actual_cleaned, expected_cleaned)
-
-
- # def test_clean_basepath_performance(self):
- # input_basepaths = [
- # 'mc:X:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
- # 'mc:X:virtual:something:C:B:A:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
- # 'virtual:something:C:B:A:/different/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
- # 'virtual:something:A:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
- # '/this/is/most/common/input/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
- # '/and/should/be/tested/with/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
- # '/more/weight/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
- # ]
-
- # time_start = time.time()
-
- # i = 2000000
- # while i >= 0:
- # for basepath in input_basepaths:
- # bb.siggen.clean_basepath(basepath)
- # i -= 1
-
- # elapsed = time.time() - time_start
- # print('{} ({}s)'.format(self.id(), round(elapsed, 3)))
-
- # self.assertTrue(False)
diff --git a/lib/bb/tests/utils.py b/lib/bb/tests/utils.py
index a7ff33db5..c363f62d7 100644
--- a/lib/bb/tests/utils.py
+++ b/lib/bb/tests/utils.py
@@ -418,7 +418,7 @@ MULTILINE = " stuff \\
['MULTILINE'],
handle_var)
- testvalue = re.sub('\s+', ' ', value_in_callback.strip())
+ testvalue = re.sub(r'\s+', ' ', value_in_callback.strip())
self.assertEqual(expected_value, testvalue)
class EditBbLayersConf(unittest.TestCase):
@@ -666,3 +666,21 @@ class GetReferencedVars(unittest.TestCase):
layers = [{"SRC_URI"}, {"QT_GIT", "QT_MODULE", "QT_MODULE_BRANCH_PARAM", "QT_GIT_PROTOCOL"}, {"QT_GIT_PROJECT", "QT_MODULE_BRANCH", "BPN"}, {"PN", "SPECIAL_PKGSUFFIX"}]
self.check_referenced("${SRC_URI}", layers)
+
+
+class EnvironmentTests(unittest.TestCase):
+ def test_environment(self):
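+ # bb.utils.environment is a context manager: it sets the given variables
+ # in os.environ and restores the previous environment on exit.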
+ os.environ["A"] = "this is A"
+ self.assertIn("A", os.environ)
+ self.assertEqual(os.environ["A"], "this is A")
+ self.assertNotIn("B", os.environ)
+
+ with bb.utils.environment(B="this is B"):
+ self.assertIn("A", os.environ)
+ self.assertEqual(os.environ["A"], "this is A")
+ self.assertIn("B", os.environ)
+ self.assertEqual(os.environ["B"], "this is B")
+
+ self.assertIn("A", os.environ)
+ self.assertEqual(os.environ["A"], "this is A")
+ self.assertNotIn("B", os.environ)
diff --git a/lib/bb/tinfoil.py b/lib/bb/tinfoil.py
index 27a341541..dcd3910cc 100644
--- a/lib/bb/tinfoil.py
+++ b/lib/bb/tinfoil.py
@@ -10,6 +10,7 @@
import logging
import os
import sys
+import time
import atexit
import re
from collections import OrderedDict, defaultdict
@@ -324,11 +325,11 @@ class Tinfoil:
self.recipes_parsed = False
self.quiet = 0
self.oldhandlers = self.logger.handlers[:]
+ self.localhandlers = []
if setup_logging:
# This is the *client-side* logger, nothing to do with
# logging messages from the server
bb.msg.logger_create('BitBake', output)
- self.localhandlers = []
for handler in self.logger.handlers:
if handler not in self.oldhandlers:
self.localhandlers.append(handler)
@@ -448,7 +449,13 @@ class Tinfoil:
self.run_actions(config_params)
self.recipes_parsed = True
- def run_command(self, command, *params):
+ def modified_files(self):
+ """
+ Notify the server that it needs to revalidate its caches since the client has modified files
+ """
+ self.run_command("revalidateCaches")
+
+ def run_command(self, command, *params, handle_events=True):
"""
Run a command on the server (as implemented in bb.command).
Note that there are two types of command - synchronous and
@@ -468,7 +475,7 @@ class Tinfoil:
try:
result = self.server_connection.connection.runCommand(commandline)
finally:
- while True:
+ while handle_events:
event = self.wait_event()
if not event:
break
@@ -493,7 +500,7 @@ class Tinfoil:
Wait for an event from the server for the specified time.
A timeout of 0 means don't wait if there are no events in the queue.
Returns the next event in the queue or None if the timeout was
- reached. Note that in order to recieve any events you will
+ reached. Note that in order to receive any events you will
first need to set the internal event mask using set_event_mask()
(otherwise whatever event mask the UI set up will be in effect).
"""
@@ -729,6 +736,7 @@ class Tinfoil:
ret = self.run_command('buildTargets', targets, task)
if handle_events:
+ lastevent = time.time()
result = False
# Borrowed from knotty, instead somewhat hackily we use the helper
# as the object to store "shutdown" on
@@ -741,6 +749,7 @@ class Tinfoil:
try:
event = self.wait_event(0.25)
if event:
+ lastevent = time.time()
if event_callback and event_callback(event):
continue
if helper.eventHandler(event):
@@ -761,7 +770,7 @@ class Tinfoil:
if parseprogress:
parseprogress.update(event.progress)
else:
- bb.warn("Got ProcessProgress event for someting that never started?")
+ bb.warn("Got ProcessProgress event for something that never started?")
continue
if isinstance(event, bb.event.ProcessFinished):
if self.quiet > 1:
@@ -773,7 +782,7 @@ class Tinfoil:
if isinstance(event, bb.command.CommandCompleted):
result = True
break
- if isinstance(event, bb.command.CommandFailed):
+ if isinstance(event, (bb.command.CommandFailed, bb.command.CommandExit)):
self.logger.error(str(event))
result = False
break
@@ -785,10 +794,13 @@ class Tinfoil:
self.logger.error(str(event))
result = False
break
-
elif helper.shutdown > 1:
break
termfilter.updateFooter()
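+ # Watchdog: no events for three minutes, so ping the server without
+ # event handling and give up if it is unreachable.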
+ if time.time() > (lastevent + (3*60)):
+ if not self.run_command('ping', handle_events=False):
+ print("\nUnable to ping server and no events, closing down...\n")
+ return False
except KeyboardInterrupt:
termfilter.clearFooter()
if helper.shutdown == 1:
diff --git a/lib/bb/ui/buildinfohelper.py b/lib/bb/ui/buildinfohelper.py
index 43aa59284..8b212b780 100644
--- a/lib/bb/ui/buildinfohelper.py
+++ b/lib/bb/ui/buildinfohelper.py
@@ -45,7 +45,7 @@ from pprint import pformat
import logging
from datetime import datetime, timedelta
-from django.db import transaction, connection
+from django.db import transaction
# pylint: disable=invalid-name
@@ -227,6 +227,12 @@ class ORMWrapper(object):
build.completed_on = timezone.now()
build.outcome = outcome
build.save()
+
+ # We force a sync point here to force the outcome status commit,
+ # which resolves a race condition with the build completion takedown
+ transaction.set_autocommit(True)
+ transaction.set_autocommit(False)
+
signal_runbuilds()
def update_target_set_license_manifest(self, target, license_manifest_path):
@@ -483,14 +489,14 @@ class ORMWrapper(object):
# we already created the root directory, so ignore any
# entry for it
- if len(path) == 0:
+ if not path:
continue
parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
- if len(parent_path) == 0:
+ if not parent_path:
parent_path = "/"
parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
- tf_obj = Target_File.objects.create(
+ Target_File.objects.create(
target = target_obj,
path = path,
size = size,
@@ -555,7 +561,7 @@ class ORMWrapper(object):
parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
- tf_obj = Target_File.objects.create(
+ Target_File.objects.create(
target = target_obj,
path = path,
size = size,
@@ -571,7 +577,7 @@ class ORMWrapper(object):
assert isinstance(build_obj, Build)
assert isinstance(target_obj, Target)
- errormsg = ""
+ errormsg = []
for p in packagedict:
# Search name switches round the installed name vs package name
# by default installed name == package name
@@ -633,10 +639,10 @@ class ORMWrapper(object):
packagefile_objects.append(Package_File( package = packagedict[p]['object'],
path = targetpath,
size = targetfilesize))
- if len(packagefile_objects):
+ if packagefile_objects:
Package_File.objects.bulk_create(packagefile_objects)
except KeyError as e:
- errormsg += " stpi: Key error, package %s key %s \n" % ( p, e )
+ errormsg.append(" stpi: Key error, package %s key %s \n" % (p, e))
# save disk installed size
packagedict[p]['object'].installed_size = packagedict[p]['size']
@@ -673,13 +679,13 @@ class ORMWrapper(object):
logger.warning("Could not add dependency to the package %s "
"because %s is an unknown package", p, px)
- if len(packagedeps_objs) > 0:
+ if packagedeps_objs:
Package_Dependency.objects.bulk_create(packagedeps_objs)
else:
logger.info("No package dependencies created")
- if len(errormsg) > 0:
- logger.warning("buildinfohelper: target_package_info could not identify recipes: \n%s", errormsg)
+ if errormsg:
+ logger.warning("buildinfohelper: target_package_info could not identify recipes: \n%s", "".join(errormsg))
def save_target_image_file_information(self, target_obj, file_name, file_size):
Target_Image_File.objects.create(target=target_obj,
@@ -767,7 +773,7 @@ class ORMWrapper(object):
packagefile_objects.append(Package_File( package = bp_object,
path = path,
size = package_info['FILES_INFO'][path] ))
- if len(packagefile_objects):
+ if packagefile_objects:
Package_File.objects.bulk_create(packagefile_objects)
def _po_byname(p):
@@ -809,7 +815,7 @@ class ORMWrapper(object):
packagedeps_objs.append(Package_Dependency( package = bp_object,
depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RCONFLICTS))
- if len(packagedeps_objs) > 0:
+ if packagedeps_objs:
Package_Dependency.objects.bulk_create(packagedeps_objs)
return bp_object
@@ -826,7 +832,7 @@ class ORMWrapper(object):
desc = vardump[root_var]['doc']
if desc is None:
desc = ''
- if len(desc):
+ if desc:
HelpText.objects.get_or_create(build=build_obj,
area=HelpText.VARIABLE,
key=k, text=desc)
@@ -846,7 +852,7 @@ class ORMWrapper(object):
file_name = vh['file'],
line_number = vh['line'],
operation = vh['op']))
- if len(varhist_objects):
+ if varhist_objects:
VariableHistory.objects.bulk_create(varhist_objects)
@@ -893,9 +899,6 @@ class BuildInfoHelper(object):
self.task_order = 0
self.autocommit_step = 1
self.server = server
- # we use manual transactions if the database doesn't autocommit on us
- if not connection.features.autocommits_when_autocommit_is_off:
- transaction.set_autocommit(False)
self.orm_wrapper = ORMWrapper()
self.has_build_history = has_build_history
self.tmp_dir = self.server.runCommand(["getVariable", "TMPDIR"])[0]
@@ -1059,27 +1062,6 @@ class BuildInfoHelper(object):
return recipe_info
- def _get_path_information(self, task_object):
- self._ensure_build()
-
- assert isinstance(task_object, Task)
- build_stats_format = "{tmpdir}/buildstats/{buildname}/{package}/"
- build_stats_path = []
-
- for t in self.internal_state['targets']:
- buildname = self.internal_state['build'].build_name
- pe, pv = task_object.recipe.version.split(":",1)
- if len(pe) > 0:
- package = task_object.recipe.name + "-" + pe + "_" + pv
- else:
- package = task_object.recipe.name + "-" + pv
-
- build_stats_path.append(build_stats_format.format(tmpdir=self.tmp_dir,
- buildname=buildname,
- package=package))
-
- return build_stats_path
-
################################
## external available methods to store information
@@ -1313,12 +1295,11 @@ class BuildInfoHelper(object):
task_information['outcome'] = Task.OUTCOME_FAILED
del self.internal_state['taskdata'][identifier]
- if not connection.features.autocommits_when_autocommit_is_off:
- # we force a sync point here, to get the progress bar to show
- if self.autocommit_step % 3 == 0:
- transaction.set_autocommit(True)
- transaction.set_autocommit(False)
- self.autocommit_step += 1
+ # we force a sync point here, to get the progress bar to show
+ if self.autocommit_step % 3 == 0:
+ transaction.set_autocommit(True)
+ transaction.set_autocommit(False)
+ self.autocommit_step += 1
self.orm_wrapper.get_update_task_object(task_information, True) # must exist
@@ -1404,7 +1385,7 @@ class BuildInfoHelper(object):
assert 'pn' in event._depgraph
assert 'tdepends' in event._depgraph
- errormsg = ""
+ errormsg = []
# save layer version priorities
if 'layer-priorities' in event._depgraph.keys():
@@ -1496,7 +1477,7 @@ class BuildInfoHelper(object):
elif dep in self.internal_state['recipes']:
dependency = self.internal_state['recipes'][dep]
else:
- errormsg += " stpd: KeyError saving recipe dependency for %s, %s \n" % (recipe, dep)
+ errormsg.append(" stpd: KeyError saving recipe dependency for %s, %s \n" % (recipe, dep))
continue
recipe_dep = Recipe_Dependency(recipe=target,
depends_on=dependency,
@@ -1537,8 +1518,8 @@ class BuildInfoHelper(object):
taskdeps_objects.append(Task_Dependency( task = target, depends_on = dep ))
Task_Dependency.objects.bulk_create(taskdeps_objects)
- if len(errormsg) > 0:
- logger.warning("buildinfohelper: dependency info not identify recipes: \n%s", errormsg)
+ if errormsg:
+ logger.warning("buildinfohelper: dependency info not identify recipes: \n%s", "".join(errormsg))
def store_build_package_information(self, event):
@@ -1618,7 +1599,7 @@ class BuildInfoHelper(object):
if 'backlog' in self.internal_state:
# if we have a backlog of events, do our best to save them here
- if len(self.internal_state['backlog']):
+ if self.internal_state['backlog']:
tempevent = self.internal_state['backlog'].pop()
logger.debug("buildinfohelper: Saving stored event %s "
% tempevent)
@@ -1765,7 +1746,6 @@ class BuildInfoHelper(object):
buildname = self.server.runCommand(['getVariable', 'BUILDNAME'])[0]
machine = self.server.runCommand(['getVariable', 'MACHINE'])[0]
- image_name = self.server.runCommand(['getVariable', 'IMAGE_NAME'])[0]
# location of the manifest files for this build;
# note that this file is only produced if an image is produced
@@ -1786,6 +1766,18 @@ class BuildInfoHelper(object):
# filter out anything which isn't an image target
image_targets = [target for target in targets if target.is_image]
+ if image_targets:
+ # If there are image targets, retrieve IMAGE_NAME
+ image_name = self.server.runCommand(['getVariable', 'IMAGE_NAME'])[0]
+ if not image_name:
+ # The build produced an image but IMAGE_NAME was not set as a variable
+ logger.info("IMAGE_NAME not found, extracting from bitbake command")
+ cmd = self.server.runCommand(['getVariable','BB_CMDLINE'])[0]
+ #filter out tokens that are command line options
+ cmd = [token for token in cmd if not token.startswith('-')]
+ image_name = cmd[1].split(':', 1)[0] # remove everything after : in image name
+ logger.info("IMAGE_NAME found as : %s " % image_name)
+
for image_target in image_targets:
# this is set to True if we find at least one file relating to
# this target; if this remains False after the scan, we copy the
@@ -1990,8 +1982,6 @@ class BuildInfoHelper(object):
# Do not skip command line build events
self.store_log_event(tempevent,False)
- if not connection.features.autocommits_when_autocommit_is_off:
- transaction.set_autocommit(True)
# unset the brbe; this is to prevent subsequent command-line builds
# being incorrectly attached to the previous Toaster-triggered build;
diff --git a/lib/bb/ui/eventreplay.py b/lib/bb/ui/eventreplay.py
new file mode 100644
index 000000000..d62ecbfa5
--- /dev/null
+++ b/lib/bb/ui/eventreplay.py
@@ -0,0 +1,86 @@
+#!/usr/bin/env python3
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+# This file re-uses code spread throughout other Bitbake source files.
+# As such, all other copyrights belong to their own right holders.
+#
+
+
+import os
+import sys
+import json
+import pickle
+import codecs
+
+
+class EventPlayer:
+ """Emulate a connection to a bitbake server."""
+
+ def __init__(self, eventfile, variables):
+ self.eventfile = eventfile
+ self.variables = variables
+ self.eventmask = []
+
+ def waitEvent(self, _timeout):
+ """Read event from the file."""
+ line = self.eventfile.readline().strip()
+ if not line:
+ return
+ try:
+ decodedline = json.loads(line)
+ if 'allvariables' in decodedline:
+ self.variables = decodedline['allvariables']
+ return
+ if 'vars' not in decodedline:
+ raise ValueError
+ event_str = decodedline['vars'].encode('utf-8')
+ event = pickle.loads(codecs.decode(event_str, 'base64'))
+ event_name = "%s.%s" % (event.__module__, event.__class__.__name__)
+ if event_name not in self.eventmask:
+ return
+ return event
+ except ValueError as err:
+ print("Failed loading ", line)
+ raise err
+
+ def runCommand(self, command_line):
+ """Emulate running a command on the server."""
+ name = command_line[0]
+
+ if name == "getVariable":
+ var_name = command_line[1]
+ variable = self.variables.get(var_name)
+ if variable:
+ return variable['v'], None
+ return None, "Missing variable %s" % var_name
+
+ elif name == "getAllKeysWithFlags":
+ dump = {}
+ flaglist = command_line[1]
+ for key, val in self.variables.items():
+ try:
+ if not key.startswith("__"):
+ dump[key] = {
+ 'v': val['v'],
+ 'history' : val['history'],
+ }
+ for flag in flaglist:
+ dump[key][flag] = val[flag]
+ except Exception as err:
+ print(err)
+ return (dump, None)
+
+ elif name == 'setEventMask':
+ self.eventmask = command_line[-1]
+ return True, None
+
+ else:
+ raise Exception("Command %s not implemented" % command_line[0])
+
+ def getEventHandle(self):
+ """
+ This method is called by toasterui.
+ The return value is passed to self.runCommand but not used there.
+ """
+ pass
diff --git a/lib/bb/ui/knotty.py b/lib/bb/ui/knotty.py
index 65ff2727d..f86999bb0 100644
--- a/lib/bb/ui/knotty.py
+++ b/lib/bb/ui/knotty.py
@@ -25,7 +25,7 @@ from itertools import groupby
from bb.ui import uihelper
-featureSet = [bb.cooker.CookerFeatures.SEND_SANITYEVENTS]
+featureSet = [bb.cooker.CookerFeatures.SEND_SANITYEVENTS, bb.cooker.CookerFeatures.BASEDATASTORE_TRACKING]
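+# BASEDATASTORE_TRACKING enables variable history tracking on the base
+# datastore so the UI can report where values were set.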
logger = logging.getLogger("BitBake")
interactive = sys.stdout.isatty()
@@ -179,7 +179,7 @@ class TerminalFilter(object):
new[3] = new[3] & ~termios.ECHO
termios.tcsetattr(fd, termios.TCSADRAIN, new)
curses.setupterm()
- if curses.tigetnum("colors") > 2:
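+ # Honour the NO_COLOR convention: any non-empty NO_COLOR in the
+ # environment disables colour output.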
+ if curses.tigetnum("colors") > 2 and os.environ.get('NO_COLOR', '') == '':
for h in handlers:
try:
h.formatter.enable_color()
@@ -228,7 +228,9 @@ class TerminalFilter(object):
def keepAlive(self, t):
if not self.cuu:
- print("Bitbake still alive (%ds)" % t)
+ print("Bitbake still alive (no events for %ds). Active tasks:" % t)
+ for t in self.helper.running_tasks:
+ print(t)
sys.stdout.flush()
def updateFooter(self):
@@ -250,58 +252,68 @@ class TerminalFilter(object):
return
tasks = []
for t in runningpids:
+ start_time = activetasks[t].get("starttime", None)
+ if start_time:
+ msg = "%s - %s (pid %s)" % (activetasks[t]["title"], self.elapsed(currenttime - start_time), activetasks[t]["pid"])
+ else:
+ msg = "%s (pid %s)" % (activetasks[t]["title"], activetasks[t]["pid"])
progress = activetasks[t].get("progress", None)
if progress is not None:
pbar = activetasks[t].get("progressbar", None)
rate = activetasks[t].get("rate", None)
- start_time = activetasks[t].get("starttime", None)
if not pbar or pbar.bouncing != (progress < 0):
if progress < 0:
- pbar = BBProgress("0: %s (pid %s)" % (activetasks[t]["title"], activetasks[t]["pid"]), 100, widgets=[' ', progressbar.BouncingSlider(), ''], extrapos=3, resize_handler=self.sigwinch_handle)
+ pbar = BBProgress("0: %s" % msg, 100, widgets=[' ', progressbar.BouncingSlider(), ''], extrapos=3, resize_handler=self.sigwinch_handle)
pbar.bouncing = True
else:
- pbar = BBProgress("0: %s (pid %s)" % (activetasks[t]["title"], activetasks[t]["pid"]), 100, widgets=[' ', progressbar.Percentage(), ' ', progressbar.Bar(), ''], extrapos=5, resize_handler=self.sigwinch_handle)
+ pbar = BBProgress("0: %s" % msg, 100, widgets=[' ', progressbar.Percentage(), ' ', progressbar.Bar(), ''], extrapos=5, resize_handler=self.sigwinch_handle)
pbar.bouncing = False
activetasks[t]["progressbar"] = pbar
- tasks.append((pbar, progress, rate, start_time))
+ tasks.append((pbar, msg, progress, rate, start_time))
else:
- start_time = activetasks[t].get("starttime", None)
- if start_time:
- tasks.append("%s - %s (pid %s)" % (activetasks[t]["title"], self.elapsed(currenttime - start_time), activetasks[t]["pid"]))
- else:
- tasks.append("%s (pid %s)" % (activetasks[t]["title"], activetasks[t]["pid"]))
+ tasks.append(msg)
if self.main.shutdown:
- content = "Waiting for %s running tasks to finish:" % len(activetasks)
+ content = pluralise("Waiting for %s running task to finish",
+ "Waiting for %s running tasks to finish", len(activetasks))
+ if not self.quiet:
+ content += ':'
print(content)
else:
+ scene_tasks = "%s of %s" % (self.helper.setscene_current, self.helper.setscene_total)
+ cur_tasks = "%s of %s" % (self.helper.tasknumber_current, self.helper.tasknumber_total)
+
+ content = ''
+ if not self.quiet:
+ msg = "Setscene tasks: %s" % scene_tasks
+ content += msg + "\n"
+ print(msg)
+
if self.quiet:
- content = "Running tasks (%s of %s)" % (self.helper.tasknumber_current, self.helper.tasknumber_total)
+ msg = "Running tasks (%s, %s)" % (scene_tasks, cur_tasks)
elif not len(activetasks):
- content = "No currently running tasks (%s of %s)" % (self.helper.tasknumber_current, self.helper.tasknumber_total)
+ msg = "No currently running tasks (%s)" % cur_tasks
else:
- content = "Currently %2s running tasks (%s of %s)" % (len(activetasks), self.helper.tasknumber_current, self.helper.tasknumber_total)
+ msg = "Currently %2s running tasks (%s)" % (len(activetasks), cur_tasks)
maxtask = self.helper.tasknumber_total
if not self.main_progress or self.main_progress.maxval != maxtask:
widgets = [' ', progressbar.Percentage(), ' ', progressbar.Bar()]
self.main_progress = BBProgress("Running tasks", maxtask, widgets=widgets, resize_handler=self.sigwinch_handle)
self.main_progress.start(False)
- self.main_progress.setmessage(content)
- progress = self.helper.tasknumber_current - 1
- if progress < 0:
- progress = 0
- content = self.main_progress.update(progress)
+ self.main_progress.setmessage(msg)
+ progress = max(0, self.helper.tasknumber_current - 1)
+ content += self.main_progress.update(progress)
print('')
- lines = 1 + int(len(content) / (self.columns + 1))
- if self.quiet == 0:
- for tasknum, task in enumerate(tasks[:(self.rows - 2)]):
+ lines = self.getlines(content)
+ if not self.quiet:
+ for tasknum, task in enumerate(tasks[:(self.rows - 1 - lines)]):
if isinstance(task, tuple):
- pbar, progress, rate, start_time = task
+ pbar, msg, progress, rate, start_time = task
if not pbar.start_time:
pbar.start(False)
if start_time:
pbar.start_time = start_time
- pbar.setmessage('%s:%s' % (tasknum, pbar.msg.split(':', 1)[1]))
+ pbar.setmessage('%s: %s' % (tasknum, msg))
pbar.setextra(rate)
if progress > -1:
content = pbar.update(progress)
@@ -311,11 +323,17 @@ class TerminalFilter(object):
else:
content = "%s: %s" % (tasknum, task)
print(content)
- lines = lines + 1 + int(len(content) / (self.columns + 1))
+ lines = lines + self.getlines(content)
self.footer_present = lines
self.lastpids = runningpids[:]
self.lastcount = self.helper.tasknumber_current
+ def getlines(self, content):
+ lines = 0
+ for line in content.split("\n"):
+ lines = lines + 1 + int(len(line) / (self.columns + 1))
+ return lines
+
def finish(self):
if self.stdinbackup:
fd = sys.stdin.fileno()
@@ -402,6 +420,11 @@ def main(server, eventHandler, params, tf = TerminalFilter):
except bb.BBHandledException:
drain_events_errorhandling(eventHandler)
return 1
+ except Exception as e:
+ # bitbake-server comms failure
+ early_logger = bb.msg.logger_create('bitbake', sys.stdout)
+ early_logger.fatal("Attempting to set server environment: %s", e)
+ return 1
if params.options.quiet == 0:
console_loglevel = loglevel
@@ -567,7 +590,12 @@ def main(server, eventHandler, params, tf = TerminalFilter):
return
llevel, debug_domains = bb.msg.constructLogOptions()
- server.runCommand(["setEventMask", server.getEventHandle(), llevel, debug_domains, _evt_list])
+ try:
+ server.runCommand(["setEventMask", server.getEventHandle(), llevel, debug_domains, _evt_list])
+ except (BrokenPipeError, EOFError) as e:
+ # bitbake-server comms failure
+ logger.fatal("Attempting to set event mask: %s", e)
+ return 1
# The logging_tree module is *extremely* helpful in debugging logging
# domains. Uncomment here to dump the logging tree when bitbake starts
@@ -576,7 +604,11 @@ def main(server, eventHandler, params, tf = TerminalFilter):
universe = False
if not params.observe_only:
- params.updateFromServer(server)
+ try:
+ params.updateFromServer(server)
+ except Exception as e:
+ logger.fatal("Fetching command line: %s", e)
+ return 1
cmdline = params.parseActions()
if not cmdline:
print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
@@ -587,7 +619,12 @@ def main(server, eventHandler, params, tf = TerminalFilter):
if cmdline['action'][0] == "buildTargets" and "universe" in cmdline['action'][1]:
universe = True
- ret, error = server.runCommand(cmdline['action'])
+ try:
+ ret, error = server.runCommand(cmdline['action'])
+ except (BrokenPipeError, EOFError) as e:
+ # bitbake-server comms failure
+ logger.fatal("Command '{}' failed: %s".format(cmdline), e)
+ return 1
if error:
logger.error("Command '%s' failed: %s" % (cmdline, error))
return 1
@@ -605,26 +642,40 @@ def main(server, eventHandler, params, tf = TerminalFilter):
warnings = 0
taskfailures = []
- printinterval = 5000
- lastprint = time.time()
+ printintervaldelta = 10 * 60 # 10 minutes
+ printinterval = printintervaldelta
+ pinginterval = 1 * 60 # 1 minute
+ lastevent = lastprint = time.time()
termfilter = tf(main, helper, console_handlers, params.options.quiet)
atexit.register(termfilter.finish)
- while True:
+ # shutdown levels
+ # 0 - normal operation
+ # 1 - no new task execution, let current running tasks finish
+ # 2 - interrupting currently executing tasks
+ # 3 - we're done, exit
+ while main.shutdown < 3:
try:
if (lastprint + printinterval) <= time.time():
termfilter.keepAlive(printinterval)
- printinterval += 5000
+ printinterval += printintervaldelta
event = eventHandler.waitEvent(0)
if event is None:
- if main.shutdown > 1:
- break
+ if (lastevent + pinginterval) <= time.time():
+ ret, error = server.runCommand(["ping"])
+ if error or not ret:
+ termfilter.clearFooter()
+ print("No reply after pinging server (%s, %s), exiting." % (str(error), str(ret)))
+ return_value = 3
+ main.shutdown = 3
+ lastevent = time.time()
if not parseprogress:
termfilter.updateFooter()
event = eventHandler.waitEvent(0.25)
if event is None:
continue
+ lastevent = time.time()
helper.eventHandler(event)
if isinstance(event, bb.runqueue.runQueueExitWait):
if not main.shutdown:
@@ -646,8 +697,8 @@ def main(server, eventHandler, params, tf = TerminalFilter):
if isinstance(event, logging.LogRecord):
lastprint = time.time()
- printinterval = 5000
- if event.levelno >= bb.msg.BBLogFormatter.ERROR:
+ printinterval = printintervaldelta
+ if event.levelno >= bb.msg.BBLogFormatter.ERRORONCE:
errors = errors + 1
return_value = 1
elif event.levelno == bb.msg.BBLogFormatter.WARNING:
@@ -661,10 +712,10 @@ def main(server, eventHandler, params, tf = TerminalFilter):
continue
# Prefix task messages with recipe/task
- if event.taskpid in helper.pidmap and event.levelno != bb.msg.BBLogFormatter.PLAIN:
+ if event.taskpid in helper.pidmap and event.levelno not in [bb.msg.BBLogFormatter.PLAIN, bb.msg.BBLogFormatter.WARNONCE, bb.msg.BBLogFormatter.ERRORONCE]:
taskinfo = helper.running_tasks[helper.pidmap[event.taskpid]]
event.msg = taskinfo['title'] + ': ' + event.msg
- if hasattr(event, 'fn'):
+ if hasattr(event, 'fn') and event.levelno not in [bb.msg.BBLogFormatter.WARNONCE, bb.msg.BBLogFormatter.ERRORONCE]:
event.msg = event.fn + ': ' + event.msg
logging.getLogger(event.name).handle(event)
continue
@@ -729,15 +780,15 @@ def main(server, eventHandler, params, tf = TerminalFilter):
if event.error:
errors = errors + 1
logger.error(str(event))
- main.shutdown = 2
+ main.shutdown = 3
continue
if isinstance(event, bb.command.CommandExit):
if not return_value:
return_value = event.exitcode
- main.shutdown = 2
+ main.shutdown = 3
continue
if isinstance(event, (bb.command.CommandCompleted, bb.cooker.CookerExit)):
- main.shutdown = 2
+ main.shutdown = 3
continue
if isinstance(event, bb.event.MultipleProviders):
logger.info(str(event))
@@ -753,7 +804,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
continue
if isinstance(event, bb.runqueue.sceneQueueTaskStarted):
- logger.info("Running setscene task %d of %d (%s)" % (event.stats.completed + event.stats.active + event.stats.failed + 1, event.stats.total, event.taskstring))
+ logger.info("Running setscene task %d of %d (%s)" % (event.stats.setscene_covered + event.stats.setscene_active + event.stats.setscene_notcovered + 1, event.stats.setscene_total, event.taskstring))
continue
if isinstance(event, bb.runqueue.runQueueTaskStarted):
@@ -822,15 +873,26 @@ def main(server, eventHandler, params, tf = TerminalFilter):
logger.error("Unknown event: %s", event)
+ except (BrokenPipeError, EOFError) as e:
+ # bitbake-server comms failure, don't attempt further comms and exit
+ logger.fatal("Executing event: %s", e)
+ return_value = 1
+ errors = errors + 1
+ main.shutdown = 3
except EnvironmentError as ioerror:
termfilter.clearFooter()
# ignore interrupted io
if ioerror.args[0] == 4:
continue
sys.stderr.write(str(ioerror))
- if not params.observe_only:
- _, error = server.runCommand(["stateForceShutdown"])
main.shutdown = 2
+ if not params.observe_only:
+ try:
+ _, error = server.runCommand(["stateForceShutdown"])
+ except (BrokenPipeError, EOFError) as e:
+ # bitbake-server comms failure, don't attempt further comms and exit
+ logger.fatal("Unable to force shutdown: %s", e)
+ main.shutdown = 3
except KeyboardInterrupt:
termfilter.clearFooter()
if params.observe_only:
@@ -839,9 +901,13 @@ def main(server, eventHandler, params, tf = TerminalFilter):
def state_force_shutdown():
print("\nSecond Keyboard Interrupt, stopping...\n")
- _, error = server.runCommand(["stateForceShutdown"])
- if error:
- logger.error("Unable to cleanly stop: %s" % error)
+ try:
+ _, error = server.runCommand(["stateForceShutdown"])
+ if error:
+ logger.error("Unable to cleanly stop: %s" % error)
+ except (BrokenPipeError, EOFError) as e:
+ # bitbake-server comms failure
+ logger.fatal("Unable to cleanly stop: %s", e)
if not params.observe_only and main.shutdown == 1:
state_force_shutdown()
@@ -854,17 +920,24 @@ def main(server, eventHandler, params, tf = TerminalFilter):
_, error = server.runCommand(["stateShutdown"])
if error:
logger.error("Unable to cleanly shutdown: %s" % error)
+ except (BrokenPipeError, EOFError) as e:
+ # bitbake-server comms failure
+ logger.fatal("Unable to cleanly shutdown: %s", e)
except KeyboardInterrupt:
state_force_shutdown()
main.shutdown = main.shutdown + 1
- pass
except Exception as e:
import traceback
sys.stderr.write(traceback.format_exc())
- if not params.observe_only:
- _, error = server.runCommand(["stateForceShutdown"])
main.shutdown = 2
+ if not params.observe_only:
+ try:
+ _, error = server.runCommand(["stateForceShutdown"])
+ except (BrokenPipeError, EOFError) as e:
+ # bitbake-server comms failure, don't attempt further comms and exit
+ logger.fatal("Unable to force shutdown: %s", e)
+ main.shutdown = 3
return_value = 1
try:
termfilter.clearFooter()
@@ -875,11 +948,11 @@ def main(server, eventHandler, params, tf = TerminalFilter):
for failure in taskfailures:
summary += "\n %s" % failure
if warnings:
- summary += pluralise("\nSummary: There was %s WARNING message shown.",
- "\nSummary: There were %s WARNING messages shown.", warnings)
+ summary += pluralise("\nSummary: There was %s WARNING message.",
+ "\nSummary: There were %s WARNING messages.", warnings)
if return_value and errors:
- summary += pluralise("\nSummary: There was %s ERROR message shown, returning a non-zero exit code.",
- "\nSummary: There were %s ERROR messages shown, returning a non-zero exit code.", errors)
+ summary += pluralise("\nSummary: There was %s ERROR message, returning a non-zero exit code.",
+ "\nSummary: There were %s ERROR messages, returning a non-zero exit code.", errors)
if summary and params.options.quiet == 0:
print(summary)
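The shutdown hunks above all follow one pattern: every server.runCommand() call in a shutdown path is wrapped so that a dead bitbake-server connection (BrokenPipeError/EOFError) cannot raise, and main.shutdown advances to the new state 3 (treat the server as unreachable, attempt no further communication) instead of 2 (ask the server to stop). A condensed sketch of the pattern, with knotty-style names assumed:

    def force_shutdown(server, logger, main):
        try:
            _, error = server.runCommand(["stateForceShutdown"])
            if error:
                logger.error("Unable to cleanly stop: %s" % error)
        except (BrokenPipeError, EOFError) as e:
            # Server is gone: log it and make no further comms attempts
            logger.fatal("Unable to force shutdown: %s", e)
            main.shutdown = 3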
diff --git a/lib/bb/ui/ncurses.py b/lib/bb/ui/ncurses.py
index cf1c876a5..18a706547 100644
--- a/lib/bb/ui/ncurses.py
+++ b/lib/bb/ui/ncurses.py
@@ -227,6 +227,9 @@ class NCursesUI:
shutdown = 0
try:
+ if not params.observe_only:
+ params.updateToServer(server, os.environ.copy())
+
params.updateFromServer(server)
cmdline = params.parseActions()
if not cmdline:
diff --git a/lib/bb/ui/taskexp.py b/lib/bb/ui/taskexp.py
index 2b246710c..bedfd69b0 100644
--- a/lib/bb/ui/taskexp.py
+++ b/lib/bb/ui/taskexp.py
@@ -8,6 +8,7 @@
#
import sys
+import traceback
try:
import gi
@@ -176,7 +177,7 @@ class gtkthread(threading.Thread):
quit = threading.Event()
def __init__(self, shutdown):
threading.Thread.__init__(self)
- self.setDaemon(True)
+ self.daemon = True
self.shutdown = shutdown
if not Gtk.init_check()[0]:
sys.stderr.write("Gtk+ init failed. Make sure DISPLAY variable is set.\n")
@@ -196,6 +197,7 @@ def main(server, eventHandler, params):
gtkgui.start()
try:
+ params.updateToServer(server, os.environ.copy())
params.updateFromServer(server)
cmdline = params.parseActions()
if not cmdline:
@@ -218,6 +220,9 @@ def main(server, eventHandler, params):
except client.Fault as x:
print("XMLRPC Fault getting commandline:\n %s" % x)
return
+ except Exception as e:
+ print("Exception in startup:\n %s" % traceback.format_exc())
+ return
if gtkthread.quit.isSet():
return
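The setDaemon() replacement here (and in uievent.py below) tracks a Python deprecation: the daemon attribute is the long-supported spelling, and Thread.setDaemon() emits a DeprecationWarning from Python 3.10. Equivalent modern form:

    import threading

    t = threading.Thread(target=print, args=("worker",))
    t.daemon = True  # preferred over t.setDaemon(True)
    t.start()
    t.join()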
diff --git a/lib/bb/ui/taskexp_ncurses.py b/lib/bb/ui/taskexp_ncurses.py
new file mode 100755
index 000000000..ea94a4987
--- /dev/null
+++ b/lib/bb/ui/taskexp_ncurses.py
@@ -0,0 +1,1511 @@
+#
+# BitBake Graphical ncurses-based Dependency Explorer
+# * Based on the GTK implementation
+# * Intended to run on any Linux host
+#
+# Copyright (C) 2007 Ross Burton
+# Copyright (C) 2007 - 2008 Richard Purdie
+# Copyright (C) 2022 - 2024 David Reyna
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+#
+# Execution example:
+# $ bitbake -g -u taskexp_ncurses zlib acl
+#
+# Self-test example (executes a script of GUI actions):
+# $ TASK_EXP_UNIT_TEST=1 bitbake -g -u taskexp_ncurses zlib acl
+# ...
+# $ echo $?
+# 0
+# $ TASK_EXP_UNIT_TEST=1 bitbake -g -u taskexp_ncurses zlib acl foo
+# ERROR: Nothing PROVIDES 'foo'. Close matches:
+# ofono
+# $ echo $?
+# 1
+#
+# Self-test with no terminal example (only tests dependency fetch from bitbake):
+# $ TASK_EXP_UNIT_TEST_NOTERM=1 bitbake -g -u taskexp_ncurses quilt
+# $ echo $?
+# 0
+#
+# Features:
+# * Ncurses is used for the presentation layer. Only the 'curses'
+# library is used (none of the extension libraries), plus only
+# one main screen is used (no sub-windows)
+# * Uses the 'generateDepTreeEvent' bitbake event to fetch the
+# dynamic dependency data based on passed recipes
+# * Computes and provides reverse dependencies
+# * Supports task sorting on:
+# (a) Task dependency order within each recipe
+# (b) Pure alphabetical order
+# (c) Provisions for a third sort order (bitbake order?)
+# * The 'Filter' does a "*string*" wildcard filter on tasks in the
+# main window, dynamically re-ordering and re-centering the content
+# * A 'Print' function exports the selected task or its whole recipe
+# task set to the default file "taskdep.txt"
+# * Supports a progress bar for bitbake loads and file printing
+# * Line art for box drawing supported, ASCII art as an alternative
+# * No horizontal scrolling support. Selected task's full name
+# shown in bottom bar
+# * Dynamically catches terminals that are (or become) too small
+# * Exception handling to ensure return to a normal terminal on errors
+# * Debugging support, self test option
+#
+
+import os
+import sys
+import traceback
+import curses
+import re
+import time
+
+# Bitbake server support
+import threading
+from xmlrpc import client
+import bb
+import bb.event
+
+# Dependency indexes (depends_model)
+(TYPE_DEP, TYPE_RDEP) = (0, 1)
+DEPENDS_TYPE = 0
+DEPENDS_TASK = 1
+DEPENDS_DEPS = 2
+# Task indexes (task_list)
+TASK_NAME = 0
+TASK_PRIMARY = 1
+TASK_SORT_ALPHA = 2
+TASK_SORT_DEPS = 3
+TASK_SORT_BITBAKE = 4
+# Sort options (default is SORT_DEPS)
+SORT_ALPHA = 0
+SORT_DEPS = 1
+SORT_BITBAKE_ENABLE = False # NOTE: future sort
+SORT_BITBAKE = 2
+sort_model = SORT_DEPS
+# Print options
+PRINT_MODEL_1 = 0
+PRINT_MODEL_2 = 1
+print_model = PRINT_MODEL_2
+print_file_name = "taskdep_print.log"
+print_file_backup_name = "taskdep_print_backup.log"
+is_printed = False
+is_filter = False
+
+# Standard (and backup) key mappings
+CHAR_NUL = 0 # Used as self-test nop char
+CHAR_BS_H = 8 # Alternate backspace key
+CHAR_TAB = 9
+CHAR_RETURN = 10
+CHAR_ESCAPE = 27
+CHAR_UP = ord('{') # Used as self-test ASCII char
+CHAR_DOWN = ord('}') # Used as self-test ASCII char
+
+# Color_pair IDs
+CURSES_NORMAL = 0
+CURSES_HIGHLIGHT = 1
+CURSES_WARNING = 2
+
+
+#################################################
+### Debugging support
+###
+
+verbose = False
+
+# Debug: message display slow-step through display update issues
+def alert(msg,screen):
+ if msg:
+ screen.addstr(0, 10, '[%-4s]' % msg)
+ screen.refresh();
+ curses.napms(2000)
+ else:
+ if do_line_art:
+ for i in range(10, 24):
+ screen.addch(0, i, curses.ACS_HLINE)
+ else:
+ screen.addstr(0, 10, '-' * 14)
+ screen.refresh();
+
+# Debug: display edge conditions on frame movements
+def debug_frame(nbox_obj):
+ if verbose:
+ nbox_obj.screen.addstr(0, 50, '[I=%2d,O=%2d,S=%3s,H=%2d,M=%4d]' % (
+ nbox_obj.cursor_index,
+ nbox_obj.cursor_offset,
+ nbox_obj.scroll_offset,
+ nbox_obj.inside_height,
+ len(nbox_obj.task_list),
+ ))
+ nbox_obj.screen.refresh()
+
+#
+# Unit test (assumes that 'quilt-native' is always present)
+#
+
+unit_test = os.environ.get('TASK_EXP_UNIT_TEST')
+unit_test_cmnds=[
+ '# Default selected task in primary box',
+ 'tst_selected=<TASK>.do_recipe_qa',
+ '# Default selected task in deps',
+ 'tst_entry=<TAB>',
+ 'tst_selected=',
+ '# Default selected task in rdeps',
+ 'tst_entry=<TAB>',
+ 'tst_selected=<TASK>.do_fetch',
+ "# Test 'select' back to primary box",
+ 'tst_entry=<CR>',
+ '#tst_entry=<DOWN>', # optional injected error
+ 'tst_selected=<TASK>.do_fetch',
+ '# Check filter',
+ 'tst_entry=/uilt-nativ/',
+ 'tst_selected=quilt-native.do_recipe_qa',
+ '# Check print',
+ 'tst_entry=p',
+ 'tst_printed=quilt-native.do_fetch',
+ '#tst_printed=quilt-foo.do_nothing', # optional injected error
+ '# Done!',
+ 'tst_entry=q',
+]
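+# Illustrative walk-through of the script format above: a line such as
+# 'tst_entry=/uilt-nativ/' queues the raw keystrokes "/uilt-nativ/";
+# unit_test_action() below replays them one character per call, so the
+# main loop opens the filter ('/'), types the substring and closes it,
+# and a following 'tst_selected=...' asserts where the cursor landed.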
+unit_test_idx=0
+unit_test_command_chars=''
+unit_test_results=[]
+def unit_test_action(active_package):
+ global unit_test_idx
+ global unit_test_command_chars
+ global unit_test_results
+ ret = CHAR_NUL
+ if unit_test_command_chars:
+ ch = unit_test_command_chars[0]
+ unit_test_command_chars = unit_test_command_chars[1:]
+ time.sleep(0.5)
+ ret = ord(ch)
+ else:
+ line = unit_test_cmnds[unit_test_idx]
+ unit_test_idx += 1
+ line = re.sub('#.*', '', line).strip()
+ line = line.replace('<TASK>',active_package.primary[0])
+ line = line.replace('<TAB>','\t').replace('<CR>','\n')
+ line = line.replace('<UP>','{').replace('<DOWN>','}')
+ if not line: line = 'nop=nop'
+ cmnd,value = line.split('=')
+ if cmnd == 'tst_entry':
+ unit_test_command_chars = value
+ elif cmnd == 'tst_selected':
+ active_selected = active_package.get_selected()
+ if active_selected != value:
+ unit_test_results.append("ERROR:SELFTEST:expected '%s' but got '%s' (NOTE:bitbake may have changed)" % (value,active_selected))
+ ret = ord('Q')
+ else:
+ unit_test_results.append("Pass:SELFTEST:found '%s'" % (value))
+ elif cmnd == 'tst_printed':
+ result = os.system('grep %s %s' % (value,print_file_name))
+ if result:
+ unit_test_results.append("ERROR:PRINTTEST:expected '%s' in '%s'" % (value,print_file_name))
+ ret = ord('Q')
+ else:
+ unit_test_results.append("Pass:PRINTTEST:found '%s'" % (value))
+ # Return the action (CHAR_NUL for no action til next round)
+ return(ret)
+
+# Unit test without an interactive terminal (e.g. ptest)
+unit_test_noterm = os.environ.get('TASK_EXP_UNIT_TEST_NOTERM')
+
+
+#################################################
+### Window frame rendering
+###
+### By default, use the normal line art. Since
+### these extended characters are not ASCII, one
+### must use the ncurses API to render them.
+### The alternate ASCII line art set is optionally
+### available via the 'do_line_art' flag
+
+# By default, render frames using line art
+do_line_art = True
+
+# ASCII render set option
+CHAR_HBAR = '-'
+CHAR_VBAR = '|'
+CHAR_UL_CORNER = '/'
+CHAR_UR_CORNER = '\\'
+CHAR_LL_CORNER = '\\'
+CHAR_LR_CORNER = '/'
+
+# Box frame drawing with line-art
+def line_art_frame(box):
+ x = box.base_x
+ y = box.base_y
+ w = box.width
+ h = box.height + 1
+
+ if do_line_art:
+ for i in range(1, w - 1):
+ box.screen.addch(y, x + i, curses.ACS_HLINE, box.color)
+ box.screen.addch(y + h - 1, x + i, curses.ACS_HLINE, box.color)
+ body_line = "%s" % (' ' * (w - 2))
+ for i in range(1, h - 1):
+ box.screen.addch(y + i, x, curses.ACS_VLINE, box.color)
+ box.screen.addstr(y + i, x + 1, body_line, box.color)
+ box.screen.addch(y + i, x + w - 1, curses.ACS_VLINE, box.color)
+ box.screen.addch(y, x, curses.ACS_ULCORNER, box.color)
+ box.screen.addch(y, x + w - 1, curses.ACS_URCORNER, box.color)
+ box.screen.addch(y + h - 1, x, curses.ACS_LLCORNER, box.color)
+ box.screen.addch(y + h - 1, x + w - 1, curses.ACS_LRCORNER, box.color)
+ else:
+ top_line = "%s%s%s" % (CHAR_UL_CORNER,CHAR_HBAR * (w - 2),CHAR_UR_CORNER)
+ body_line = "%s%s%s" % (CHAR_VBAR,' ' * (w - 2),CHAR_VBAR)
+ bot_line = "%s%s%s" % (CHAR_UR_CORNER,CHAR_HBAR * (w - 2),CHAR_UL_CORNER)
+ tag_line = "%s%s%s" % ('[',CHAR_HBAR * (w - 2),']')
+ # Top bar
+ box.screen.addstr(y, x, top_line)
+ # Middle frame
+ for i in range(1, (h - 1)):
+ box.screen.addstr(y+i, x, body_line)
+ # Bottom bar
+ box.screen.addstr(y + (h - 1), x, bot_line)
+
+# Connect the separate boxes
+def line_art_fixup(box):
+ if do_line_art:
+ box.screen.addch(box.base_y+2, box.base_x, curses.ACS_LTEE, box.color)
+ box.screen.addch(box.base_y+2, box.base_x+box.width-1, curses.ACS_RTEE, box.color)
+
+
+#################################################
+### Ncurses box object : box frame object to display
+### and manage a sub-window's display elements
+### using basic ncurses
+###
+### Supports:
+### * Frame drawing, content (re)drawing
+### * Content scrolling via ArrowUp, ArrowDn, PgUp, PgDN,
+### * Highlighting for active selected item
+### * Content sorting based on selected sort model
+###
+
+class NBox():
+ def __init__(self, screen, label, primary, base_x, base_y, width, height):
+ # Box description
+ self.screen = screen
+ self.label = label
+ self.primary = primary
+ self.color = curses.color_pair(CURSES_NORMAL) if screen else None
+ # Box boundaries
+ self.base_x = base_x
+ self.base_y = base_y
+ self.width = width
+ self.height = height
+ # Cursor/scroll management
+ self.cursor_enable = False
+ self.cursor_index = 0 # Absolute offset
+ self.cursor_offset = 0 # Frame centric offset
+ self.scroll_offset = 0 # Frame centric offset
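+ # Invariant kept by line_up/line_down/page_up/page_down below:
+ # cursor_index == scroll_offset + cursor_offset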
+ # Box specific content
+ # Format of each entry is [package_name,is_primary_recipe,alpha_sort_key,deps_sort_key]
+ self.task_list = []
+
+ @property
+ def inside_width(self):
+ return(self.width-2)
+
+ @property
+ def inside_height(self):
+ return(self.height-2)
+
+ # Populate the box's content, including the sort mappings and the is_primary flag
+ def task_list_append(self,task_name,dep):
+ task_sort_alpha = task_name
+ task_sort_deps = dep.get_dep_sort(task_name)
+ is_primary = False
+ for primary in self.primary:
+ if task_name.startswith(primary+'.'):
+ is_primary = True
+ if SORT_BITBAKE_ENABLE:
+ task_sort_bitbake = dep.get_bb_sort(task_name)
+ self.task_list.append([task_name,is_primary,task_sort_alpha,task_sort_deps,task_sort_bitbake])
+ else:
+ self.task_list.append([task_name,is_primary,task_sort_alpha,task_sort_deps])
+
+ def reset(self):
+ self.task_list = []
+ self.cursor_index = 0 # Absolute offset
+ self.cursor_offset = 0 # Frame centric offset
+ self.scroll_offset = 0 # Frame centric offset
+
+ # Sort the box's content based on the current sort model
+ def sort(self):
+ if SORT_ALPHA == sort_model:
+ self.task_list.sort(key = lambda x: x[TASK_SORT_ALPHA])
+ elif SORT_DEPS == sort_model:
+ self.task_list.sort(key = lambda x: x[TASK_SORT_DEPS])
+ elif SORT_BITBAKE == sort_model:
+ self.task_list.sort(key = lambda x: x[TASK_SORT_BITBAKE])
+
+ # The target package list (to highlight), from the command line
+ def set_primary(self,primary):
+ self.primary = primary
+
+ # Draw the box's outside frame
+ def draw_frame(self):
+ line_art_frame(self)
+ # Title
+ self.screen.addstr(self.base_y,
+ (self.base_x + (self.width//2))-((len(self.label)+2)//2),
+ '['+self.label+']')
+ self.screen.refresh()
+
+ # Draw the box's inside text content
+ def redraw(self):
+ task_list_len = len(self.task_list)
+ # Middle frame
+ body_line = "%s" % (' ' * (self.inside_width-1) )
+ for i in range(0,self.inside_height+1):
+ if i < (task_list_len + self.scroll_offset):
+ str_ctl = "%%-%ss" % (self.width-3)
+ # Safety assert
+ if (i + self.scroll_offset) >= task_list_len:
+ alert("REDRAW:%2d,%4d,%4d" % (i,self.scroll_offset,task_list_len),self.screen)
+ break
+
+ task_obj = self.task_list[i + self.scroll_offset]
+ task = task_obj[TASK_NAME][:self.inside_width-1]
+ task_primary = task_obj[TASK_PRIMARY]
+
+ if task_primary:
+ line = str_ctl % task[:self.inside_width-1]
+ self.screen.addstr(self.base_y+1+i, self.base_x+2, line, curses.A_BOLD)
+ else:
+ line = str_ctl % task[:self.inside_width-1]
+ self.screen.addstr(self.base_y+1+i, self.base_x+2, line)
+ else:
+ line = "%s" % (' ' * (self.inside_width-1) )
+ self.screen.addstr(self.base_y+1+i, self.base_x+2, line)
+ self.screen.refresh()
+
+ # Show the currently selected task over the bottom of the frame
+ def show_selected(self,selected_task):
+ if not selected_task:
+ selected_task = self.get_selected()
+ tag_line = "%s%s%s" % ('[',CHAR_HBAR * (self.width-2),']')
+ self.screen.addstr(self.base_y + self.height, self.base_x, tag_line)
+ self.screen.addstr(self.base_y + self.height,
+ (self.base_x + (self.width//2))-((len(selected_task)+2)//2),
+ '['+selected_task+']')
+ self.screen.refresh()
+
+ # Load box with new table of content
+ def update_content(self,task_list):
+ self.task_list = task_list
+ if self.cursor_enable:
+ self.cursor_update(turn_on=False)
+ self.cursor_index = 0
+ self.cursor_offset = 0
+ self.scroll_offset = 0
+ self.redraw()
+ if self.cursor_enable:
+ self.cursor_update(turn_on=True)
+
+ # Manage the box's highlighted task and blinking cursor character
+ def cursor_on(self,is_on):
+ self.cursor_enable = is_on
+ self.cursor_update(is_on)
+
+ # Highlight the currently pointed-to package; draw the rest normally
+ def cursor_update(self,turn_on=True):
+ str_ctl = "%%-%ss" % (self.inside_width-1)
+ try:
+ if len(self.task_list):
+ task_obj = self.task_list[self.cursor_index]
+ task = task_obj[TASK_NAME][:self.inside_width-1]
+ task_primary = task_obj[TASK_PRIMARY]
+ task_font = curses.A_BOLD if task_primary else 0
+ else:
+ task = ''
+ task_font = 0
+ except Exception as e:
+ alert("CURSOR_UPDATE:%s" % (e),self.screen)
+ return
+ if turn_on:
+ self.screen.addstr(self.base_y+1+self.cursor_offset,self.base_x+1,">", curses.color_pair(CURSES_HIGHLIGHT) | curses.A_BLINK)
+ self.screen.addstr(self.base_y+1+self.cursor_offset,self.base_x+2,str_ctl % task, curses.color_pair(CURSES_HIGHLIGHT) | task_font)
+ else:
+ self.screen.addstr(self.base_y+1+self.cursor_offset,self.base_x+1," ")
+ self.screen.addstr(self.base_y+1+self.cursor_offset,self.base_x+2,str_ctl % task, task_font)
+
+ # Down arrow
+ def line_down(self):
+ if len(self.task_list) <= (self.cursor_index+1):
+ return
+ self.cursor_update(turn_on=False)
+ self.cursor_index += 1
+ self.cursor_offset += 1
+ if self.cursor_offset > (self.inside_height):
+ self.cursor_offset -= 1
+ self.scroll_offset += 1
+ self.redraw()
+ self.cursor_update(turn_on=True)
+ debug_frame(self)
+
+ # Up arrow
+ def line_up(self):
+ if 0 > (self.cursor_index-1):
+ return
+ self.cursor_update(turn_on=False)
+ self.cursor_index -= 1
+ self.cursor_offset -= 1
+ if self.cursor_offset < 0:
+ self.cursor_offset += 1
+ self.scroll_offset -= 1
+ self.redraw()
+ self.cursor_update(turn_on=True)
+ debug_frame(self)
+
+ # Page down
+ def page_down(self):
+ max_task = len(self.task_list)-1
+ if max_task < self.inside_height:
+ return
+ self.cursor_update(turn_on=False)
+ self.cursor_index += 10
+ self.cursor_index = min(self.cursor_index,max_task)
+ self.cursor_offset = min(self.inside_height,self.cursor_index)
+ self.scroll_offset = self.cursor_index - self.cursor_offset
+ self.redraw()
+ self.cursor_update(turn_on=True)
+ debug_frame(self)
+
+ # Page up
+ def page_up(self):
+ max_task = len(self.task_list)-1
+ if max_task < self.inside_height:
+ return
+ self.cursor_update(turn_on=False)
+ self.cursor_index -= 10
+ self.cursor_index = max(self.cursor_index,0)
+ self.cursor_offset = max(0, self.inside_height - (max_task - self.cursor_index))
+ self.scroll_offset = self.cursor_index - self.cursor_offset
+ self.redraw()
+ self.cursor_update(turn_on=True)
+ debug_frame(self)
+
+ # Return the currently selected task name for this box
+ def get_selected(self):
+ if self.task_list:
+ return(self.task_list[self.cursor_index][TASK_NAME])
+ else:
+ return('')
+
+#################################################
+### The helper sub-windows
+###
+
+# Show persistent help at the top of the screen
+class HelpBarView(NBox):
+ def __init__(self, screen, label, primary, base_x, base_y, width, height):
+ super(HelpBarView, self).__init__(screen, label, primary, base_x, base_y, width, height)
+
+ def show_help(self,show):
+ self.screen.addstr(self.base_y,self.base_x, "%s" % (' ' * self.inside_width))
+ if show:
+ help = "Help='?' Filter='/' NextBox=<Tab> Select=<Enter> Print='p','P' Quit='q'"
+ bar_size = self.inside_width - 5 - len(help)
+ self.screen.addstr(self.base_y,self.base_x+((self.inside_width-len(help))//2), help)
+ self.screen.refresh()
+
+# Pop up a detailed Help box
+class HelpBoxView(NBox):
+ def __init__(self, screen, label, primary, base_x, base_y, width, height, dep):
+ super(HelpBoxView, self).__init__(screen, label, primary, base_x, base_y, width, height)
+ self.x_pos = 0
+ self.y_pos = 0
+ self.dep = dep
+
+ # Instantiate the pop-up help box
+ def show_help(self,show):
+ self.x_pos = self.base_x + 4
+ self.y_pos = self.base_y + 2
+
+ def add_line(line):
+ if line:
+ self.screen.addstr(self.y_pos,self.x_pos,line)
+ self.y_pos += 1
+
+ # Gather some statistics
+ dep_count = 0
+ rdep_count = 0
+ for task_obj in self.dep.depends_model:
+ if TYPE_DEP == task_obj[DEPENDS_TYPE]:
+ dep_count += 1
+ elif TYPE_RDEP == task_obj[DEPENDS_TYPE]:
+ rdep_count += 1
+
+ self.draw_frame()
+ line_art_fixup(self.dep)
+ add_line("Quit : 'q' ")
+ add_line("Filter task names : '/'")
+ add_line("Tab to next box : <Tab>")
+ add_line("Select a task : <Enter>")
+ add_line("Print task's deps : 'p'")
+ add_line("Print recipe's deps : 'P'")
+ add_line(" -> '%s'" % print_file_name)
+ add_line("Sort toggle : 's'")
+ add_line(" %s Recipe inner-depends order" % ('->' if (SORT_DEPS == sort_model) else '- '))
+ add_line(" %s Alpha-numeric order" % ('->' if (SORT_ALPHA == sort_model) else '- '))
+ if SORT_BITBAKE_ENABLE:
+ add_line(" %s Bitbake order" % ('->' if (SORT_BITBAKE == sort_model) else '- '))
+ add_line("Alternate backspace : <CTRL-H>")
+ add_line("")
+ add_line("Primary recipes = %s" % ','.join(self.primary))
+ add_line("Task count = %4d" % len(self.dep.pkg_model))
+ add_line("Deps count = %4d" % dep_count)
+ add_line("RDeps count = %4d" % rdep_count)
+ add_line("")
+ self.screen.addstr(self.y_pos,self.x_pos+7,"<Press any key>", curses.color_pair(CURSES_HIGHLIGHT))
+ self.screen.refresh()
+ c = self.screen.getch()
+
+# Show a progress bar
+class ProgressView(NBox):
+ def __init__(self, screen, label, primary, base_x, base_y, width, height):
+ super(ProgressView, self).__init__(screen, label, primary, base_x, base_y, width, height)
+
+ def progress(self,title,current,max):
+ if title:
+ self.label = title
+ else:
+ title = self.label
+ if max <=0: max = 10
+ bar_size = self.width - 7 - len(title)
+ bar_done = int( (float(current)/float(max)) * float(bar_size) )
+ self.screen.addstr(self.base_y,self.base_x, " %s:[%s%s]" % (title,'*' * bar_done,' ' * (bar_size-bar_done)))
+ self.screen.refresh()
+ return(current+1)
+
+ def clear(self):
+ self.screen.addstr(self.base_y,self.base_x, "%s" % (' ' * self.width))
+ self.screen.refresh()
+
+# Implement a task filter bar
+class FilterView(NBox):
+ SEARCH_NOP = 0
+ SEARCH_GO = 1
+ SEARCH_CANCEL = 2
+
+ def __init__(self, screen, label, primary, base_x, base_y, width, height):
+ super(FilterView, self).__init__(screen, label, primary, base_x, base_y, width, height)
+ self.do_show = False
+ self.filter_str = ""
+
+ def clear(self,enable_show=True):
+ self.filter_str = ""
+
+ def show(self,enable_show=True):
+ self.do_show = enable_show
+ if self.do_show:
+ self.screen.addstr(self.base_y,self.base_x, "[ Filter: %-25s ] '/'=cancel, format='abc' " % self.filter_str[0:25])
+ else:
+ self.screen.addstr(self.base_y,self.base_x, "%s" % (' ' * self.width))
+ self.screen.refresh()
+
+ def show_prompt(self):
+ self.screen.addstr(self.base_y,self.base_x + 10 + len(self.filter_str), " ")
+ self.screen.addstr(self.base_y,self.base_x + 10 + len(self.filter_str), "")
+
+ # Keys specific to the filter box (start/stop filter keys are in the main loop)
+ def input(self,c,ch):
+ ret = self.SEARCH_GO
+ if c in (curses.KEY_BACKSPACE,CHAR_BS_H):
+ # Backspace
+ if self.filter_str:
+ self.filter_str = self.filter_str[0:-1]
+ self.show()
+ elif ((ch >= 'a') and (ch <= 'z')) or ((ch >= 'A') and (ch <= 'Z')) or ((ch >= '0') and (ch <= '9')) or (ch in (' ','_','.','-')):
+ # isalnum() acts strangely with keypad(True), so use explicit bounds
+ self.filter_str += ch
+ self.show()
+ else:
+ ret = self.SEARCH_NOP
+ return(ret)
+
+
+#################################################
+### The primary dependency windows
+###
+
+# The main list of package tasks
+class PackageView(NBox):
+ def __init__(self, screen, label, primary, base_x, base_y, width, height):
+ super(PackageView, self).__init__(screen, label, primary, base_x, base_y, width, height)
+
+ # Find and vertically center a selected task (from filter or from dependent box)
+ # The 'task_filter_str' can be a full or a partial (filter) task name
+ def find(self,task_filter_str):
+ found = False
+ max = self.height-2
+ if not task_filter_str:
+ return(found)
+ for i,task_obj in enumerate(self.task_list):
+ task = task_obj[TASK_NAME]
+ if task.startswith(task_filter_str):
+ self.cursor_on(False)
+ self.cursor_index = i
+
+ # Position selected at vertical center
+ vcenter = self.inside_height // 2
+ if self.cursor_index <= vcenter:
+ self.scroll_offset = 0
+ self.cursor_offset = self.cursor_index
+ elif self.cursor_index >= (len(self.task_list) - vcenter - 1):
+ self.cursor_offset = self.inside_height-1
+ self.scroll_offset = self.cursor_index - self.cursor_offset
+ else:
+ self.cursor_offset = vcenter
+ self.scroll_offset = self.cursor_index - self.cursor_offset
+
+ self.redraw()
+ self.cursor_on(True)
+ found = True
+ break
+ return(found)
+
+# The view of dependent packages
+class PackageDepView(NBox):
+ def __init__(self, screen, label, primary, base_x, base_y, width, height):
+ super(PackageDepView, self).__init__(screen, label, primary, base_x, base_y, width, height)
+
+# The view of reverse-dependent packages
+class PackageReverseDepView(NBox):
+ def __init__(self, screen, label, primary, base_x, base_y, width, height):
+ super(PackageReverseDepView, self).__init__(screen, label, primary, base_x, base_y, width, height)
+
+
+#################################################
+### DepExplorer : The parent frame and object
+###
+
+class DepExplorer(NBox):
+ def __init__(self,screen):
+ title = "Task Dependency Explorer"
+ super(DepExplorer, self).__init__(screen, 'Task Dependency Explorer','',0,0,80,23)
+
+ self.screen = screen
+ self.pkg_model = []
+ self.depends_model = []
+ self.dep_sort_map = {}
+ self.bb_sort_map = {}
+ self.filter_str = ''
+ self.filter_prev = 'deadbeef'
+
+ if self.screen:
+ self.help_bar_view = HelpBarView(screen, "Help",'',1,1,79,1)
+ self.help_box_view = HelpBoxView(screen, "Help",'',0,2,40,20,self)
+ self.progress_view = ProgressView(screen, "Progress",'',2,1,76,1)
+ self.filter_view = FilterView(screen, "Filter",'',2,1,76,1)
+ self.package_view = PackageView(screen, "Package",'alpha', 0,2,40,20)
+ self.dep_view = PackageDepView(screen, "Dependencies",'beta',40,2,40,10)
+ self.reverse_view = PackageReverseDepView(screen, "Dependent Tasks",'gamma',40,13,40,9)
+ self.draw_frames()
+
+ # Draw this main window's frame and all sub-windows
+ def draw_frames(self):
+ self.draw_frame()
+ self.package_view.draw_frame()
+ self.dep_view.draw_frame()
+ self.reverse_view.draw_frame()
+ if is_filter:
+ self.filter_view.show(True)
+ self.filter_view.show_prompt()
+ else:
+ self.help_bar_view.show_help(True)
+ self.package_view.redraw()
+ self.dep_view.redraw()
+ self.reverse_view.redraw()
+ self.show_selected(self.package_view.get_selected())
+ line_art_fixup(self)
+
+ # Parse the bitbake dependency event object
+ def parse(self, depgraph):
+ for task in depgraph["tdepends"]:
+ self.pkg_model.insert(0, task)
+ for depend in depgraph["tdepends"][task]:
+ self.depends_model.insert (0, (TYPE_DEP, task, depend))
+ self.depends_model.insert (0, (TYPE_RDEP, depend, task))
+ if self.screen:
+ self.dep_sort_prep()
+
+ # Prepare the dependency sort order keys
+ # This method creates sort keys for each recipe's tasks in
+ # the order of that recipe's internal dependencies
+ # Method:
+ # Sieve the tasks in dependency order into dep_sort_map = {}
+ # (a) Find a task that has no dependencies
+ # Ignore non-recipe specific tasks
+ # (b) Add it to the sort mapping dict with
+ # key of "<task_group>_<order>"
+ # (c) Remove it as a dependency from the other tasks
+ # (d) Repeat till all tasks are mapped
+ # Use placeholders to ensure each sub-dict is instantiated
+ def dep_sort_prep(self):
+ self.progress_view.progress('DepSort',0,4)
+ # Init the task base entries
+ self.progress_view.progress('DepSort',1,4)
+ dep_table = {}
+ bb_index = 0
+ for task in self.pkg_model:
+ # First define the incoming bitbake sort order
+ self.bb_sort_map[task] = "%04d" % (bb_index)
+ bb_index += 1
+ task_group = task[0:task.find('.')]
+ if task_group not in dep_table:
+ dep_table[task_group] = {}
+ dep_table[task_group]['-'] = {} # Placeholder
+ if task not in dep_table[task_group]:
+ dep_table[task_group][task] = {}
+ dep_table[task_group][task]['-'] = {} # Placeholder
+ # Add the task dependency entries
+ self.progress_view.progress('DepSort',2,4)
+ for task_obj in self.depends_model:
+ if task_obj[DEPENDS_TYPE] != TYPE_DEP:
+ continue
+ task = task_obj[DEPENDS_TASK]
+ task_dep = task_obj[DEPENDS_DEPS]
+ task_group = task[0:task.find('.')]
+ # Only track depends within same group
+ if task_dep.startswith(task_group+'.'):
+ dep_table[task_group][task][task_dep] = 1
+ self.progress_view.progress('DepSort',3,4)
+ for task_group in dep_table:
+ dep_index = 0
+ # Whittle down the tasks of each group
+ this_pass = 1
+ do_loop = True
+ while (len(dep_table[task_group]) > 1) and do_loop:
+ this_pass += 1
+ is_change = False
+ delete_list = []
+ for task in dep_table[task_group]:
+ if '-' == task:
+ continue
+ if 1 == len(dep_table[task_group][task]):
+ is_change = True
+ # No more deps, so collect this task...
+ self.dep_sort_map[task] = "%s_%04d" % (task_group,dep_index)
+ dep_index += 1
+ # ... remove it from other lists as resolved ...
+ for dep_task in dep_table[task_group]:
+ if task in dep_table[task_group][dep_task]:
+ del dep_table[task_group][dep_task][task]
+ # ... and remove it from the task group
+ delete_list.append(task)
+ for task in delete_list:
+ del dep_table[task_group][task]
+ if not is_change:
+ alert("ERROR:DEP_SIEVE_NO_CHANGE:%s" % task_group,self.screen)
+ do_loop = False
+ continue
+ self.progress_view.progress('',4,4)
+ self.progress_view.clear()
+ self.help_bar_view.show_help(True)
+ if len(self.dep_sort_map) != len(self.pkg_model):
+ alert("ErrorDepSort:%d/%d" % (len(self.dep_sort_map),len(self.pkg_model)),self.screen)
+
+ # Look up a dep sort order key
+ def get_dep_sort(self,key):
+ if key in self.dep_sort_map:
+ return(self.dep_sort_map[key])
+ else:
+ return(key)
+
+ # Look up a bitbake sort order key
+ def get_bb_sort(self,key):
+ if key in self.bb_sort_map:
+ return(self.bb_sort_map[key])
+ else:
+ return(key)
+
+ # Find the selected package in the main frame, update the dependency frames content accordingly
+ def select(self, package_name, only_update_dependents=False):
+ if not package_name:
+ package_name = self.package_view.get_selected()
+ # alert("SELECT:%s:" % package_name,self.screen)
+
+ if self.filter_str != self.filter_prev:
+ self.package_view.cursor_on(False)
+ # Fill the main package task list using the new filter
+ self.package_view.task_list = []
+ for package in self.pkg_model:
+ if self.filter_str:
+ if self.filter_str in package:
+ self.package_view.task_list_append(package,self)
+ else:
+ self.package_view.task_list_append(package,self)
+ self.package_view.sort()
+ self.filter_prev = self.filter_str
+
+ # Old position is lost, assert new position of previous task (if still filtered in)
+ self.package_view.cursor_index = 0
+ self.package_view.cursor_offset = 0
+ self.package_view.scroll_offset = 0
+ self.package_view.redraw()
+ self.package_view.cursor_on(True)
+
+ # Make sure the selected package is in view, with implicit redraw()
+ if (not only_update_dependents):
+ self.package_view.find(package_name)
+ # In case the selected name changed (i.e. the filter removed the previous one)
+ package_name = self.package_view.get_selected()
+
+ # Filter the package's dependent list to the dependent view
+ self.dep_view.reset()
+ for package_def in self.depends_model:
+ if (package_def[DEPENDS_TYPE] == TYPE_DEP) and (package_def[DEPENDS_TASK] == package_name):
+ self.dep_view.task_list_append(package_def[DEPENDS_DEPS],self)
+ self.dep_view.sort()
+ self.dep_view.redraw()
+ # Filter the package's dependent list to the reverse dependent view
+ self.reverse_view.reset()
+ for package_def in self.depends_model:
+ if (package_def[DEPENDS_TYPE] == TYPE_RDEP) and (package_def[DEPENDS_TASK] == package_name):
+ self.reverse_view.task_list_append(package_def[DEPENDS_DEPS],self)
+ self.reverse_view.sort()
+ self.reverse_view.redraw()
+ self.show_selected(package_name)
+ self.screen.refresh()
+
+ # The print-to-file method
+ def print_deps(self,whole_group=False):
+ global is_printed
+ # Print the selected deptree(s) to a file
+ if not is_printed:
+ try:
+ # Move any existing file to a backup before the first write
+ if os.path.isfile(print_file_name):
+ os.system('mv -f %s %s' % (print_file_name,print_file_backup_name))
+ except Exception as e:
+ alert(e,self.screen)
+ alert('',self.screen)
+ print_list = []
+ selected_task = self.package_view.get_selected()
+ if not selected_task:
+ return
+ if not whole_group:
+ print_list.append(selected_task)
+ else:
+ # Use the presorted task_group order from 'package_view'
+ task_group = selected_task[0:selected_task.find('.')+1]
+ for task_obj in self.package_view.task_list:
+ task = task_obj[TASK_NAME]
+ if task.startswith(task_group):
+ print_list.append(task)
+ with open(print_file_name, "a") as fd:
+ print_max = len(print_list)
+ print_count = 1
+ self.progress_view.progress('Write "%s"' % print_file_name,0,print_max)
+ for task in print_list:
+ print_count = self.progress_view.progress('',print_count,print_max)
+ self.select(task)
+ self.screen.refresh();
+ # Utilize the current print output model
+ if print_model == PRINT_MODEL_1:
+ print("=== Dependency Snapshot ===",file=fd)
+ print(" = Package =",file=fd)
+ print(' '+task,file=fd)
+ # Fill in the matching dependencies
+ print(" = Dependencies =",file=fd)
+ for task_obj in self.dep_view.task_list:
+ print(' '+ task_obj[TASK_NAME],file=fd)
+ print(" = Dependent Tasks =",file=fd)
+ for task_obj in self.reverse_view.task_list:
+ print(' '+ task_obj[TASK_NAME],file=fd)
+ if print_model == PRINT_MODEL_2:
+ print("=== Dependency Snapshot ===",file=fd)
+ dep_count = len(self.dep_view.task_list) - 1
+ for i,task_obj in enumerate(self.dep_view.task_list):
+ print('%s%s' % ("Dep =" if (i==dep_count) else " ",task_obj[TASK_NAME]),file=fd)
+ if not self.dep_view.task_list:
+ print('Dep =',file=fd)
+ print("Package=%s" % task,file=fd)
+ for i,task_obj in enumerate(self.reverse_view.task_list):
+ print('%s%s' % ("RDep =" if (i==0) else " ",task_obj[TASK_NAME]),file=fd)
+ if not self.reverse_view.task_list:
+ print('RDep =',file=fd)
+ curses.napms(2000)
+ self.progress_view.clear()
+ self.help_bar_view.show_help(True)
+ print('',file=fd)
+ # Restore display to original selected task
+ self.select(selected_task)
+ is_printed = True
+
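+# Illustrative PRINT_MODEL_2 output for a single task (task names are
+# examples only):
+# === Dependency Snapshot ===
+# Dep =quilt-native.do_unpack
+# Package=quilt-native.do_patch
+# RDep =quilt-native.do_configure
+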
+#################################################
+### Load bitbake data
+###
+
+def bitbake_load(server, eventHandler, params, dep, curses_off, screen):
+ global bar_len_old
+ bar_len_old = 0
+
+ # Support no screen
+ def progress(msg,count,max):
+ global bar_len_old
+ if screen:
+ dep.progress_view.progress(msg,count,max)
+ else:
+ if msg:
+ if bar_len_old:
+ bar_len_old = 0
+ print("\n")
+ print(f"{msg}: ({count} of {max})")
+ else:
+ bar_len = int((count*40)/max)
+ if bar_len_old != bar_len:
+ print(f"{'*' * (bar_len-bar_len_old)}",end='',flush=True)
+ bar_len_old = bar_len
+ def clear():
+ if screen:
+ dep.progress_view.clear()
+ def clear_curses(screen):
+ if screen:
+ curses_off(screen)
+
+ #
+ # Trigger bitbake "generateDepTreeEvent"
+ #
+
+ cmdline = ''
+ try:
+ params.updateToServer(server, os.environ.copy())
+ params.updateFromServer(server)
+ cmdline = params.parseActions()
+ if not cmdline:
+ clear_curses(screen)
+ print("ERROR: nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
+ return 1,cmdline
+ if 'msg' in cmdline and cmdline['msg']:
+ clear_curses(screen)
+ print('ERROR: ' + cmdline['msg'])
+ return 1,cmdline
+ cmdline = cmdline['action']
+ if not cmdline or cmdline[0] != "generateDotGraph":
+ clear_curses(screen)
+ print("ERROR: This UI requires the -g option")
+ return 1,cmdline
+ ret, error = server.runCommand(["generateDepTreeEvent", cmdline[1], cmdline[2]])
+ if error:
+ clear_curses(screen)
+ print("ERROR: running command '%s': %s" % (cmdline, error))
+ return 1,cmdline
+ elif not ret:
+ clear_curses(screen)
+ print("ERROR: running command '%s': returned %s" % (cmdline, ret))
+ return 1,cmdline
+ except client.Fault as x:
+ clear_curses(screen)
+ print("ERROR: XMLRPC Fault getting commandline:\n %s" % x)
+ return 1,cmdline
+ except Exception as e:
+ clear_curses(screen)
+ print("ERROR: in startup:\n %s" % traceback.format_exc())
+ return 1,cmdline
+
+ #
+ # Receive data from bitbake
+ #
+
+ progress_total = 0
+ load_bitbake = True
+ quit = False
+ shutdown = 0 # consulted by the KeyboardInterrupt handler below
+ try:
+ while load_bitbake:
+ try:
+ event = eventHandler.waitEvent(0.25)
+ if quit:
+ _, error = server.runCommand(["stateForceShutdown"])
+ clear_curses(screen)
+ if error:
+ print('Unable to cleanly stop: %s' % error)
+ break
+
+ if event is None:
+ continue
+
+ if isinstance(event, bb.event.CacheLoadStarted):
+ progress_total = event.total
+ progress('Loading Cache',0,progress_total)
+ continue
+
+ if isinstance(event, bb.event.CacheLoadProgress):
+ x = event.current
+ progress('',x,progress_total)
+ continue
+
+ if isinstance(event, bb.event.CacheLoadCompleted):
+ clear()
+ progress('Bitbake... ',1,2)
+ continue
+
+ if isinstance(event, bb.event.ParseStarted):
+ progress_total = event.total
+ progress('Processing recipes',0,progress_total)
+ if progress_total == 0:
+ continue
+
+ if isinstance(event, bb.event.ParseProgress):
+ x = event.current
+ progress('',x,progress_total)
+ continue
+
+ if isinstance(event, bb.event.ParseCompleted):
+ progress('Generating dependency tree',0,3)
+ continue
+
+ if isinstance(event, bb.event.DepTreeGenerated):
+ progress('Generating dependency tree',1,3)
+ dep.parse(event._depgraph)
+ progress('Generating dependency tree',2,3)
+
+ if isinstance(event, bb.command.CommandCompleted):
+ load_bitbake = False
+ progress('Generating dependency tree',3,3)
+ clear()
+ if screen:
+ dep.help_bar_view.show_help(True)
+ continue
+
+ if isinstance(event, bb.event.NoProvider):
+ clear_curses(screen)
+ print('ERROR: %s' % event)
+
+ _, error = server.runCommand(["stateShutdown"])
+ if error:
+ print('ERROR: Unable to cleanly shutdown: %s' % error)
+ return 1,cmdline
+
+ if isinstance(event, bb.command.CommandFailed):
+ clear_curses(screen)
+ print('ERROR: ' + str(event))
+ return event.exitcode,cmdline
+
+ if isinstance(event, bb.command.CommandExit):
+ clear_curses(screen)
+ return event.exitcode,cmdline
+
+ if isinstance(event, bb.cooker.CookerExit):
+ break
+
+ continue
+ except EnvironmentError as ioerror:
+ # ignore interrupted io
+ if ioerror.args[0] == 4:
+ pass
+ except KeyboardInterrupt:
+ if shutdown == 2:
+ clear_curses(screen)
+ print("\nThird Keyboard Interrupt, exit.\n")
+ break
+ if shutdown == 1:
+ clear_curses(screen)
+ print("\nSecond Keyboard Interrupt, stopping...\n")
+ _, error = server.runCommand(["stateForceShutdown"])
+ if error:
+ print('Unable to cleanly stop: %s' % error)
+ if shutdown == 0:
+ clear_curses(screen)
+ print("\nKeyboard Interrupt, closing down...\n")
+ _, error = server.runCommand(["stateShutdown"])
+ if error:
+ print('Unable to cleanly shutdown: %s' % error)
+ shutdown = shutdown + 1
+ pass
+ except Exception as e:
+ # Safe exit on error
+ clear_curses(screen)
+ print("Exception : %s" % e)
+ print("Exception in startup:\n %s" % traceback.format_exc())
+
+ return 0,cmdline
+
+#################################################
+### main
+###
+
+SCREEN_COL_MIN = 83
+SCREEN_ROW_MIN = 26
+
+def main(server, eventHandler, params):
+ global verbose
+ global sort_model
+ global print_model
+ global is_printed
+ global is_filter
+ global screen_too_small
+
+ shutdown = 0
+ screen_too_small = False
+ quit = False
+
+ # Unit test with no terminal?
+ if unit_test_noterm:
+ # Load bitbake, test that there is valid dependency data, then exit
+ screen = None
+ print("* UNIT TEST:START")
+ dep = DepExplorer(screen)
+ print("* UNIT TEST:BITBAKE FETCH")
+ ret,cmdline = bitbake_load(server, eventHandler, params, dep, None, screen)
+ if ret:
+ print("* UNIT TEST: BITBAKE FAILED")
+ return ret
+ # Test the acquired dependency data
+ quilt_native_deps = 0
+ quilt_native_rdeps = 0
+ quilt_deps = 0
+ quilt_rdeps = 0
+ for i,task_obj in enumerate(dep.depends_model):
+ if TYPE_DEP == task_obj[0]:
+ task = task_obj[1]
+ if task.startswith('quilt-native'):
+ quilt_native_deps += 1
+ elif task.startswith('quilt'):
+ quilt_deps += 1
+ elif TYPE_RDEP == task_obj[0]:
+ task = task_obj[1]
+ if task.startswith('quilt-native'):
+ quilt_native_rdeps += 1
+ elif task.startswith('quilt'):
+ quilt_rdeps += 1
+ # Print results
+ failed = False
+ if 0 < len(dep.depends_model):
+ print(f"Pass:Bitbake dependency count = {len(dep.depends_model)}")
+ else:
+ failed = True
+ print(f"FAIL:Bitbake dependency count = 0")
+ if quilt_native_deps:
+ print(f"Pass:Quilt-native depends count = {quilt_native_deps}")
+ else:
+ failed = True
+ print(f"FAIL:Quilt-native depends count = 0")
+ if quilt_native_rdeps:
+ print(f"Pass:Quilt-native rdepends count = {quilt_native_rdeps}")
+ else:
+ failed = True
+ print(f"FAIL:Quilt-native rdepends count = 0")
+ if quilt_deps:
+ print(f"Pass:Quilt depends count = {quilt_deps}")
+ else:
+ failed = True
+ print(f"FAIL:Quilt depends count = 0")
+ if quilt_rdeps:
+ print(f"Pass:Quilt rdepends count = {quilt_rdeps}")
+ else:
+ failed = True
+ print(f"FAIL:Quilt rdepends count = 0")
+ print("* UNIT TEST:STOP")
+ return failed
+
+ # Helper method to dynamically detect a parent window that is too small
+ def check_screen_size(dep, active_package):
+ global screen_too_small
+ rows, cols = screen.getmaxyx()
+ if (rows >= SCREEN_ROW_MIN) and (cols >= SCREEN_COL_MIN):
+ if screen_too_small:
+ # Now big enough, remove error message and redraw screen
+ dep.draw_frames()
+ active_package.cursor_on(True)
+ screen_too_small = False
+ return True
+ # Test on App init
+ if not dep:
+ # Do not start this app if screen not big enough
+ curses.endwin()
+ print("")
+ print("ERROR(Taskexp_cli): Minimal screen size is %dx%d" % (SCREEN_COL_MIN,SCREEN_ROW_MIN))
+ print("Current screen is Cols=%d,Rows=%d" % (cols,rows))
+ return False
+ # First time window too small
+ if not screen_too_small:
+ active_package.cursor_on(False)
+ dep.screen.addstr(0,2,'[BIGGER WINDOW PLEASE]', curses.color_pair(CURSES_WARNING) | curses.A_BLINK)
+ screen_too_small = True
+ return False
+
+ # Helper method to turn off curses mode
+ def curses_off(screen):
+ if not screen: return
+ # Safe error exit
+ screen.keypad(False)
+ curses.echo()
+ curses.curs_set(1)
+ curses.endwin()
+
+ if unit_test_results:
+ print('\nUnit Test Results:')
+ for line in unit_test_results:
+ print(" %s" % line)
+
+ #
+ # Initialize the ncurses environment
+ #
+
+ screen = curses.initscr()
+ try:
+ if not check_screen_size(None, None):
+ exit(1)
+ try:
+ curses.start_color()
+ curses.use_default_colors();
+ curses.init_pair(0xFF, curses.COLOR_BLACK, curses.COLOR_WHITE);
+ curses.init_pair(CURSES_NORMAL, curses.COLOR_WHITE, curses.COLOR_BLACK)
+ curses.init_pair(CURSES_HIGHLIGHT, curses.COLOR_WHITE, curses.COLOR_BLUE)
+ curses.init_pair(CURSES_WARNING, curses.COLOR_WHITE, curses.COLOR_RED)
+ except:
+ curses.endwin()
+ print("")
+ print("ERROR(Taskexp_cli): Requires 256 colors. Please use this or the equivalent:")
+ print(" $ export TERM='xterm-256color'")
+ exit(1)
+
+ screen.keypad(True)
+ curses.noecho()
+ curses.curs_set(0)
+ screen.refresh();
+ except Exception as e:
+ # Safe error exit
+ curses_off(screen)
+ print("Exception : %s" % e)
+ print("Exception in startup:\n %s" % traceback.format_exc())
+ exit(1)
+
+ try:
+ #
+ # Instantiate the presentation layers
+ #
+
+ dep = DepExplorer(screen)
+
+ #
+ # Prepare bitbake
+ #
+
+ # Fetch bitbake dependency data
+ ret,cmdline = bitbake_load(server, eventHandler, params, dep, curses_off, screen)
+ if ret: return ret
+
+ #
+ # Preset the views
+ #
+
+ # Cmdline example = ['generateDotGraph', ['acl', 'zlib'], 'build']
+ primary_packages = cmdline[1]
+ dep.package_view.set_primary(primary_packages)
+ dep.dep_view.set_primary(primary_packages)
+ dep.reverse_view.set_primary(primary_packages)
+ dep.help_box_view.set_primary(primary_packages)
+ dep.help_bar_view.show_help(True)
+ active_package = dep.package_view
+ active_package.cursor_on(True)
+ dep.select(primary_packages[0]+'.')
+ if unit_test:
+ alert('UNIT_TEST',screen)
+
+ # Helper method to start/stop the filter feature
+ def filter_mode(new_filter_status):
+ global is_filter
+ if is_filter == new_filter_status:
+ # Ignore no changes
+ return
+ if not new_filter_status:
+ # Turn off
+ curses.curs_set(0)
+ #active_package.cursor_on(False)
+ active_package = dep.package_view
+ active_package.cursor_on(True)
+ is_filter = False
+ dep.help_bar_view.show_help(True)
+ dep.filter_str = ''
+ dep.select('')
+ else:
+ # Turn on
+ curses.curs_set(1)
+ dep.help_bar_view.show_help(False)
+ dep.filter_view.clear()
+ dep.filter_view.show(True)
+ dep.filter_view.show_prompt()
+ is_filter = True
+
+ #
+ # Main user loop
+ #
+
+ while not quit:
+ if is_filter:
+ dep.filter_view.show_prompt()
+ if unit_test:
+ c = unit_test_action(active_package)
+ else:
+ c = screen.getch()
+ ch = chr(c)
+
+ # Do not draw if window now too small
+ if not check_screen_size(dep,active_package):
+ continue
+
+ if verbose:
+ if c == CHAR_RETURN:
+ screen.addstr(0, 4, "|%3d,CR |" % (c))
+ else:
+ screen.addstr(0, 4, "|%3d,%3s|" % (c,chr(c)))
+
+ # pre-map alternate filter close keys
+ if is_filter and (c == CHAR_ESCAPE):
+ # Alternate exit from filter
+ ch = '/'
+ c = ord(ch)
+
+ # Filter and non-filter mode command keys
+ # https://docs.python.org/3/library/curses.html
+ if c in (curses.KEY_UP,CHAR_UP):
+ active_package.line_up()
+ if active_package == dep.package_view:
+ dep.select('',only_update_dependents=True)
+ elif c in (curses.KEY_DOWN,CHAR_DOWN):
+ active_package.line_down()
+ if active_package == dep.package_view:
+ dep.select('',only_update_dependents=True)
+ elif curses.KEY_PPAGE == c:
+ active_package.page_up()
+ if active_package == dep.package_view:
+ dep.select('',only_update_dependents=True)
+ elif curses.KEY_NPAGE == c:
+ active_package.page_down()
+ if active_package == dep.package_view:
+ dep.select('',only_update_dependents=True)
+ elif CHAR_TAB == c:
+ # Tab between boxes
+ active_package.cursor_on(False)
+ if active_package == dep.package_view:
+ active_package = dep.dep_view
+ elif active_package == dep.dep_view:
+ active_package = dep.reverse_view
+ else:
+ active_package = dep.package_view
+ active_package.cursor_on(True)
+ elif curses.KEY_BTAB == c:
+ # Shift-Tab reverse between boxes
+ active_package.cursor_on(False)
+ if active_package == dep.package_view:
+ active_package = dep.reverse_view
+ elif active_package == dep.reverse_view:
+ active_package = dep.dep_view
+ else:
+ active_package = dep.package_view
+ active_package.cursor_on(True)
+ elif (CHAR_RETURN == c):
+ # CR to select
+ selected = active_package.get_selected()
+ if selected:
+ active_package.cursor_on(False)
+ active_package = dep.package_view
+ filter_mode(False)
+ dep.select(selected)
+ else:
+ filter_mode(False)
+ dep.select(primary_packages[0]+'.')
+
+ elif '/' == ch: # Enter/exit dep.filter_view
+ if is_filter:
+ filter_mode(False)
+ else:
+ filter_mode(True)
+ elif is_filter:
+ # If in filter mode, re-direct all these other keys to the filter box
+ result = dep.filter_view.input(c,ch)
+ dep.filter_str = dep.filter_view.filter_str
+ dep.select('')
+
+ # Non-filter mode command keys
+ elif 'p' == ch:
+ dep.print_deps(whole_group=False)
+ elif 'P' == ch:
+ dep.print_deps(whole_group=True)
+ elif 'w' == ch:
+ # Toggle the print model
+ if print_model == PRINT_MODEL_1:
+ print_model = PRINT_MODEL_2
+ else:
+ print_model = PRINT_MODEL_1
+ elif 's' == ch:
+ # Toggle the sort model
+ if sort_model == SORT_DEPS:
+ sort_model = SORT_ALPHA
+ elif sort_model == SORT_ALPHA:
+ if SORT_BITBAKE_ENABLE:
+ sort_model = SORT_BITBAKE
+ else:
+ sort_model = SORT_DEPS
+ else:
+ sort_model = SORT_DEPS
+ active_package.cursor_on(False)
+ current_task = active_package.get_selected()
+ dep.package_view.sort()
+ dep.dep_view.sort()
+ dep.reverse_view.sort()
+ active_package = dep.package_view
+ active_package.cursor_on(True)
+ dep.select(current_task)
+ # Announce the new sort model
+ alert("SORT=%s" % ("ALPHA" if (sort_model == SORT_ALPHA) else "DEPS"),screen)
+ alert('',screen)
+
+ elif 'q' == ch:
+ quit = True
+ elif ch in ('h','?'):
+ dep.help_box_view.show_help(True)
+ dep.select(active_package.get_selected())
+
+ #
+ # Debugging commands
+ #
+
+ elif 'V' == ch:
+ verbose = not verbose
+ alert('Verbose=%s' % str(verbose),screen)
+ alert('',screen)
+ elif 'R' == ch:
+ screen.refresh()
+ elif 'B' == ch:
+ # Progress bar unit test
+ dep.progress_view.progress('Test',0,40)
+ curses.napms(1000)
+ dep.progress_view.progress('',10,40)
+ curses.napms(1000)
+ dep.progress_view.progress('',20,40)
+ curses.napms(1000)
+ dep.progress_view.progress('',30,40)
+ curses.napms(1000)
+ dep.progress_view.progress('',40,40)
+ curses.napms(1000)
+ dep.progress_view.clear()
+ dep.help_bar_view.show_help(True)
+ elif 'Q' == ch:
+ # Simulated error
+ curses_off(screen)
+ print('ERROR: simulated error exit')
+ return 1
+
+ # Safe exit
+ curses_off(screen)
+ except Exception as e:
+ # Safe exit on error
+ curses_off(screen)
+ print("Exception : %s" % e)
+ print("Exception in startup:\n %s" % traceback.format_exc())
+
+ # Reminder to pick up your printed results
+ if is_printed:
+ print("")
+ print("You have output ready!")
+ print(" * Your printed dependency file is: %s" % print_file_name)
+ print(" * Your previous results saved in: %s" % print_file_backup_name)
+ print("")
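The dependency-order sort built by dep_sort_prep() above is, at heart, Kahn's topological sort applied per recipe. A standalone restatement of the sieve (illustrative sketch, not part of the patch; 'deps' maps each task to the set of its same-recipe dependencies):

    def kahn_sieve(deps):
        deps = {task: set(d) for task, d in deps.items()}
        order = []
        while deps:
            ready = [task for task, d in deps.items() if not d]
            if not ready:
                break  # dependency cycle; mirrors the DEP_SIEVE_NO_CHANGE alert
            for task in sorted(ready):
                order.append(task)
                del deps[task]
                for d in deps.values():
                    d.discard(task)
        return order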
diff --git a/lib/bb/ui/toasterui.py b/lib/bb/ui/toasterui.py
index ec5bd4f10..6bd21f184 100644
--- a/lib/bb/ui/toasterui.py
+++ b/lib/bb/ui/toasterui.py
@@ -385,7 +385,7 @@ def main(server, eventHandler, params):
main.shutdown = 1
logger.info("ToasterUI build done, brbe: %s", brbe)
- continue
+ break
if isinstance(event, (bb.command.CommandCompleted,
bb.command.CommandFailed,
diff --git a/lib/bb/ui/uievent.py b/lib/bb/ui/uievent.py
index 8607d0523..c2f830d53 100644
--- a/lib/bb/ui/uievent.py
+++ b/lib/bb/ui/uievent.py
@@ -44,7 +44,7 @@ class BBUIEventQueue:
for count_tries in range(5):
ret = self.BBServer.registerEventHandler(self.host, self.port)
- if isinstance(ret, collections.Iterable):
+ if isinstance(ret, collections.abc.Iterable):
self.EventHandle, error = ret
else:
self.EventHandle = ret
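collections.Iterable and the other ABC aliases were removed in Python 3.10; the classes have lived in collections.abc since Python 3.3, so the spelling above works on both old and new interpreters. For example:

    from collections.abc import Iterable

    assert isinstance((1, 2), Iterable)
    assert not isinstance(42, Iterable)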
@@ -65,35 +65,27 @@ class BBUIEventQueue:
self.server = server
self.t = threading.Thread()
- self.t.setDaemon(True)
+ self.t.daemon = True
self.t.run = self.startCallbackHandler
self.t.start()
def getEvent(self):
-
- self.eventQueueLock.acquire()
-
- if len(self.eventQueue) == 0:
- self.eventQueueLock.release()
- return None
-
- item = self.eventQueue.pop(0)
-
- if len(self.eventQueue) == 0:
- self.eventQueueNotify.clear()
-
- self.eventQueueLock.release()
- return item
+ with bb.utils.lock_timeout(self.eventQueueLock):
+ if not self.eventQueue:
+ return None
+ item = self.eventQueue.pop(0)
+ if not self.eventQueue:
+ self.eventQueueNotify.clear()
+ return item
def waitEvent(self, delay):
self.eventQueueNotify.wait(delay)
return self.getEvent()
def queue_event(self, event):
- self.eventQueueLock.acquire()
- self.eventQueue.append(event)
- self.eventQueueNotify.set()
- self.eventQueueLock.release()
+ with bb.utils.lock_timeout(self.eventQueueLock):
+ self.eventQueue.append(event)
+ self.eventQueueNotify.set()
def send_event(self, event):
self.queue_event(pickle.loads(event))
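The getEvent()/queue_event() rewrite swaps manual acquire()/release() pairs for a context manager, so the lock is released even if the body raises. A minimal sketch of a lock_timeout()-style helper, assuming it should fail loudly rather than block forever (the real bb.utils.lock_timeout may differ in timeout and error type):

    import threading
    from contextlib import contextmanager

    @contextmanager
    def lock_timeout(lock, timeout=300):
        # Acquire with a deadline so a stuck holder surfaces as an error
        if not lock.acquire(timeout=timeout):
            raise RuntimeError("Timed out waiting for %r" % lock)
        try:
            yield lock
        finally:
            lock.release()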
diff --git a/lib/bb/ui/uihelper.py b/lib/bb/ui/uihelper.py
index 48d808ae2..82913e0da 100644
--- a/lib/bb/ui/uihelper.py
+++ b/lib/bb/ui/uihelper.py
@@ -49,9 +49,11 @@ class BBUIHelper:
tid = event._fn + ":" + event._task
removetid(event.pid, tid)
self.failed_tasks.append( { 'title' : "%s %s" % (event._package, event._task)})
- elif isinstance(event, bb.runqueue.runQueueTaskStarted):
- self.tasknumber_current = event.stats.completed + event.stats.active + event.stats.failed + 1
+ elif isinstance(event, bb.runqueue.runQueueTaskStarted) or isinstance(event, bb.runqueue.sceneQueueTaskStarted):
+ self.tasknumber_current = event.stats.completed + event.stats.active + event.stats.failed
self.tasknumber_total = event.stats.total
+ self.setscene_current = event.stats.setscene_active + event.stats.setscene_covered + event.stats.setscene_notcovered
+ self.setscene_total = event.stats.setscene_total
self.needUpdate = True
elif isinstance(event, bb.build.TaskProgress):
if event.pid > 0 and event.pid in self.pidmap:
diff --git a/lib/bb/utils.py b/lib/bb/utils.py
index 6ba1d2a37..a4d8d2d1e 100644
--- a/lib/bb/utils.py
+++ b/lib/bb/utils.py
@@ -13,10 +13,12 @@ import errno
import logging
import bb
import bb.msg
+import locale
import multiprocessing
import fcntl
import importlib
-from importlib import machinery
+import importlib.machinery
+import importlib.util
import itertools
import subprocess
import glob
@@ -26,6 +28,11 @@ import errno
import signal
import collections
import copy
+import ctypes
+import random
+import socket
+import struct
+import tempfile
from subprocess import getstatusoutput
from contextlib import contextmanager
from ctypes import cdll
@@ -43,7 +50,7 @@ def clean_context():
def get_context():
return _context
-
+
def set_context(ctx):
_context = ctx
@@ -205,8 +212,8 @@ def explode_dep_versions2(s, *, sort=True):
inversion = True
# This list is based on behavior and supported comparisons from deb, opkg and rpm.
#
- # Even though =<, <<, ==, !=, =>, and >> may not be supported,
- # we list each possibly valid item.
+ # Even though =<, <<, ==, !=, =>, and >> may not be supported,
+ # we list each possibly valid item.
# The build system is responsible for validation of what it supports.
if i.startswith(('<=', '=<', '<<', '==', '!=', '>=', '=>', '>>')):
lastcmp = i[0:2]
@@ -251,7 +258,7 @@ def explode_dep_versions(s):
"""
Take an RDEPENDS style string of format:
"DEPEND1 (optional version) DEPEND2 (optional version) ..."
- skip null value and items appeared in dependancy string multiple times
+ skip null values and items that appear in the dependency string multiple times
and return a dictionary of dependencies and versions.
"""
r = explode_dep_versions2(s)
@@ -340,7 +347,7 @@ def _print_exception(t, value, tb, realfile, text, context):
exception = traceback.format_exception_only(t, value)
error.append('Error executing a python function in %s:\n' % realfile)
- # Strip 'us' from the stack (better_exec call) unless that was where the
+ # Strip 'us' from the stack (better_exec call) unless that was where the
# error came from
if tb.tb_next is not None:
tb = tb.tb_next
@@ -379,7 +386,7 @@ def _print_exception(t, value, tb, realfile, text, context):
error.append("Exception: %s" % ''.join(exception))
- # If the exception is from spwaning a task, let's be helpful and display
+ # If the exception is from spawning a task, let's be helpful and display
# the output (which hopefully includes stderr).
if isinstance(value, subprocess.CalledProcessError) and value.output:
error.append("Subprocess output:")
@@ -400,7 +407,7 @@ def better_exec(code, context, text = None, realfile = "<code>", pythonexception
code = better_compile(code, realfile, realfile)
try:
exec(code, get_context(), context)
- except (bb.BBHandledException, bb.parse.SkipRecipe, bb.data_smart.ExpansionError):
+ except (bb.BBHandledException, bb.parse.SkipRecipe, bb.data_smart.ExpansionError, bb.process.ExecutionError):
# Error already shown so passthrough, no need for traceback
raise
except Exception as e:
@@ -427,12 +434,14 @@ def better_eval(source, locals, extraglobals = None):
return eval(source, ctx, locals)
@contextmanager
-def fileslocked(files):
+def fileslocked(files, *args, **kwargs):
"""Context manager for locking and unlocking file locks."""
locks = []
if files:
for lockfile in files:
- locks.append(bb.utils.lockfile(lockfile))
+ l = bb.utils.lockfile(lockfile, *args, **kwargs)
+ if l is not None:
+ locks.append(l)
try:
yield
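With this change fileslocked() forwards its extra arguments straight to lockfile() and skips any lock that was not actually taken (lockfile() returns None when a non-blocking, non-retrying attempt fails). Usage sketch against the lockfile() signature shown in the next hunk:

    # Hold shared (read) locks on several files for the duration of a block
    with bb.utils.fileslocked(["a.lock", "b.lock"], shared=True):
        pass  # locks held here; released when the block exits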
@@ -451,9 +460,16 @@ def lockfile(name, shared=False, retry=True, block=False):
consider the possibility of sending a signal to the process to break
out - at which point you want block=True rather than retry=True.
"""
+ basename = os.path.basename(name)
+ if len(basename) > 255:
+ root, ext = os.path.splitext(basename)
+ basename = root[:255 - len(ext)] + ext
+
dirname = os.path.dirname(name)
mkdirhier(dirname)
+ name = os.path.join(dirname, basename)
+
if not os.access(dirname, os.W_OK):
logger.error("Unable to acquire lock '%s', directory is not writable",
name)
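The truncation above keeps lock file names under the common 255-byte NAME_MAX while preserving the extension. A quick check of the arithmetic:

    import os

    name = "/tmp/" + "a" * 300 + ".lock"
    root, ext = os.path.splitext(os.path.basename(name))
    basename = root[:255 - len(ext)] + ext
    assert len(basename) == 255 and basename.endswith(".lock")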
@@ -487,7 +503,7 @@ def lockfile(name, shared=False, retry=True, block=False):
return lf
lf.close()
except OSError as e:
- if e.errno == errno.EACCES:
+ if e.errno == errno.EACCES or e.errno == errno.ENAMETOOLONG:
logger.error("Unable to acquire lock '%s', %s",
                     name, e.strerror)
sys.exit(1)
@@ -532,7 +548,12 @@ def md5_file(filename):
Return the hex string representation of the MD5 checksum of filename.
"""
import hashlib
- return _hasher(hashlib.md5(), filename)
+ try:
+ sig = hashlib.new('MD5', usedforsecurity=False)
+ except TypeError:
+ # Some configurations don't appear to support two arguments
+ sig = hashlib.new('MD5')
+ return _hasher(sig, filename)
def sha256_file(filename):
"""
@@ -583,11 +604,25 @@ def preserved_envvars():
v = [
'BBPATH',
'BB_PRESERVE_ENV',
- 'BB_ENV_WHITELIST',
- 'BB_ENV_EXTRAWHITE',
+ 'BB_ENV_PASSTHROUGH_ADDITIONS',
]
return v + preserved_envvars_exported()
+def check_system_locale():
+    """Make sure the required system locale is available and configured"""
+ default_locale = locale.getlocale(locale.LC_CTYPE)
+
+ try:
+ locale.setlocale(locale.LC_CTYPE, ("en_US", "UTF-8"))
+    except locale.Error:
+ sys.exit("Please make sure locale 'en_US.UTF-8' is available on your system")
+ else:
+ locale.setlocale(locale.LC_CTYPE, default_locale)
+
+ if sys.getfilesystemencoding() != "utf-8":
+        sys.exit("Please use a locale setting which supports UTF-8 (such as LANG=en_US.UTF-8).\n"
+                 "Python can't change the filesystem locale after startup, so a UTF-8 locale is needed when Python starts or things won't work.")
+
def filter_environment(good_vars):
"""
Create a pristine environment for bitbake. This will remove variables that
@@ -615,21 +650,21 @@ def filter_environment(good_vars):
def approved_variables():
"""
- Determine and return the list of whitelisted variables which are approved
+ Determine and return the list of variables which are approved
to remain in the environment.
"""
if 'BB_PRESERVE_ENV' in os.environ:
return os.environ.keys()
approved = []
- if 'BB_ENV_WHITELIST' in os.environ:
- approved = os.environ['BB_ENV_WHITELIST'].split()
- approved.extend(['BB_ENV_WHITELIST'])
+ if 'BB_ENV_PASSTHROUGH' in os.environ:
+ approved = os.environ['BB_ENV_PASSTHROUGH'].split()
+ approved.extend(['BB_ENV_PASSTHROUGH'])
else:
approved = preserved_envvars()
- if 'BB_ENV_EXTRAWHITE' in os.environ:
- approved.extend(os.environ['BB_ENV_EXTRAWHITE'].split())
- if 'BB_ENV_EXTRAWHITE' not in approved:
- approved.extend(['BB_ENV_EXTRAWHITE'])
+ if 'BB_ENV_PASSTHROUGH_ADDITIONS' in os.environ:
+ approved.extend(os.environ['BB_ENV_PASSTHROUGH_ADDITIONS'].split())
+ if 'BB_ENV_PASSTHROUGH_ADDITIONS' not in approved:
+ approved.extend(['BB_ENV_PASSTHROUGH_ADDITIONS'])
return approved
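
The renamed variables keep the old semantics: BB_ENV_PASSTHROUGH replaces the default passthrough list outright, while BB_ENV_PASSTHROUGH_ADDITIONS extends it. A sketch of the resulting behaviour (the variable values are hypothetical):

    import os

    # Hypothetical values, for illustration only.
    os.environ["BB_ENV_PASSTHROUGH_ADDITIONS"] = "SSTATE_DIR DL_DIR"

    import bb.utils
    approved = bb.utils.approved_variables()
    # approved now holds the defaults from preserved_envvars() plus
    # SSTATE_DIR, DL_DIR and BB_ENV_PASSTHROUGH_ADDITIONS itself.
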
def clean_environment():
@@ -683,8 +718,8 @@ def remove(path, recurse=False, ionice=False):
return
if recurse:
for name in glob.glob(path):
- if _check_unsafe_delete_path(path):
- raise Exception('bb.utils.remove: called with dangerous path "%s" and recurse=True, refusing to delete!' % path)
+ if _check_unsafe_delete_path(name):
+ raise Exception('bb.utils.remove: called with dangerous path "%s" and recurse=True, refusing to delete!' % name)
# shutil.rmtree(name) would be ideal but its too slow
cmd = []
if ionice:
@@ -710,9 +745,9 @@ def prunedir(topdir, ionice=False):
# but thats possibly insane and suffixes is probably going to be small
#
def prune_suffix(var, suffixes, d):
- """
+ """
See if var ends with any of the suffixes listed and
- remove it if found
+ remove it if found
"""
for suffix in suffixes:
if suffix and var.endswith(suffix):
@@ -723,7 +758,8 @@ def mkdirhier(directory):
"""Create a directory like 'mkdir -p', but does not complain if
directory already exists like os.makedirs
"""
-
+ if '${' in str(directory):
+        bb.fatal("Directory name {} contains unexpanded bitbake variable. This may cause build failures and WORKDIR pollution.".format(directory))
try:
os.makedirs(directory)
except OSError as e:
@@ -742,7 +778,7 @@ def movefile(src, dest, newmtime = None, sstat = None):
if not sstat:
sstat = os.lstat(src)
except Exception as e:
- print("movefile: Stating source file failed...", e)
+        logger.warning("movefile: Stating source file failed... %s", e)
return None
destexists = 1
@@ -770,7 +806,7 @@ def movefile(src, dest, newmtime = None, sstat = None):
os.unlink(src)
return os.lstat(dest)
except Exception as e:
- print("movefile: failed to properly create symlink:", dest, "->", target, e)
+            logger.warning("movefile: failed to properly create symlink: %s -> %s, %s", dest, target, e)
return None
renamefailed = 1
@@ -787,7 +823,7 @@ def movefile(src, dest, newmtime = None, sstat = None):
except Exception as e:
if e.errno != errno.EXDEV:
# Some random error.
- print("movefile: Failed to move", src, "to", dest, e)
+                logger.warning("movefile: Failed to move %s to %s, %s", src, dest, e)
return None
# Invalid cross-device-link 'bind' mounted or actually Cross-Device
@@ -799,13 +835,13 @@ def movefile(src, dest, newmtime = None, sstat = None):
bb.utils.rename(destpath + "#new", destpath)
didcopy = 1
except Exception as e:
- print('movefile: copy', src, '->', dest, 'failed.', e)
+                logger.warning('movefile: copy %s -> %s failed. %s', src, dest, e)
return None
else:
#we don't yet handle special, so we need to fall back to /bin/mv
a = getstatusoutput("/bin/mv -f " + "'" + src + "' '" + dest + "'")
if a[0] != 0:
- print("movefile: Failed to move special file:" + src + "' to '" + dest + "'", a)
+            logger.warning("movefile: Failed to move special file: '%s' to '%s', %s", src, dest, a)
return None # failure
try:
if didcopy:
@@ -813,7 +849,7 @@ def movefile(src, dest, newmtime = None, sstat = None):
os.chmod(destpath, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
os.unlink(src)
except Exception as e:
- print("movefile: Failed to chown/chmod/unlink", dest, e)
+ logger.warning("movefile: Failed to chown/chmod/unlink", dest, e)
return None
if newmtime:
@@ -965,13 +1001,16 @@ def umask(new_mask):
os.umask(current_mask)
def to_boolean(string, default=None):
- """
+ """
Check input string and return boolean value True/False/None
- depending upon the checks
+ depending upon the checks
"""
if not string:
return default
+ if isinstance(string, int):
+ return string != 0
+
normalized = string.lower()
if normalized in ("y", "yes", "1", "true"):
return True
@@ -1103,7 +1142,10 @@ def get_referenced_vars(start_expr, d):
def cpu_count():
- return multiprocessing.cpu_count()
+ try:
+ return len(os.sched_getaffinity(0))
+ except OSError:
+ return multiprocessing.cpu_count()
def nonblockingfd(fd):
fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)
@@ -1178,7 +1220,7 @@ def edit_metadata(meta_lines, variables, varfunc, match_overrides=False):
variables: a list of variable names to look for. Functions
may also be specified, but must be specified with '()' at
the end of the name. Note that the function doesn't have
- any intrinsic understanding of _append, _prepend, _remove,
+ any intrinsic understanding of :append, :prepend, :remove,
or overrides, so these are considered as part of the name.
These values go into a regular expression, so regular
expression syntax is allowed.
@@ -1372,7 +1414,7 @@ def edit_metadata_file(meta_file, variables, varfunc):
return updated
-def edit_bblayers_conf(bblayers_conf, add, remove, edit_cb=None):
+def edit_bblayers_conf(bblayers_conf, add, remove, prepend=None, edit_cb=None):
"""Edit bblayers.conf, adding and/or removing layers
Parameters:
bblayers_conf: path to bblayers.conf file to edit
@@ -1442,7 +1484,10 @@ def edit_bblayers_conf(bblayers_conf, add, remove, edit_cb=None):
for addlayer in addlayers:
if addlayer not in bblayers:
updated = True
- bblayers.append(addlayer)
+ if prepend:
+                bblayers.insert(0, addlayer)
+ else:
+ bblayers.append(addlayer)
del addlayers[:]
if edit_cb:
@@ -1590,33 +1635,89 @@ def set_process_name(name):
except:
pass
+def enable_loopback_networking():
+ # From bits/ioctls.h
+ SIOCGIFFLAGS = 0x8913
+ SIOCSIFFLAGS = 0x8914
+ SIOCSIFADDR = 0x8916
+ SIOCSIFNETMASK = 0x891C
+
+ # if.h
+ IFF_UP = 0x1
+ IFF_RUNNING = 0x40
+
+ # bits/socket.h
+ AF_INET = 2
+
+ # char ifr_name[IFNAMSIZ=16]
+ ifr_name = struct.pack("@16s", b"lo")
+ def netdev_req(fd, req, data = b""):
+ # Pad and add interface name
+ data = ifr_name + data + (b'\x00' * (16 - len(data)))
+ # Return all data after interface name
+ return fcntl.ioctl(fd, req, data)[16:]
+
+ with socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_IP) as sock:
+ fd = sock.fileno()
+
+        # struct sockaddr_in ifr_addr { unsigned short family; uint16_t sin_port; uint32_t in_addr; }
+ req = struct.pack("@H", AF_INET) + struct.pack("=H4B", 0, 127, 0, 0, 1)
+ netdev_req(fd, SIOCSIFADDR, req)
+
+ # short ifr_flags
+ flags = struct.unpack_from('@h', netdev_req(fd, SIOCGIFFLAGS))[0]
+ flags |= IFF_UP | IFF_RUNNING
+ netdev_req(fd, SIOCSIFFLAGS, struct.pack('@h', flags))
+
+ # struct sockaddr_in ifr_netmask
+ req = struct.pack("@H", AF_INET) + struct.pack("=H4B", 0, 255, 0, 0, 0)
+ netdev_req(fd, SIOCSIFNETMASK, req)
+
+def disable_network(uid=None, gid=None):
+ """
+ Disable networking in the current process if the kernel supports it, else
+ just return after logging to debug. To do this we need to create a new user
+ namespace, then map back to the original uid/gid.
+ """
+ libc = ctypes.CDLL('libc.so.6')
+
+ # From sched.h
+ # New user namespace
+ CLONE_NEWUSER = 0x10000000
+ # New network namespace
+ CLONE_NEWNET = 0x40000000
+
+ if uid is None:
+ uid = os.getuid()
+ if gid is None:
+ gid = os.getgid()
+
+ ret = libc.unshare(CLONE_NEWNET | CLONE_NEWUSER)
+ if ret != 0:
+ logger.debug("System doesn't support disabling network without admin privs")
+ return
+ with open("/proc/self/uid_map", "w") as f:
+ f.write("%s %s 1" % (uid, uid))
+ with open("/proc/self/setgroups", "w") as f:
+ f.write("deny")
+ with open("/proc/self/gid_map", "w") as f:
+ f.write("%s %s 1" % (gid, gid))
+
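
A usage sketch, not part of the patch: a forked child calls disable_network() before running work that must not reach the network. After unshare(CLONE_NEWNET) the child only sees a fresh, empty network namespace, which is why enable_loopback_networking() above exists to bring 127.0.0.1 back up when loopback is still needed:

    import os
    import bb.utils

    pid = os.fork()
    if pid == 0:
        # Child: drop network access if the kernel supports unprivileged
        # user/network namespaces; otherwise this logs to debug and returns.
        bb.utils.disable_network()
        # Assumes the namespace was created; inside it the process holds
        # the capabilities needed to configure the new loopback device.
        bb.utils.enable_loopback_networking()
        # ... run network-isolated work here ...
        os._exit(0)
    os.waitpid(pid, 0)
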
def export_proxies(d):
     """ export common proxy variables from datastore to environment """
+    from bb.fetch2 import get_fetcher_environment
- import os
-
- variables = ['http_proxy', 'HTTP_PROXY', 'https_proxy', 'HTTPS_PROXY',
- 'ftp_proxy', 'FTP_PROXY', 'no_proxy', 'NO_PROXY',
- 'GIT_PROXY_COMMAND']
- exported = False
-
- for v in variables:
- if v in os.environ.keys():
- exported = True
- else:
- v_proxy = d.getVar(v)
- if v_proxy is not None:
- os.environ[v] = v_proxy
- exported = True
-
- return exported
-
+ newenv = get_fetcher_environment(d)
+ for v in newenv:
+ os.environ[v] = newenv[v]
def load_plugins(logger, plugins, pluginpath):
def load_plugin(name):
logger.debug('Loading plugin %s' % name)
spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] )
if spec:
- return spec.loader.load_module()
+ mod = importlib.util.module_from_spec(spec)
+ spec.loader.exec_module(mod)
+ return mod
logger.debug('Loading plugins from %s...' % pluginpath)
@@ -1681,3 +1782,90 @@ def rename(src, dst):
shutil.move(src, dst)
else:
raise err
+
+@contextmanager
+def environment(**envvars):
+ """
+ Context manager to selectively update the environment with the specified mapping.
+ """
+ backup = dict(os.environ)
+ try:
+ os.environ.update(envvars)
+ yield
+ finally:
+ for var in envvars:
+ if var in backup:
+ os.environ[var] = backup[var]
+ elif var in os.environ:
+ del os.environ[var]
+
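
A usage sketch for the context manager above: the overrides are visible for the duration of the block and are restored, or removed if they did not exist before, even when the block raises:

    import os
    import bb.utils

    with bb.utils.environment(LC_ALL="C", SOURCE_DATE_EPOCH="1"):
        # Overrides are in effect inside the block.
        assert os.environ["LC_ALL"] == "C"
    # Prior values are restored here; variables that were absent are removed.
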
+def is_local_uid(uid=None):
+ """
+ Check whether uid is a local one or not.
+ Can't use pwd module since it gets all UIDs, not local ones only.
+ """
+    if uid is None:
+ uid = os.getuid()
+ with open('/etc/passwd', 'r') as f:
+ for line in f:
+ line_split = line.split(':')
+ if len(line_split) < 3:
+ continue
+ if str(uid) == line_split[2]:
+ return True
+ return False
+
+def mkstemp(suffix=None, prefix=None, dir=None, text=False):
+ """
+ Generates a unique filename, independent of time.
+
+ mkstemp() in glibc (at least) generates unique file names based on the
+    current system time. When combined with highly parallel builds
+    operating over NFS (e.g. shared sstate/downloads), this can result in
+ conflicts and race conditions.
+
+ This function adds additional entropy to the file name so that a collision
+ is independent of time and thus extremely unlikely.
+ """
+ entropy = "".join(random.choices("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890", k=20))
+ if prefix:
+ prefix = prefix + entropy
+ else:
+ prefix = tempfile.gettempprefix() + entropy
+ return tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=dir, text=text)
+
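
Usage matches tempfile.mkstemp(); the only difference is the 20 random characters mixed into the prefix so that two builders writing into the same NFS directory at the same instant cannot collide. A short sketch (the paths are hypothetical):

    import os
    import bb.utils

    fd, path = bb.utils.mkstemp(suffix=".tmp", dir="/tmp")
    try:
        os.write(fd, b"scratch data")
    finally:
        os.close(fd)
        os.unlink(path)
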
+def path_is_descendant(descendant, ancestor):
+ """
+ Returns True if the path `descendant` is a descendant of `ancestor`
+ (including being equivalent to `ancestor` itself). Otherwise returns False.
+ Correctly accounts for symlinks, bind mounts, etc. by using
+ os.path.samestat() to compare paths
+
+ May raise any exception that os.stat() raises
+ """
+
+ ancestor_stat = os.stat(ancestor)
+
+ # Recurse up each directory component of the descendant to see if it is
+ # equivalent to the ancestor
+ check_dir = os.path.abspath(descendant).rstrip("/")
+ while check_dir:
+ check_stat = os.stat(check_dir)
+ if os.path.samestat(check_stat, ancestor_stat):
+ return True
+ check_dir = os.path.dirname(check_dir).rstrip("/")
+
+ return False
+
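
Because each parent directory is compared with os.path.samestat() rather than by string prefix, bind mounts and symlinks resolve correctly and a sibling that merely shares a name prefix is rejected. An illustrative sketch; the paths are hypothetical and must exist, since os.stat() is called on them:

    import bb.utils

    bb.utils.path_is_descendant("/srv/build/tmp/work", "/srv/build")  # True
    bb.utils.path_is_descendant("/srv/build", "/srv/build")           # True (equal paths)
    bb.utils.path_is_descendant("/srv/build-other", "/srv/build")     # False despite the shared prefix
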
+# If we don't have a timeout of some kind and a process/thread exits badly (for example
+# OOM killed) and held a lock, we'd just hang in the lock futex forever. It is better
+# we exit at some point than hang. 5 minutes with no progress means we're probably deadlocked.
+@contextmanager
+def lock_timeout(lock):
+ held = lock.acquire(timeout=5*60)
+ try:
+ if not held:
+ os._exit(1)
+ yield held
+ finally:
+ lock.release()
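
Callers use the wrapper in place of a bare "with lock:"; the semantics are identical except that a five-minute stall exits the process instead of hanging:

    import threading
    import bb.utils

    lock = threading.Lock()

    with bb.utils.lock_timeout(lock):
        # Critical section. If the lock cannot be acquired within 5 minutes
        # (e.g. the holder was OOM-killed), the process exits via os._exit(1).
        pass
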
diff --git a/lib/bb/xattr.py b/lib/bb/xattr.py
new file mode 100755
index 000000000..7b634944a
--- /dev/null
+++ b/lib/bb/xattr.py
@@ -0,0 +1,126 @@
+#! /usr/bin/env python3
+#
+# Copyright 2023 by Garmin Ltd. or its subsidiaries
+#
+# SPDX-License-Identifier: MIT
+
+import sys
+import ctypes
+import os
+import errno
+
+libc = ctypes.CDLL("libc.so.6", use_errno=True)
+fsencoding = sys.getfilesystemencoding()
+
+
+libc.listxattr.argtypes = [ctypes.c_char_p, ctypes.c_char_p, ctypes.c_size_t]
+libc.llistxattr.argtypes = [ctypes.c_char_p, ctypes.c_char_p, ctypes.c_size_t]
+
+
+def listxattr(path, follow=True):
+ func = libc.listxattr if follow else libc.llistxattr
+
+ os_path = os.fsencode(path)
+
+ while True:
+ length = func(os_path, None, 0)
+
+ if length < 0:
+ err = ctypes.get_errno()
+ raise OSError(err, os.strerror(err), str(path))
+
+ if length == 0:
+ return []
+
+ arr = ctypes.create_string_buffer(length)
+
+ read_length = func(os_path, arr, length)
+ if read_length != length:
+ # Race!
+ continue
+
+ return [a.decode(fsencoding) for a in arr.raw.split(b"\x00") if a]
+
+
+libc.getxattr.argtypes = [
+ ctypes.c_char_p,
+ ctypes.c_char_p,
+ ctypes.c_char_p,
+ ctypes.c_size_t,
+]
+libc.lgetxattr.argtypes = [
+ ctypes.c_char_p,
+ ctypes.c_char_p,
+ ctypes.c_char_p,
+ ctypes.c_size_t,
+]
+
+
+def getxattr(path, name, follow=True):
+ func = libc.getxattr if follow else libc.lgetxattr
+
+ os_path = os.fsencode(path)
+ os_name = os.fsencode(name)
+
+ while True:
+ length = func(os_path, os_name, None, 0)
+
+ if length < 0:
+ err = ctypes.get_errno()
+ if err == errno.ENODATA:
+ return None
+ raise OSError(err, os.strerror(err), str(path))
+
+ if length == 0:
+            return b""
+
+ arr = ctypes.create_string_buffer(length)
+
+ read_length = func(os_path, os_name, arr, length)
+ if read_length != length:
+ # Race!
+ continue
+
+ return arr.raw
+
+
+def get_all_xattr(path, follow=True):
+ attrs = {}
+
+ names = listxattr(path, follow)
+
+ for name in names:
+ value = getxattr(path, name, follow)
+ if value is None:
+ # This can happen if a value is erased after listxattr is called,
+ # so ignore it
+ continue
+ attrs[name] = value
+
+ return attrs
+
+
+def main():
+ import argparse
+ from pathlib import Path
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument("path", help="File Path", type=Path)
+
+ args = parser.parse_args()
+
+ attrs = get_all_xattr(args.path)
+
+ for name, value in attrs.items():
+ try:
+ value = value.decode(fsencoding)
+ except UnicodeDecodeError:
+ pass
+
+ print(f"{name} = {value}")
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/lib/bblayers/__init__.py b/lib/bblayers/__init__.py
index 4e7c09da0..78efd2975 100644
--- a/lib/bblayers/__init__.py
+++ b/lib/bblayers/__init__.py
@@ -1,4 +1,6 @@
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
diff --git a/lib/bblayers/action.py b/lib/bblayers/action.py
index f05f5d330..57d7195cd 100644
--- a/lib/bblayers/action.py
+++ b/lib/bblayers/action.py
@@ -1,4 +1,6 @@
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -9,6 +11,7 @@ import shutil
import sys
import tempfile
+from bb.cookerdata import findTopdir
import bb.utils
from bblayers.common import LayerPlugin
@@ -35,7 +38,7 @@ class ActionPlugin(LayerPlugin):
sys.stderr.write("Specified layer directory %s doesn't contain a conf/layer.conf file\n" % layerdir)
return 1
- bblayers_conf = os.path.join('conf', 'bblayers.conf')
+    bblayers_conf = os.path.join(findTopdir(), 'conf', 'bblayers.conf')
if not os.path.exists(bblayers_conf):
sys.stderr.write("Unable to find bblayers.conf\n")
return 1
@@ -46,14 +49,16 @@ class ActionPlugin(LayerPlugin):
shutil.copy2(bblayers_conf, backup)
try:
- notadded, _ = bb.utils.edit_bblayers_conf(bblayers_conf, layerdirs, None)
+ notadded, _ = bb.utils.edit_bblayers_conf(bblayers_conf, layerdirs, None, args.prepend)
+ self.tinfoil.modified_files()
if not (args.force or notadded):
try:
self.tinfoil.run_command('parseConfiguration')
except (bb.tinfoil.TinfoilUIException, bb.BBHandledException):
# Restore the back up copy of bblayers.conf
shutil.copy2(backup, bblayers_conf)
- bb.fatal("Parse failure with the specified layer added, aborting.")
+ self.tinfoil.modified_files()
+ bb.fatal("Parse failure with the specified layer added, exiting.")
else:
for item in notadded:
sys.stderr.write("Specified layer %s is already in BBLAYERS\n" % item)
@@ -63,7 +68,7 @@ class ActionPlugin(LayerPlugin):
def do_remove_layer(self, args):
"""Remove one or more layers from bblayers.conf."""
- bblayers_conf = os.path.join('conf', 'bblayers.conf')
+    bblayers_conf = os.path.join(findTopdir(), 'conf', 'bblayers.conf')
if not os.path.exists(bblayers_conf):
sys.stderr.write("Unable to find bblayers.conf\n")
return 1
@@ -78,6 +83,7 @@ class ActionPlugin(LayerPlugin):
layerdir = os.path.abspath(item)
layerdirs.append(layerdir)
(_, notremoved) = bb.utils.edit_bblayers_conf(bblayers_conf, None, layerdirs)
+ self.tinfoil.modified_files()
if notremoved:
for item in notremoved:
sys.stderr.write("No layers matching %s found in BBLAYERS\n" % item)
@@ -237,6 +243,9 @@ build results (as the layer priority order has effectively changed).
if not entry_found:
logger.warning("File %s does not match the flattened layer's BBFILES setting, you may need to edit conf/layer.conf or move the file elsewhere" % f1full)
+ self.tinfoil.modified_files()
+
+
def get_file_layer(self, filename):
layerdir = self.get_file_layerdir(filename)
if layerdir:
@@ -258,6 +267,7 @@ build results (as the layer priority order has effectively changed).
def register_commands(self, sp):
parser_add_layer = self.add_command(sp, 'add-layer', self.do_add_layer, parserecipes=False)
parser_add_layer.add_argument('layerdir', nargs='+', help='Layer directory/directories to add')
+ parser_add_layer.add_argument('--prepend', action='store_true', help='Prepend layer directory/directories')
parser_remove_layer = self.add_command(sp, 'remove-layer', self.do_remove_layer, parserecipes=False)
parser_remove_layer.add_argument('layerdir', nargs='+', help='Layer directory/directories to remove (wildcards allowed, enclose in quotes to avoid shell expansion)')
diff --git a/lib/bblayers/common.py b/lib/bblayers/common.py
index 6c76ef350..f7b9cee37 100644
--- a/lib/bblayers/common.py
+++ b/lib/bblayers/common.py
@@ -1,4 +1,6 @@
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
diff --git a/lib/bblayers/layerindex.py b/lib/bblayers/layerindex.py
index 793651620..ba91fac66 100644
--- a/lib/bblayers/layerindex.py
+++ b/lib/bblayers/layerindex.py
@@ -1,4 +1,6 @@
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -47,6 +49,31 @@ class LayerIndexPlugin(ActionPlugin):
else:
logger.plain("Repository %s needs to be fetched" % url)
return subdir, layername, layerdir
+ elif os.path.exists(repodir) and branch:
+ """
+ If the repo is already cloned, ensure it is on the correct branch,
+ switching branches if necessary and possible.
+ """
+ base_cmd = ['git', '--git-dir=%s/.git' % repodir, '--work-tree=%s' % repodir]
+ cmd = base_cmd + ['branch']
+ completed_proc = subprocess.run(cmd, text=True, capture_output=True)
+ if completed_proc.returncode:
+                logger.error("Unable to validate repo %s (%s)" % (repodir, completed_proc.stderr))
+ return None, None, None
+ else:
+ if branch != completed_proc.stdout[2:-1]:
+ cmd = base_cmd + ['status', '--short']
+ completed_proc = subprocess.run(cmd, text=True, capture_output=True)
+ if completed_proc.stdout.count('\n') != 0:
+ logger.warning("There are uncommitted changes in repo %s" % repodir)
+ cmd = base_cmd + ['checkout', branch]
+ completed_proc = subprocess.run(cmd, text=True, capture_output=True)
+ if completed_proc.returncode:
+ # Could be due to original shallow clone on a different branch for example
+ logger.error("Unable to automatically switch %s to desired branch '%s' (%s)"
+ % (repodir, branch, completed_proc.stderr))
+ return None, None, None
+ return subdir, layername, layerdir
elif os.path.exists(layerdir):
return subdir, layername, layerdir
else:
diff --git a/lib/bblayers/query.py b/lib/bblayers/query.py
index 947422a72..bfc18a759 100644
--- a/lib/bblayers/query.py
+++ b/lib/bblayers/query.py
@@ -1,4 +1,6 @@
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -27,12 +29,12 @@ class QueryPlugin(LayerPlugin):
def do_show_layers(self, args):
"""show current configured layers."""
- logger.plain("%s %s %s" % ("layer".ljust(20), "path".ljust(40), "priority"))
- logger.plain('=' * 74)
+ logger.plain("%s %s %s" % ("layer".ljust(20), "path".ljust(70), "priority"))
+ logger.plain('=' * 104)
for layer, _, regex, pri in self.tinfoil.cooker.bbfile_config_priorities:
layerdir = self.bbfile_collections.get(layer, None)
- layername = self.get_layer_name(layerdir)
- logger.plain("%s %s %d" % (layername.ljust(20), layerdir.ljust(40), pri))
+ layername = layer
+ logger.plain("%s %s %s" % (layername.ljust(20), layerdir.ljust(70), pri))
def version_str(self, pe, pv, pr = None):
verstr = "%s" % pv
@@ -55,11 +57,12 @@ are overlayed will also be listed, with a " (skipped)" suffix.
# Check for overlayed .bbclass files
classes = collections.defaultdict(list)
for layerdir in self.bblayers:
- classdir = os.path.join(layerdir, 'classes')
- if os.path.exists(classdir):
- for classfile in os.listdir(classdir):
- if os.path.splitext(classfile)[1] == '.bbclass':
- classes[classfile].append(classdir)
+ for c in ["classes-global", "classes-recipe", "classes"]:
+ classdir = os.path.join(layerdir, c)
+ if os.path.exists(classdir):
+ for classfile in os.listdir(classdir):
+ if os.path.splitext(classfile)[1] == '.bbclass':
+ classes[classfile].append(classdir)
# Locating classes and other files is a bit more complicated than recipes -
# layer priority is not a factor; instead BitBake uses the first matching
@@ -122,9 +125,14 @@ skipped recipes will also be listed, with a " (skipped)" suffix.
if inherits:
bbpath = str(self.tinfoil.config_data.getVar('BBPATH'))
for classname in inherits:
- classfile = 'classes/%s.bbclass' % classname
- if not bb.utils.which(bbpath, classfile, history=False):
- logger.error('No class named %s found in BBPATH', classfile)
+ found = False
+ for c in ["classes-global", "classes-recipe", "classes"]:
+ cfile = c + '/%s.bbclass' % classname
+ if bb.utils.which(bbpath, cfile, history=False):
+ found = True
+ break
+ if not found:
+ logger.error('No class named %s found in BBPATH', classname)
sys.exit(1)
pkg_pn = self.tinfoil.cooker.recipecaches[mc].pkg_pn
@@ -154,7 +162,7 @@ skipped recipes will also be listed, with a " (skipped)" suffix.
def print_item(f, pn, ver, layer, ispref):
if not selected_layer or layer == selected_layer:
if not bare and f in skiplist:
- skipped = ' (skipped)'
+ skipped = ' (skipped: %s)' % self.tinfoil.cooker.skiplist[f].skipreason
else:
skipped = ''
if show_filenames:
@@ -172,7 +180,7 @@ skipped recipes will also be listed, with a " (skipped)" suffix.
logger.plain(" %s %s%s", layer.ljust(20), ver, skipped)
global_inherit = (self.tinfoil.config_data.getVar('INHERIT') or "").split()
- cls_re = re.compile('classes/')
+ cls_re = re.compile('classes.*/')
preffiles = []
show_unique_pn = []
@@ -274,7 +282,10 @@ Lists recipes with the bbappends that apply to them as subitems.
else:
logger.plain('=== Appended recipes ===')
- pnlist = list(self.tinfoil.cooker_data.pkg_pn.keys())
+
+ cooker_data = self.tinfoil.cooker.recipecaches[args.mc]
+
+ pnlist = list(cooker_data.pkg_pn.keys())
pnlist.sort()
appends = False
for pn in pnlist:
@@ -287,7 +298,7 @@ Lists recipes with the bbappends that apply to them as subitems.
if not found:
continue
- if self.show_appends_for_pn(pn):
+ if self.show_appends_for_pn(pn, cooker_data, args.mc):
appends = True
if not args.pnspec and self.show_appends_for_skipped():
@@ -296,8 +307,10 @@ Lists recipes with the bbappends that apply to them as subitems.
if not appends:
logger.plain('No append files found')
- def show_appends_for_pn(self, pn):
- filenames = self.tinfoil.cooker_data.pkg_pn[pn]
+ def show_appends_for_pn(self, pn, cooker_data, mc):
+ filenames = cooker_data.pkg_pn[pn]
+ if mc:
+ pn = "mc:%s:%s" % (mc, pn)
best = self.tinfoil.find_best_provider(pn)
best_filename = os.path.basename(best[3])
@@ -405,7 +418,7 @@ NOTE: .bbappend files can impact the dependencies.
self.check_cross_depends("RRECOMMENDS", layername, f, best, args.filenames, ignore_layers)
# The inherit class
- cls_re = re.compile('classes/')
+ cls_re = re.compile('classes.*/')
if f in self.tinfoil.cooker_data.inherits:
inherits = self.tinfoil.cooker_data.inherits[f]
for cls in inherits:
@@ -441,10 +454,10 @@ NOTE: .bbappend files can impact the dependencies.
line = fnfile.readline()
# The "require/include xxx" in conf/machine/*.conf, .inc and .bbclass
- conf_re = re.compile(".*/conf/machine/[^\/]*\.conf$")
- inc_re = re.compile(".*\.inc$")
+ conf_re = re.compile(r".*/conf/machine/[^\/]*\.conf$")
+ inc_re = re.compile(r".*\.inc$")
# The "inherit xxx" in .bbclass
- bbclass_re = re.compile(".*\.bbclass$")
+ bbclass_re = re.compile(r".*\.bbclass$")
for layerdir in self.bblayers:
layername = self.get_layer_name(layerdir)
for dirpath, dirnames, filenames in os.walk(layerdir):
@@ -522,6 +535,7 @@ NOTE: .bbappend files can impact the dependencies.
parser_show_appends = self.add_command(sp, 'show-appends', self.do_show_appends)
parser_show_appends.add_argument('pnspec', nargs='*', help='optional recipe name specification (wildcards allowed, enclose in quotes to avoid shell expansion)')
+ parser_show_appends.add_argument('--mc', help='use specified multiconfig', default='')
parser_show_cross_depends = self.add_command(sp, 'show-cross-depends', self.do_show_cross_depends)
parser_show_cross_depends.add_argument('-f', '--filenames', help='show full file path', action='store_true')
diff --git a/lib/bs4/tests/test_tree.py b/lib/bs4/tests/test_tree.py
index 8e5c66426..cf0f1abe0 100644
--- a/lib/bs4/tests/test_tree.py
+++ b/lib/bs4/tests/test_tree.py
@@ -585,7 +585,7 @@ class SiblingTest(TreeTest):
</html>'''
# All that whitespace looks good but makes the tests more
# difficult. Get rid of it.
- markup = re.compile("\n\s*").sub("", markup)
+ markup = re.compile(r"\n\s*").sub("", markup)
self.tree = self.soup(markup)
diff --git a/lib/codegen.py b/lib/codegen.py
index 62a6748c4..018b28317 100644
--- a/lib/codegen.py
+++ b/lib/codegen.py
@@ -392,14 +392,8 @@ class SourceGenerator(NodeVisitor):
def visit_Name(self, node):
self.write(node.id)
- def visit_Str(self, node):
- self.write(repr(node.s))
-
- def visit_Bytes(self, node):
- self.write(repr(node.s))
-
- def visit_Num(self, node):
- self.write(repr(node.n))
+ def visit_Constant(self, node):
+ self.write(repr(node.value))
def visit_Tuple(self, node):
self.write('(')
diff --git a/lib/hashserv/__init__.py b/lib/hashserv/__init__.py
index 5f2e101e5..74367eb6b 100644
--- a/lib/hashserv/__init__.py
+++ b/lib/hashserv/__init__.py
@@ -5,129 +5,102 @@
import asyncio
from contextlib import closing
-import re
-import sqlite3
import itertools
import json
+from collections import namedtuple
+from urllib.parse import urlparse
+from bb.asyncrpc.client import parse_address, ADDR_TYPE_UNIX, ADDR_TYPE_WS
+
+User = namedtuple("User", ("username", "permissions"))
+
+def create_server(
+ addr,
+ dbname,
+ *,
+ sync=True,
+ upstream=None,
+ read_only=False,
+ db_username=None,
+ db_password=None,
+ anon_perms=None,
+ admin_username=None,
+ admin_password=None,
+):
+ def sqlite_engine():
+ from .sqlite import DatabaseEngine
+
+ return DatabaseEngine(dbname, sync)
+
+ def sqlalchemy_engine():
+ from .sqlalchemy import DatabaseEngine
+
+ return DatabaseEngine(dbname, db_username, db_password)
-UNIX_PREFIX = "unix://"
-
-ADDR_TYPE_UNIX = 0
-ADDR_TYPE_TCP = 1
-
-# The Python async server defaults to a 64K receive buffer, so we hardcode our
-# maximum chunk size. It would be better if the client and server reported to
-# each other what the maximum chunk sizes were, but that will slow down the
-# connection setup with a round trip delay so I'd rather not do that unless it
-# is necessary
-DEFAULT_MAX_CHUNK = 32 * 1024
-
-TABLE_DEFINITION = (
- ("method", "TEXT NOT NULL"),
- ("outhash", "TEXT NOT NULL"),
- ("taskhash", "TEXT NOT NULL"),
- ("unihash", "TEXT NOT NULL"),
- ("created", "DATETIME"),
-
- # Optional fields
- ("owner", "TEXT"),
- ("PN", "TEXT"),
- ("PV", "TEXT"),
- ("PR", "TEXT"),
- ("task", "TEXT"),
- ("outhash_siginfo", "TEXT"),
-)
-
-TABLE_COLUMNS = tuple(name for name, _ in TABLE_DEFINITION)
-
-def setup_database(database, sync=True):
- db = sqlite3.connect(database)
- db.row_factory = sqlite3.Row
-
- with closing(db.cursor()) as cursor:
- cursor.execute('''
- CREATE TABLE IF NOT EXISTS tasks_v2 (
- id INTEGER PRIMARY KEY AUTOINCREMENT,
- %s
- UNIQUE(method, outhash, taskhash)
- )
- ''' % " ".join("%s %s," % (name, typ) for name, typ in TABLE_DEFINITION))
- cursor.execute('PRAGMA journal_mode = WAL')
- cursor.execute('PRAGMA synchronous = %s' % ('NORMAL' if sync else 'OFF'))
-
- # Drop old indexes
- cursor.execute('DROP INDEX IF EXISTS taskhash_lookup')
- cursor.execute('DROP INDEX IF EXISTS outhash_lookup')
-
- # Create new indexes
- cursor.execute('CREATE INDEX IF NOT EXISTS taskhash_lookup_v2 ON tasks_v2 (method, taskhash, created)')
- cursor.execute('CREATE INDEX IF NOT EXISTS outhash_lookup_v2 ON tasks_v2 (method, outhash)')
-
- return db
-
-
-def parse_address(addr):
- if addr.startswith(UNIX_PREFIX):
- return (ADDR_TYPE_UNIX, (addr[len(UNIX_PREFIX):],))
- else:
- m = re.match(r'\[(?P<host>[^\]]*)\]:(?P<port>\d+)$', addr)
- if m is not None:
- host = m.group('host')
- port = m.group('port')
- else:
- host, port = addr.split(':')
-
- return (ADDR_TYPE_TCP, (host, int(port)))
-
+ from . import server
-def chunkify(msg, max_chunk):
- if len(msg) < max_chunk - 1:
- yield ''.join((msg, "\n"))
+ if "://" in dbname:
+ db_engine = sqlalchemy_engine()
else:
- yield ''.join((json.dumps({
- 'chunk-stream': None
- }), "\n"))
+ db_engine = sqlite_engine()
- args = [iter(msg)] * (max_chunk - 1)
- for m in map(''.join, itertools.zip_longest(*args, fillvalue='')):
- yield ''.join(itertools.chain(m, "\n"))
- yield "\n"
+ if anon_perms is None:
+ anon_perms = server.DEFAULT_ANON_PERMS
-
-def create_server(addr, dbname, *, sync=True, upstream=None, read_only=False):
- from . import server
- db = setup_database(dbname, sync=sync)
- s = server.Server(db, upstream=upstream, read_only=read_only)
+ s = server.Server(
+ db_engine,
+ upstream=upstream,
+ read_only=read_only,
+ anon_perms=anon_perms,
+ admin_username=admin_username,
+ admin_password=admin_password,
+ )
(typ, a) = parse_address(addr)
if typ == ADDR_TYPE_UNIX:
s.start_unix_server(*a)
+ elif typ == ADDR_TYPE_WS:
+ url = urlparse(a[0])
+ s.start_websocket_server(url.hostname, url.port)
else:
s.start_tcp_server(*a)
return s
-def create_client(addr):
+def create_client(addr, username=None, password=None):
from . import client
- c = client.Client()
- (typ, a) = parse_address(addr)
- if typ == ADDR_TYPE_UNIX:
- c.connect_unix(*a)
- else:
- c.connect_tcp(*a)
+ c = client.Client(username, password)
+
+ try:
+ (typ, a) = parse_address(addr)
+ if typ == ADDR_TYPE_UNIX:
+ c.connect_unix(*a)
+ elif typ == ADDR_TYPE_WS:
+ c.connect_websocket(*a)
+ else:
+ c.connect_tcp(*a)
+ return c
+ except Exception as e:
+ c.close()
+ raise e
- return c
-async def create_async_client(addr):
+async def create_async_client(addr, username=None, password=None):
from . import client
- c = client.AsyncClient()
- (typ, a) = parse_address(addr)
- if typ == ADDR_TYPE_UNIX:
- await c.connect_unix(*a)
- else:
- await c.connect_tcp(*a)
+ c = client.AsyncClient(username, password)
+
+ try:
+ (typ, a) = parse_address(addr)
+ if typ == ADDR_TYPE_UNIX:
+ await c.connect_unix(*a)
+ elif typ == ADDR_TYPE_WS:
+ await c.connect_websocket(*a)
+ else:
+ await c.connect_tcp(*a)
- return c
+ return c
+ except Exception as e:
+ await c.close()
+ raise e
diff --git a/lib/hashserv/client.py b/lib/hashserv/client.py
index 531170967..0b254bedd 100644
--- a/lib/hashserv/client.py
+++ b/lib/hashserv/client.py
@@ -3,12 +3,10 @@
# SPDX-License-Identifier: GPL-2.0-only
#
-import asyncio
-import json
import logging
import socket
-import os
import bb.asyncrpc
+import json
from . import create_async_client
@@ -18,107 +16,331 @@ logger = logging.getLogger("hashserv.client")
class AsyncClient(bb.asyncrpc.AsyncClient):
MODE_NORMAL = 0
MODE_GET_STREAM = 1
+ MODE_EXIST_STREAM = 2
- def __init__(self):
- super().__init__('OEHASHEQUIV', '1.1', logger)
+ def __init__(self, username=None, password=None):
+ super().__init__("OEHASHEQUIV", "1.1", logger)
self.mode = self.MODE_NORMAL
+ self.username = username
+ self.password = password
+ self.saved_become_user = None
async def setup_connection(self):
await super().setup_connection()
- cur_mode = self.mode
self.mode = self.MODE_NORMAL
- await self._set_mode(cur_mode)
+ if self.username:
+ # Save off become user temporarily because auth() resets it
+ become = self.saved_become_user
+ await self.auth(self.username, self.password)
- async def send_stream(self, msg):
+ if become:
+ await self.become_user(become)
+
+ async def send_stream(self, mode, msg):
async def proc():
- self.writer.write(("%s\n" % msg).encode("utf-8"))
- await self.writer.drain()
- l = await self.reader.readline()
- if not l:
- raise ConnectionError("Connection closed")
- return l.decode("utf-8").rstrip()
+ await self._set_mode(mode)
+ await self.socket.send(msg)
+ return await self.socket.recv()
return await self._send_wrapper(proc)
+ async def invoke(self, *args, **kwargs):
+ # It's OK if connection errors cause a failure here, because the mode
+ # is also reset to normal on a new connection
+ await self._set_mode(self.MODE_NORMAL)
+ return await super().invoke(*args, **kwargs)
+
async def _set_mode(self, new_mode):
- if new_mode == self.MODE_NORMAL and self.mode == self.MODE_GET_STREAM:
- r = await self.send_stream("END")
+ async def stream_to_normal():
+ await self.socket.send("END")
+ return await self.socket.recv()
+
+ async def normal_to_stream(command):
+ r = await self.invoke({command: None})
if r != "ok":
- raise ConnectionError("Bad response from server %r" % r)
- elif new_mode == self.MODE_GET_STREAM and self.mode == self.MODE_NORMAL:
- r = await self.send_message({"get-stream": None})
+ raise ConnectionError(
+ f"Unable to transition to stream mode: Bad response from server {r!r}"
+ )
+
+ self.logger.debug("Mode is now %s", command)
+
+ if new_mode == self.mode:
+ return
+
+ self.logger.debug("Transitioning mode %s -> %s", self.mode, new_mode)
+
+ # Always transition to normal mode before switching to any other mode
+ if self.mode != self.MODE_NORMAL:
+ r = await self._send_wrapper(stream_to_normal)
if r != "ok":
- raise ConnectionError("Bad response from server %r" % r)
- elif new_mode != self.mode:
- raise Exception(
- "Undefined mode transition %r -> %r" % (self.mode, new_mode)
- )
+ self.check_invoke_error(r)
+ raise ConnectionError(
+ f"Unable to transition to normal mode: Bad response from server {r!r}"
+ )
+ self.logger.debug("Mode is now normal")
+
+ if new_mode == self.MODE_GET_STREAM:
+ await normal_to_stream("get-stream")
+ elif new_mode == self.MODE_EXIST_STREAM:
+ await normal_to_stream("exists-stream")
+ elif new_mode != self.MODE_NORMAL:
+            raise Exception(f"Undefined mode transition {self.mode!r} -> {new_mode!r}")
self.mode = new_mode
async def get_unihash(self, method, taskhash):
- await self._set_mode(self.MODE_GET_STREAM)
- r = await self.send_stream("%s %s" % (method, taskhash))
+ r = await self.send_stream(self.MODE_GET_STREAM, "%s %s" % (method, taskhash))
if not r:
return None
return r
async def report_unihash(self, taskhash, method, outhash, unihash, extra={}):
- await self._set_mode(self.MODE_NORMAL)
m = extra.copy()
m["taskhash"] = taskhash
m["method"] = method
m["outhash"] = outhash
m["unihash"] = unihash
- return await self.send_message({"report": m})
+ return await self.invoke({"report": m})
async def report_unihash_equiv(self, taskhash, method, unihash, extra={}):
- await self._set_mode(self.MODE_NORMAL)
m = extra.copy()
m["taskhash"] = taskhash
m["method"] = method
m["unihash"] = unihash
- return await self.send_message({"report-equiv": m})
+ return await self.invoke({"report-equiv": m})
async def get_taskhash(self, method, taskhash, all_properties=False):
- await self._set_mode(self.MODE_NORMAL)
- return await self.send_message(
+ return await self.invoke(
{"get": {"taskhash": taskhash, "method": method, "all": all_properties}}
)
- async def get_outhash(self, method, outhash, taskhash):
- await self._set_mode(self.MODE_NORMAL)
- return await self.send_message(
- {"get-outhash": {"outhash": outhash, "taskhash": taskhash, "method": method}}
+ async def unihash_exists(self, unihash):
+ r = await self.send_stream(self.MODE_EXIST_STREAM, unihash)
+ return r == "true"
+
+ async def get_outhash(self, method, outhash, taskhash, with_unihash=True):
+ return await self.invoke(
+ {
+ "get-outhash": {
+ "outhash": outhash,
+ "taskhash": taskhash,
+ "method": method,
+ "with_unihash": with_unihash,
+ }
+ }
)
async def get_stats(self):
- await self._set_mode(self.MODE_NORMAL)
- return await self.send_message({"get-stats": None})
+ return await self.invoke({"get-stats": None})
async def reset_stats(self):
- await self._set_mode(self.MODE_NORMAL)
- return await self.send_message({"reset-stats": None})
+ return await self.invoke({"reset-stats": None})
async def backfill_wait(self):
- await self._set_mode(self.MODE_NORMAL)
- return (await self.send_message({"backfill-wait": None}))["tasks"]
+ return (await self.invoke({"backfill-wait": None}))["tasks"]
+
+ async def remove(self, where):
+ return await self.invoke({"remove": {"where": where}})
+
+ async def clean_unused(self, max_age):
+ return await self.invoke({"clean-unused": {"max_age_seconds": max_age}})
+
+ async def auth(self, username, token):
+ result = await self.invoke({"auth": {"username": username, "token": token}})
+ self.username = username
+ self.password = token
+ self.saved_become_user = None
+ return result
+
+ async def refresh_token(self, username=None):
+ m = {}
+ if username:
+ m["username"] = username
+ result = await self.invoke({"refresh-token": m})
+ if (
+ self.username
+ and not self.saved_become_user
+ and result["username"] == self.username
+ ):
+ self.password = result["token"]
+ return result
+
+ async def set_user_perms(self, username, permissions):
+ return await self.invoke(
+ {"set-user-perms": {"username": username, "permissions": permissions}}
+ )
+
+ async def get_user(self, username=None):
+ m = {}
+ if username:
+ m["username"] = username
+ return await self.invoke({"get-user": m})
+
+ async def get_all_users(self):
+ return (await self.invoke({"get-all-users": {}}))["users"]
+
+ async def new_user(self, username, permissions):
+ return await self.invoke(
+ {"new-user": {"username": username, "permissions": permissions}}
+ )
+
+ async def delete_user(self, username):
+ return await self.invoke({"delete-user": {"username": username}})
+
+ async def become_user(self, username):
+ result = await self.invoke({"become-user": {"username": username}})
+ if username == self.username:
+ self.saved_become_user = None
+ else:
+ self.saved_become_user = username
+ return result
+
+ async def get_db_usage(self):
+ return (await self.invoke({"get-db-usage": {}}))["usage"]
+
+ async def get_db_query_columns(self):
+ return (await self.invoke({"get-db-query-columns": {}}))["columns"]
+
+ async def gc_status(self):
+ return await self.invoke({"gc-status": {}})
+
+ async def gc_mark(self, mark, where):
+ """
+ Starts a new garbage collection operation identified by "mark". If
+ garbage collection is already in progress with "mark", the collection
+ is continued.
+
+ All unihash entries that match the "where" clause are marked to be
+ kept. In addition, any new entries added to the database after this
+        command will be automatically marked with "mark".
+ """
+ return await self.invoke({"gc-mark": {"mark": mark, "where": where}})
+
+ async def gc_sweep(self, mark):
+ """
+ Finishes garbage collection for "mark". All unihash entries that have
+ not been marked will be deleted.
+
+    It is recommended to clean unused outhash entries after running this to
+    clean up any dangling outhashes.
+ """
+ return await self.invoke({"gc-sweep": {"mark": mark}})
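
Taken together, a client-side garbage collection pass is mark followed by sweep, optionally with clean_unused() afterwards for dangling outhashes. A sketch; the mark string and where-clause values are hypothetical, and valid columns can be discovered with get_db_query_columns():

    import asyncio
    from hashserv import create_async_client

    async def collect_garbage(addr):
        client = await create_async_client(addr)
        try:
            # Keep every entry matching the (hypothetical) where-clause; new
            # entries reported while the GC runs are marked automatically.
            await client.gc_mark("gc-pass-1", {"method": "some.method"})
            # Delete everything that was not marked with "gc-pass-1".
            await client.gc_sweep("gc-pass-1")
            # Recommended follow-up: drop outhash entries unused for 30 days.
            await client.clean_unused(30 * 24 * 60 * 60)
        finally:
            await client.close()

    asyncio.run(collect_garbage("unix:///run/hashserv.sock"))
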
class Client(bb.asyncrpc.Client):
- def __init__(self):
+ def __init__(self, username=None, password=None):
+ self.username = username
+ self.password = password
+
super().__init__()
self._add_methods(
"connect_tcp",
- "close",
+ "connect_websocket",
"get_unihash",
"report_unihash",
"report_unihash_equiv",
"get_taskhash",
+ "unihash_exists",
+ "get_outhash",
"get_stats",
"reset_stats",
"backfill_wait",
+ "remove",
+ "clean_unused",
+ "auth",
+ "refresh_token",
+ "set_user_perms",
+ "get_user",
+ "get_all_users",
+ "new_user",
+ "delete_user",
+ "become_user",
+ "get_db_usage",
+ "get_db_query_columns",
+ "gc_status",
+ "gc_mark",
+ "gc_sweep",
)
def _get_async_client(self):
- return AsyncClient()
+ return AsyncClient(self.username, self.password)
+
+
+class ClientPool(bb.asyncrpc.ClientPool):
+ def __init__(
+ self,
+ address,
+ max_clients,
+ *,
+ username=None,
+ password=None,
+ become=None,
+ ):
+ super().__init__(max_clients)
+ self.address = address
+ self.username = username
+ self.password = password
+ self.become = become
+
+ async def _new_client(self):
+ client = await create_async_client(
+ self.address,
+ username=self.username,
+ password=self.password,
+ )
+ if self.become:
+ await client.become_user(self.become)
+ return client
+
+ def _run_key_tasks(self, queries, call):
+ results = {key: None for key in queries.keys()}
+
+ def make_task(key, args):
+ async def task(client):
+ nonlocal results
+ unihash = await call(client, args)
+ results[key] = unihash
+
+ return task
+
+ def gen_tasks():
+ for key, args in queries.items():
+ yield make_task(key, args)
+
+ self.run_tasks(gen_tasks())
+ return results
+
+ def get_unihashes(self, queries):
+ """
+ Query multiple unihashes in parallel.
+
+        The queries argument is a dictionary with arbitrary keys. Each value
+        must be a tuple of (method, taskhash).
+
+ Returns a dictionary with a corresponding key for each input key, and
+        the value is the queried unihash (which might be None if the query
+        failed).
+ """
+
+ async def call(client, args):
+ method, taskhash = args
+ return await client.get_unihash(method, taskhash)
+
+ return self._run_key_tasks(queries, call)
+
+ def unihashes_exist(self, queries):
+ """
+ Query multiple unihash existence checks in parallel.
+
+        The queries argument is a dictionary with arbitrary keys. Each value
+        must be a unihash.
+
+ Returns a dictionary with a corresponding key for each input key, and
+        the value is True or False depending on whether the unihash is known to
+        the server (or None if there was a failure).
+ """
+
+ async def call(client, unihash):
+ return await client.unihash_exists(unihash)
+
+ return self._run_key_tasks(queries, call)
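
A usage sketch for the pool; the address, method name and hashes are hypothetical. Keys are arbitrary, so callers can use task identifiers and map each answer back directly:

    from hashserv.client import ClientPool

    pool = ClientPool("unix:///run/hashserv.sock", max_clients=4)
    try:
        queries = {
            ("gcc", "do_compile"): ("sstate_output_hash", "aabb..."),
            ("gcc", "do_install"): ("sstate_output_hash", "ccdd..."),
        }
        results = pool.get_unihashes(queries)  # {key: unihash or None}
    finally:
        pool.close()  # assumes the close() provided by the pool base class
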
diff --git a/lib/hashserv/server.py b/lib/hashserv/server.py
index c941c0e9d..68f64f983 100644
--- a/lib/hashserv/server.py
+++ b/lib/hashserv/server.py
@@ -3,22 +3,51 @@
# SPDX-License-Identifier: GPL-2.0-only
#
-from contextlib import closing, contextmanager
-from datetime import datetime
+from datetime import datetime, timedelta
import asyncio
-import json
import logging
import math
-import os
-import signal
-import socket
-import sys
import time
-from . import create_async_client, TABLE_COLUMNS
+import os
+import base64
+import hashlib
+from . import create_async_client
import bb.asyncrpc
+logger = logging.getLogger("hashserv.server")
+
+
+# This permission only exists to match nothing
+NONE_PERM = "@none"
+
+READ_PERM = "@read"
+REPORT_PERM = "@report"
+DB_ADMIN_PERM = "@db-admin"
+USER_ADMIN_PERM = "@user-admin"
+ALL_PERM = "@all"
+
+ALL_PERMISSIONS = {
+ READ_PERM,
+ REPORT_PERM,
+ DB_ADMIN_PERM,
+ USER_ADMIN_PERM,
+ ALL_PERM,
+}
+
+DEFAULT_ANON_PERMS = (
+ READ_PERM,
+ REPORT_PERM,
+ DB_ADMIN_PERM,
+)
+
+TOKEN_ALGORITHM = "sha256"
+
+# 48 bytes of random data will result in 64 characters when base64
+# encoded. This number also ensures that the base64 encoding won't have any
+# trailing '=' characters.
+TOKEN_SIZE = 48
-logger = logging.getLogger('hashserv.server')
+SALT_SIZE = 8
class Measurement(object):
@@ -108,360 +137,745 @@ class Stats(object):
return math.sqrt(self.s / (self.num - 1))
def todict(self):
- return {k: getattr(self, k) for k in ('num', 'total_time', 'max_time', 'average', 'stdev')}
+ return {
+ k: getattr(self, k)
+ for k in ("num", "total_time", "max_time", "average", "stdev")
+ }
+
+
+token_refresh_semaphore = asyncio.Lock()
+
+
+async def new_token():
+ # Prevent malicious users from using this API to deduce the entropy
+ # pool on the server and thus be able to guess a token. *All* token
+ # refresh requests lock the same global semaphore and then sleep for a
+    # short time. This effectively rate limits the total number of requests
+    # that can be made across all clients to 10/second, which should be enough
+    # since you have to be an authenticated user to make the request in the
+    # first place.
+ async with token_refresh_semaphore:
+ await asyncio.sleep(0.1)
+ raw = os.getrandom(TOKEN_SIZE, os.GRND_NONBLOCK)
+
+ return base64.b64encode(raw, b"._").decode("utf-8")
+
+
+def new_salt():
+ return os.getrandom(SALT_SIZE, os.GRND_NONBLOCK).hex()
-def insert_task(cursor, data, ignore=False):
- keys = sorted(data.keys())
- query = '''INSERT%s INTO tasks_v2 (%s) VALUES (%s)''' % (
- " OR IGNORE" if ignore else "",
- ', '.join(keys),
- ', '.join(':' + k for k in keys))
- cursor.execute(query, data)
+def hash_token(algo, salt, token):
+ h = hashlib.new(algo)
+ h.update(salt.encode("utf-8"))
+ h.update(token.encode("utf-8"))
+ return ":".join([algo, salt, h.hexdigest()])
-async def copy_from_upstream(client, db, method, taskhash):
- d = await client.get_taskhash(method, taskhash, True)
- if d is not None:
- # Filter out unknown columns
- d = {k: v for k, v in d.items() if k in TABLE_COLUMNS}
- keys = sorted(d.keys())
- with closing(db.cursor()) as cursor:
- insert_task(cursor, d)
- db.commit()
+def permissions(*permissions, allow_anon=True, allow_self_service=False):
+ """
+ Function decorator that can be used to decorate an RPC function call and
+    check that the current user's permissions match the required permissions.
- return d
+ If allow_anon is True, the user will also be allowed to make the RPC call
+ if the anonymous user permissions match the permissions.
-async def copy_outhash_from_upstream(client, db, method, outhash, taskhash):
- d = await client.get_outhash(method, outhash, taskhash)
- if d is not None:
- # Filter out unknown columns
- d = {k: v for k, v in d.items() if k in TABLE_COLUMNS}
- keys = sorted(d.keys())
+ If allow_self_service is True, and the "username" property in the request
+ is the currently logged in user, or not specified, the user will also be
+ allowed to make the request. This allows users to access normal privileged
+ API, as long as they are only modifying their own user properties (e.g.
+ users can be allowed to reset their own token without @user-admin
+    permissions, but not the token for any other user).
+ """
- with closing(db.cursor()) as cursor:
- insert_task(cursor, d)
- db.commit()
+ def wrapper(func):
+ async def wrap(self, request):
+ if allow_self_service and self.user is not None:
+ username = request.get("username", self.user.username)
+ if username == self.user.username:
+ request["username"] = self.user.username
+ return await func(self, request)
+
+ if not self.user_has_permissions(*permissions, allow_anon=allow_anon):
+ if not self.user:
+ username = "Anonymous user"
+ user_perms = self.server.anon_perms
+ else:
+ username = self.user.username
+ user_perms = self.user.permissions
+
+            self.logger.info(
+                    "User %s with permissions %r denied from calling %s. Missing permission(s) %r",
+ username,
+ ", ".join(user_perms),
+ func.__name__,
+ ", ".join(permissions),
+ )
+            raise bb.asyncrpc.InvokeError(
+                    f"{username} is not allowed to access permission(s) {', '.join(permissions)}"
+ )
+
+ return await func(self, request)
+
+ return wrap
+
+ return wrapper
- return d
class ServerClient(bb.asyncrpc.AsyncServerConnection):
- FAST_QUERY = 'SELECT taskhash, method, unihash FROM tasks_v2 WHERE method=:method AND taskhash=:taskhash ORDER BY created ASC LIMIT 1'
- ALL_QUERY = 'SELECT * FROM tasks_v2 WHERE method=:method AND taskhash=:taskhash ORDER BY created ASC LIMIT 1'
- OUTHASH_QUERY = '''
- -- Find tasks with a matching outhash (that is, tasks that
- -- are equivalent)
- SELECT * FROM tasks_v2 WHERE method=:method AND outhash=:outhash
-
- -- If there is an exact match on the taskhash, return it.
- -- Otherwise return the oldest matching outhash of any
- -- taskhash
- ORDER BY CASE WHEN taskhash=:taskhash THEN 1 ELSE 2 END,
- created ASC
-
- -- Only return one row
- LIMIT 1
- '''
-
- def __init__(self, reader, writer, db, request_stats, backfill_queue, upstream, read_only):
- super().__init__(reader, writer, 'OEHASHEQUIV', logger)
- self.db = db
- self.request_stats = request_stats
+ def __init__(self, socket, server):
+ super().__init__(socket, "OEHASHEQUIV", server.logger)
+ self.server = server
self.max_chunk = bb.asyncrpc.DEFAULT_MAX_CHUNK
- self.backfill_queue = backfill_queue
- self.upstream = upstream
+ self.user = None
+
+ self.handlers.update(
+ {
+ "get": self.handle_get,
+ "get-outhash": self.handle_get_outhash,
+ "get-stream": self.handle_get_stream,
+ "exists-stream": self.handle_exists_stream,
+ "get-stats": self.handle_get_stats,
+ "get-db-usage": self.handle_get_db_usage,
+ "get-db-query-columns": self.handle_get_db_query_columns,
+ # Not always read-only, but internally checks if the server is
+ # read-only
+ "report": self.handle_report,
+ "auth": self.handle_auth,
+ "get-user": self.handle_get_user,
+ "get-all-users": self.handle_get_all_users,
+ "become-user": self.handle_become_user,
+ }
+ )
+
+ if not self.server.read_only:
+ self.handlers.update(
+ {
+ "report-equiv": self.handle_equivreport,
+ "reset-stats": self.handle_reset_stats,
+ "backfill-wait": self.handle_backfill_wait,
+ "remove": self.handle_remove,
+ "gc-mark": self.handle_gc_mark,
+ "gc-sweep": self.handle_gc_sweep,
+ "gc-status": self.handle_gc_status,
+ "clean-unused": self.handle_clean_unused,
+ "refresh-token": self.handle_refresh_token,
+ "set-user-perms": self.handle_set_perms,
+ "new-user": self.handle_new_user,
+ "delete-user": self.handle_delete_user,
+ }
+ )
- self.handlers.update({
- 'get': self.handle_get,
- 'get-outhash': self.handle_get_outhash,
- 'get-stream': self.handle_get_stream,
- 'get-stats': self.handle_get_stats,
- })
-
- if not read_only:
- self.handlers.update({
- 'report': self.handle_report,
- 'report-equiv': self.handle_equivreport,
- 'reset-stats': self.handle_reset_stats,
- 'backfill-wait': self.handle_backfill_wait,
- })
+ def raise_no_user_error(self, username):
+ raise bb.asyncrpc.InvokeError(f"No user named '{username}' exists")
+
+ def user_has_permissions(self, *permissions, allow_anon=True):
+ permissions = set(permissions)
+ if allow_anon:
+ if ALL_PERM in self.server.anon_perms:
+ return True
+
+ if not permissions - self.server.anon_perms:
+ return True
+
+ if self.user is None:
+ return False
+
+ if ALL_PERM in self.user.permissions:
+ return True
+
+ if not permissions - self.user.permissions:
+ return True
+
+ return False
def validate_proto_version(self):
- return (self.proto_version > (1, 0) and self.proto_version <= (1, 1))
+ return self.proto_version > (1, 0) and self.proto_version <= (1, 1)
async def process_requests(self):
- if self.upstream is not None:
- self.upstream_client = await create_async_client(self.upstream)
- else:
- self.upstream_client = None
-
- await super().process_requests()
+ async with self.server.db_engine.connect(self.logger) as db:
+ self.db = db
+ if self.server.upstream is not None:
+ self.upstream_client = await create_async_client(self.server.upstream)
+ else:
+ self.upstream_client = None
- if self.upstream_client is not None:
- await self.upstream_client.close()
+ try:
+ await super().process_requests()
+ finally:
+ if self.upstream_client is not None:
+ await self.upstream_client.close()
async def dispatch_message(self, msg):
for k in self.handlers.keys():
if k in msg:
- logger.debug('Handling %s' % k)
- if 'stream' in k:
- await self.handlers[k](msg[k])
+ self.logger.debug("Handling %s" % k)
+ if "stream" in k:
+ return await self.handlers[k](msg[k])
else:
- with self.request_stats.start_sample() as self.request_sample, \
- self.request_sample.measure():
- await self.handlers[k](msg[k])
- return
+ with self.server.request_stats.start_sample() as self.request_sample, self.request_sample.measure():
+ return await self.handlers[k](msg[k])
raise bb.asyncrpc.ClientError("Unrecognized command %r" % msg)
+ @permissions(READ_PERM)
async def handle_get(self, request):
- method = request['method']
- taskhash = request['taskhash']
-
- if request.get('all', False):
- row = self.query_equivalent(method, taskhash, self.ALL_QUERY)
+ method = request["method"]
+ taskhash = request["taskhash"]
+ fetch_all = request.get("all", False)
+
+ return await self.get_unihash(method, taskhash, fetch_all)
+
+ async def get_unihash(self, method, taskhash, fetch_all=False):
+ d = None
+
+ if fetch_all:
+ row = await self.db.get_unihash_by_taskhash_full(method, taskhash)
+ if row is not None:
+ d = {k: row[k] for k in row.keys()}
+ elif self.upstream_client is not None:
+ d = await self.upstream_client.get_taskhash(method, taskhash, True)
+ await self.update_unified(d)
else:
- row = self.query_equivalent(method, taskhash, self.FAST_QUERY)
+ row = await self.db.get_equivalent(method, taskhash)
- if row is not None:
- logger.debug('Found equivalent task %s -> %s', (row['taskhash'], row['unihash']))
- d = {k: row[k] for k in row.keys()}
- elif self.upstream_client is not None:
- d = await copy_from_upstream(self.upstream_client, self.db, method, taskhash)
- else:
- d = None
+ if row is not None:
+ d = {k: row[k] for k in row.keys()}
+ elif self.upstream_client is not None:
+ d = await self.upstream_client.get_taskhash(method, taskhash)
+ await self.db.insert_unihash(d["method"], d["taskhash"], d["unihash"])
- self.write_message(d)
+ return d
+ @permissions(READ_PERM)
async def handle_get_outhash(self, request):
- with closing(self.db.cursor()) as cursor:
- cursor.execute(self.OUTHASH_QUERY,
- {k: request[k] for k in ('method', 'outhash', 'taskhash')})
+ method = request["method"]
+ outhash = request["outhash"]
+ taskhash = request["taskhash"]
+ with_unihash = request.get("with_unihash", True)
- row = cursor.fetchone()
+ return await self.get_outhash(method, outhash, taskhash, with_unihash)
+
+ async def get_outhash(self, method, outhash, taskhash, with_unihash=True):
+ d = None
+ if with_unihash:
+ row = await self.db.get_unihash_by_outhash(method, outhash)
+ else:
+ row = await self.db.get_outhash(method, outhash)
if row is not None:
- logger.debug('Found equivalent outhash %s -> %s', (row['outhash'], row['unihash']))
d = {k: row[k] for k in row.keys()}
- else:
- d = None
+ elif self.upstream_client is not None:
+ d = await self.upstream_client.get_outhash(method, outhash, taskhash)
+ await self.update_unified(d)
- self.write_message(d)
+ return d
- async def handle_get_stream(self, request):
- self.write_message('ok')
+ async def update_unified(self, data):
+ if data is None:
+ return
+
+ await self.db.insert_unihash(data["method"], data["taskhash"], data["unihash"])
+ await self.db.insert_outhash(data)
+
+ async def _stream_handler(self, handler):
+ await self.socket.send_message("ok")
while True:
upstream = None
- l = await self.reader.readline()
+ l = await self.socket.recv()
if not l:
- return
+ break
try:
# This inner loop is very sensitive and must be as fast as
# possible (which is why the request sample is handled manually
# instead of using 'with', and also why logging statements are
# commented out).
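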
- self.request_sample = self.request_stats.start_sample()
+ self.request_sample = self.server.request_stats.start_sample()
request_measure = self.request_sample.measure()
request_measure.start()
- l = l.decode('utf-8').rstrip()
- if l == 'END':
- self.writer.write('ok\n'.encode('utf-8'))
- return
-
- (method, taskhash) = l.split()
- #logger.debug('Looking up %s %s' % (method, taskhash))
- row = self.query_equivalent(method, taskhash, self.FAST_QUERY)
- if row is not None:
- msg = ('%s\n' % row['unihash']).encode('utf-8')
- #logger.debug('Found equivalent task %s -> %s', (row['taskhash'], row['unihash']))
- elif self.upstream_client is not None:
- upstream = await self.upstream_client.get_unihash(method, taskhash)
- if upstream:
- msg = ("%s\n" % upstream).encode("utf-8")
- else:
- msg = "\n".encode("utf-8")
- else:
- msg = '\n'.encode('utf-8')
+ if l == "END":
+ break
- self.writer.write(msg)
+ msg = await handler(l)
+ await self.socket.send(msg)
finally:
request_measure.end()
self.request_sample.end()
- await self.writer.drain()
+ await self.socket.send("ok")
+ return self.NO_RESPONSE
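+ # Stream protocol as implemented above: the server sends "ok", then
+ # answers one request per received line until the client sends "END",
+ # and closes the exchange with a final "ok". A hypothetical get-stream
+ # session:
+ #
+ # C: "TestMethod 35788efc..." -> S: "f46d3fbb..." (or "" on a miss)
+ # C: "END" -> S: "ok"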
+
+ @permissions(READ_PERM)
+ async def handle_get_stream(self, request):
+ async def handler(l):
+ (method, taskhash) = l.split()
+ # self.logger.debug('Looking up %s %s' % (method, taskhash))
+ row = await self.db.get_equivalent(method, taskhash)
- # Post to the backfill queue after writing the result to minimize
- # the turn around time on a request
- if upstream is not None:
- await self.backfill_queue.put((method, taskhash))
+ if row is not None:
+ # self.logger.debug('Found equivalent task %s -> %s', (row['taskhash'], row['unihash']))
+ return row["unihash"]
- async def handle_report(self, data):
- with closing(self.db.cursor()) as cursor:
- cursor.execute(self.OUTHASH_QUERY,
- {k: data[k] for k in ('method', 'outhash', 'taskhash')})
-
- row = cursor.fetchone()
-
- if row is None and self.upstream_client:
- # Try upstream
- row = await copy_outhash_from_upstream(self.upstream_client,
- self.db,
- data['method'],
- data['outhash'],
- data['taskhash'])
-
- # If no matching outhash was found, or one *was* found but it
- # wasn't an exact match on the taskhash, a new entry for this
- # taskhash should be added
- if row is None or row['taskhash'] != data['taskhash']:
- # If a row matching the outhash was found, the unihash for
- # the new taskhash should be the same as that one.
- # Otherwise the caller provided unihash is used.
- unihash = data['unihash']
- if row is not None:
- unihash = row['unihash']
-
- insert_data = {
- 'method': data['method'],
- 'outhash': data['outhash'],
- 'taskhash': data['taskhash'],
- 'unihash': unihash,
- 'created': datetime.now()
- }
+ if self.upstream_client is not None:
+ upstream = await self.upstream_client.get_unihash(method, taskhash)
+ if upstream:
+ await self.server.backfill_queue.put((method, taskhash))
+ return upstream
- for k in ('owner', 'PN', 'PV', 'PR', 'task', 'outhash_siginfo'):
- if k in data:
- insert_data[k] = data[k]
+ return ""
- insert_task(cursor, insert_data)
- self.db.commit()
+ return await self._stream_handler(handler)
- logger.info('Adding taskhash %s with unihash %s',
- data['taskhash'], unihash)
+ @permissions(READ_PERM)
+ async def handle_exists_stream(self, request):
+ async def handler(l):
+ if await self.db.unihash_exists(l):
+ return "true"
- d = {
- 'taskhash': data['taskhash'],
- 'method': data['method'],
- 'unihash': unihash
- }
- else:
- d = {k: row[k] for k in ('taskhash', 'method', 'unihash')}
+ if self.upstream_client is not None:
+ if await self.upstream_client.unihash_exists(l):
+ return "true"
- self.write_message(d)
+ return "false"
- async def handle_equivreport(self, data):
- with closing(self.db.cursor()) as cursor:
- insert_data = {
- 'method': data['method'],
- 'outhash': "",
- 'taskhash': data['taskhash'],
- 'unihash': data['unihash'],
- 'created': datetime.now()
- }
+ return await self._stream_handler(handler)
- for k in ('owner', 'PN', 'PV', 'PR', 'task', 'outhash_siginfo'):
- if k in data:
- insert_data[k] = data[k]
+ async def report_readonly(self, data):
+ method = data["method"]
+ outhash = data["outhash"]
+ taskhash = data["taskhash"]
- insert_task(cursor, insert_data, ignore=True)
- self.db.commit()
+ info = await self.get_outhash(method, outhash, taskhash)
+ if info:
+ unihash = info["unihash"]
+ else:
+ unihash = data["unihash"]
- # Fetch the unihash that will be reported for the taskhash. If the
- # unihash matches, it means this row was inserted (or the mapping
- # was already valid)
- row = self.query_equivalent(data['method'], data['taskhash'], self.FAST_QUERY)
+ return {
+ "taskhash": taskhash,
+ "method": method,
+ "unihash": unihash,
+ }
+
+ # Since this can be called either read-only or to report, the check for
+ # report permission is made inside the function
+ @permissions(READ_PERM)
+ async def handle_report(self, data):
+ if self.server.read_only or not self.user_has_permissions(REPORT_PERM):
+ return await self.report_readonly(data)
+
+ outhash_data = {
+ "method": data["method"],
+ "outhash": data["outhash"],
+ "taskhash": data["taskhash"],
+ "created": datetime.now(),
+ }
+
+ for k in ("owner", "PN", "PV", "PR", "task", "outhash_siginfo"):
+ if k in data:
+ outhash_data[k] = data[k]
+
+ if self.user:
+ outhash_data["owner"] = self.user.username
+
+ # Insert the new entry, unless it already exists
+ if await self.db.insert_outhash(outhash_data):
+ # If this row is new, check if it is equivalent to another
+ # output hash
+ row = await self.db.get_equivalent_for_outhash(
+ data["method"], data["outhash"], data["taskhash"]
+ )
+
+ if row is not None:
+ # A matching output hash was found. Set our taskhash to the
+ # same unihash since they are equivalent
+ unihash = row["unihash"]
+ else:
+ # No matching output hash was found. This is probably the
+ # first outhash to be added.
+ unihash = data["unihash"]
+
+ # Query upstream to see if it has a unihash we can use
+ if self.upstream_client is not None:
+ upstream_data = await self.upstream_client.get_outhash(
+ data["method"], data["outhash"], data["taskhash"]
+ )
+ if upstream_data is not None:
+ unihash = upstream_data["unihash"]
+
+ await self.db.insert_unihash(data["method"], data["taskhash"], unihash)
+
+ unihash_data = await self.get_unihash(data["method"], data["taskhash"])
+ if unihash_data is not None:
+ unihash = unihash_data["unihash"]
+ else:
+ unihash = data["unihash"]
+
+ return {
+ "taskhash": data["taskhash"],
+ "method": data["method"],
+ "unihash": unihash,
+ }
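+ # Unihash resolution order for a new report: an equivalent outhash
+ # already in the local database wins, then the upstream server's
+ # answer, and only then the unihash supplied by the client.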
- if row['unihash'] == data['unihash']:
- logger.info('Adding taskhash equivalence for %s with unihash %s',
- data['taskhash'], row['unihash'])
+ @permissions(READ_PERM, REPORT_PERM)
+ async def handle_equivreport(self, data):
+ await self.db.insert_unihash(data["method"], data["taskhash"], data["unihash"])
- d = {k: row[k] for k in ('taskhash', 'method', 'unihash')}
+ # Fetch the unihash that will be reported for the taskhash. If the
+ # unihash matches, it means this row was inserted (or the mapping
+ # was already valid)
+ row = await self.db.get_equivalent(data["method"], data["taskhash"])
- self.write_message(d)
+ if row["unihash"] == data["unihash"]:
+ self.logger.info(
+ "Adding taskhash equivalence for %s with unihash %s",
+ data["taskhash"],
+ row["unihash"],
+ )
+ return {k: row[k] for k in ("taskhash", "method", "unihash")}
+ @permissions(READ_PERM)
async def handle_get_stats(self, request):
- d = {
- 'requests': self.request_stats.todict(),
+ return {
+ "requests": self.server.request_stats.todict(),
}
- self.write_message(d)
-
+ @permissions(DB_ADMIN_PERM)
async def handle_reset_stats(self, request):
d = {
- 'requests': self.request_stats.todict(),
+ "requests": self.server.request_stats.todict(),
}
- self.request_stats.reset()
- self.write_message(d)
+ self.server.request_stats.reset()
+ return d
+ @permissions(READ_PERM)
async def handle_backfill_wait(self, request):
d = {
- 'tasks': self.backfill_queue.qsize(),
+ "tasks": self.server.backfill_queue.qsize(),
+ }
+ await self.server.backfill_queue.join()
+ return d
+
+ @permissions(DB_ADMIN_PERM)
+ async def handle_remove(self, request):
+ condition = request["where"]
+ if not isinstance(condition, dict):
+ raise TypeError("Bad condition type %s" % type(condition))
+
+ return {"count": await self.db.remove(condition)}
+
+ @permissions(DB_ADMIN_PERM)
+ async def handle_gc_mark(self, request):
+ condition = request["where"]
+ mark = request["mark"]
+
+ if not isinstance(condition, dict):
+ raise TypeError("Bad condition type %s" % type(condition))
+
+ if not isinstance(mark, str):
+ raise TypeError("Bad mark type %s" % type(mark))
+
+ return {"count": await self.db.gc_mark(mark, condition)}
+
+ @permissions(DB_ADMIN_PERM)
+ async def handle_gc_sweep(self, request):
+ mark = request["mark"]
+
+ if not isinstance(mark, str):
+ raise TypeError("Bad mark type %s" % type(mark))
+
+ current_mark = await self.db.get_current_gc_mark()
+
+ if not current_mark or mark != current_mark:
+ raise bb.asyncrpc.InvokeError(
+ f"'{mark}' is not the current mark. Refusing to sweep"
+ )
+
+ count = await self.db.gc_sweep()
+
+ return {"count": count}
+
+ @permissions(DB_ADMIN_PERM)
+ async def handle_gc_status(self, request):
+ (keep_rows, remove_rows, current_mark) = await self.db.gc_status()
+ return {
+ "keep": keep_rows,
+ "remove": remove_rows,
+ "mark": current_mark,
}
- await self.backfill_queue.join()
- self.write_message(d)
- def query_equivalent(self, method, taskhash, query):
- # This is part of the inner loop and must be as fast as possible
+ @permissions(DB_ADMIN_PERM)
+ async def handle_clean_unused(self, request):
+ max_age = request["max_age_seconds"]
+ # Note: the cutoff must lie in the past; negating max_age here would
+ # push it into the future and clean everything regardless of age
+ oldest = datetime.now() - timedelta(seconds=max_age)
+ return {"count": await self.db.clean_unused(oldest)}
+
+ @permissions(DB_ADMIN_PERM)
+ async def handle_get_db_usage(self, request):
+ return {"usage": await self.db.get_usage()}
+
+ @permissions(DB_ADMIN_PERM)
+ async def handle_get_db_query_columns(self, request):
+ return {"columns": await self.db.get_query_columns()}
+
+ # The authentication API is always allowed
+ async def handle_auth(self, request):
+ username = str(request["username"])
+ token = str(request["token"])
+
+ async def fail_auth():
+ nonlocal username
+ # Rate limit bad login attempts
+ await asyncio.sleep(1)
+ raise bb.asyncrpc.InvokeError(f"Unable to authenticate as {username}")
+
+ user, db_token = await self.db.lookup_user_token(username)
+
+ if not user or not db_token:
+ await fail_auth()
+
+ try:
+ algo, salt, _ = db_token.split(":")
+ except ValueError:
+ await fail_auth()
+
+ if hash_token(algo, salt, token) != db_token:
+ await fail_auth()
+
+ self.user = user
+
+ self.logger.info("Authenticated as %s", username)
+
+ return {
+ "result": True,
+ "username": self.user.username,
+ "permissions": sorted(list(self.user.permissions)),
+ }
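+ # Tokens are stored as "algorithm:salt:hash" (hence the split above),
+ # so the plaintext token exists only in the response of the call that
+ # generated it.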
+
+ @permissions(USER_ADMIN_PERM, allow_self_service=True, allow_anon=False)
+ async def handle_refresh_token(self, request):
+ username = str(request["username"])
+
+ token = await new_token()
+
+ updated = await self.db.set_user_token(
+ username,
+ hash_token(TOKEN_ALGORITHM, new_salt(), token),
+ )
+ if not updated:
+ self.raise_no_user_error(username)
+
+ return {"username": username, "token": token}
+
+ def get_perm_arg(self, arg):
+ if not isinstance(arg, list):
+ raise bb.asyncrpc.InvokeError("Unexpected type for permissions")
+
+ arg = set(arg)
try:
- cursor = self.db.cursor()
- cursor.execute(query, {'method': method, 'taskhash': taskhash})
- return cursor.fetchone()
- except:
- cursor.close()
+ arg.remove(NONE_PERM)
+ except KeyError:
+ pass
+
+ unknown_perms = arg - ALL_PERMISSIONS
+ if unknown_perms:
+ raise bb.asyncrpc.InvokeError(
+ "Unknown permissions %s" % ", ".join(sorted(list(unknown_perms)))
+ )
+
+ return sorted(list(arg))
+
+ def return_perms(self, permissions):
+ if ALL_PERM in permissions:
+ return sorted(list(ALL_PERMISSIONS))
+ return sorted(list(permissions))
+
+ @permissions(USER_ADMIN_PERM, allow_anon=False)
+ async def handle_set_perms(self, request):
+ username = str(request["username"])
+ permissions = self.get_perm_arg(request["permissions"])
+
+ if not await self.db.set_user_perms(username, permissions):
+ self.raise_no_user_error(username)
+
+ return {
+ "username": username,
+ "permissions": self.return_perms(permissions),
+ }
+
+ @permissions(USER_ADMIN_PERM, allow_self_service=True, allow_anon=False)
+ async def handle_get_user(self, request):
+ username = str(request["username"])
+
+ user = await self.db.lookup_user(username)
+ if user is None:
+ return None
+
+ return {
+ "username": user.username,
+ "permissions": self.return_perms(user.permissions),
+ }
+
+ @permissions(USER_ADMIN_PERM, allow_anon=False)
+ async def handle_get_all_users(self, request):
+ users = await self.db.get_all_users()
+ return {
+ "users": [
+ {
+ "username": u.username,
+ "permissions": self.return_perms(u.permissions),
+ }
+ for u in users
+ ]
+ }
+
+ @permissions(USER_ADMIN_PERM, allow_anon=False)
+ async def handle_new_user(self, request):
+ username = str(request["username"])
+ permissions = self.get_perm_arg(request["permissions"])
+
+ token = await new_token()
+
+ inserted = await self.db.new_user(
+ username,
+ permissions,
+ hash_token(TOKEN_ALGORITHM, new_salt(), token),
+ )
+ if not inserted:
+ raise bb.asyncrpc.InvokeError(f"Cannot create new user '{username}'")
+
+ return {
+ "username": username,
+ "permissions": self.return_perms(permissions),
+ "token": token,
+ }
+
+ @permissions(USER_ADMIN_PERM, allow_self_service=True, allow_anon=False)
+ async def handle_delete_user(self, request):
+ username = str(request["username"])
+
+ if not await self.db.delete_user(username):
+ self.raise_no_user_error(username)
+
+ return {"username": username}
+
+ @permissions(USER_ADMIN_PERM, allow_anon=False)
+ async def handle_become_user(self, request):
+ username = str(request["username"])
+
+ user = await self.db.lookup_user(username)
+ if user is None:
+ raise bb.asyncrpc.InvokeError(f"User {username} doesn't exist")
+
+ self.user = user
+
+ self.logger.info("Became user %s", username)
+
+ return {
+ "username": self.user.username,
+ "permissions": self.return_perms(self.user.permissions),
+ }
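+ # Summary of the @permissions arguments used above, as inferred from
+ # this patch: allow_anon=False requires an authenticated user, and
+ # allow_self_service=True additionally lets users without
+ # USER_ADMIN_PERM run the operation on their own account.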
class Server(bb.asyncrpc.AsyncServer):
- def __init__(self, db, loop=None, upstream=None, read_only=False):
+ def __init__(
+ self,
+ db_engine,
+ upstream=None,
+ read_only=False,
+ anon_perms=DEFAULT_ANON_PERMS,
+ admin_username=None,
+ admin_password=None,
+ ):
if upstream and read_only:
- raise bb.asyncrpc.ServerError("Read-only hashserv cannot pull from an upstream server")
+ raise bb.asyncrpc.ServerError(
+ "Read-only hashserv cannot pull from an upstream server"
+ )
+
+ disallowed_perms = set(anon_perms) - set(
+ [NONE_PERM, READ_PERM, REPORT_PERM, DB_ADMIN_PERM]
+ )
- super().__init__(logger, loop)
+ if disallowed_perms:
+ raise bb.asyncrpc.ServerError(
+ f"Permission(s) {' '.join(disallowed_perms)} are not allowed for anonymous users"
+ )
+
+ super().__init__(logger)
self.request_stats = Stats()
- self.db = db
+ self.db_engine = db_engine
self.upstream = upstream
self.read_only = read_only
+ self.backfill_queue = None
+ self.anon_perms = set(anon_perms)
+ self.admin_username = admin_username
+ self.admin_password = admin_password
+
+ self.logger.info(
+ "Anonymous user permissions are: %s", ", ".join(self.anon_perms)
+ )
+
+ def accept_client(self, socket):
+ return ServerClient(socket, self)
+
+ async def create_admin_user(self):
+ admin_permissions = (ALL_PERM,)
+ async with self.db_engine.connect(self.logger) as db:
+ added = await db.new_user(
+ self.admin_username,
+ admin_permissions,
+ hash_token(TOKEN_ALGORITHM, new_salt(), self.admin_password),
+ )
+ if added:
+ self.logger.info("Created admin user '%s'", self.admin_username)
+ else:
+ await db.set_user_perms(
+ self.admin_username,
+ admin_permissions,
+ )
+ await db.set_user_token(
+ self.admin_username,
+ hash_token(TOKEN_ALGORITHM, new_salt(), self.admin_password),
+ )
+ self.logger.info("Admin user '%s' updated", self.admin_username)
+
+ async def backfill_worker_task(self):
+ async with await create_async_client(
+ self.upstream
+ ) as client, self.db_engine.connect(self.logger) as db:
+ while True:
+ item = await self.backfill_queue.get()
+ if item is None:
+ self.backfill_queue.task_done()
+ break
- def accept_client(self, reader, writer):
- return ServerClient(reader, writer, self.db, self.request_stats, self.backfill_queue, self.upstream, self.read_only)
+ method, taskhash = item
+ d = await client.get_taskhash(method, taskhash)
+ if d is not None:
+ await db.insert_unihash(d["method"], d["taskhash"], d["unihash"])
+ self.backfill_queue.task_done()
- @contextmanager
- def _backfill_worker(self):
- async def backfill_worker_task():
- client = await create_async_client(self.upstream)
- try:
- while True:
- item = await self.backfill_queue.get()
- if item is None:
- self.backfill_queue.task_done()
- break
- method, taskhash = item
- await copy_from_upstream(client, self.db, method, taskhash)
- self.backfill_queue.task_done()
- finally:
- await client.close()
+ def start(self):
+ tasks = super().start()
+ if self.upstream:
+ self.backfill_queue = asyncio.Queue()
+ tasks += [self.backfill_worker_task()]
- async def join_worker(worker):
- await self.backfill_queue.put(None)
- await worker
+ self.loop.run_until_complete(self.db_engine.create())
- if self.upstream is not None:
- worker = asyncio.ensure_future(backfill_worker_task())
- try:
- yield
- finally:
- self.loop.run_until_complete(join_worker(worker))
- else:
- yield
+ if self.admin_username:
+ self.loop.run_until_complete(self.create_admin_user())
- def run_loop_forever(self):
- self.backfill_queue = asyncio.Queue()
+ return tasks
- with self._backfill_worker():
- super().run_loop_forever()
+ async def stop(self):
+ if self.backfill_queue is not None:
+ await self.backfill_queue.put(None)
+ await super().stop()
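+ # Servers are normally built via hashserv.create_server(), which
+ # constructs the appropriate DatabaseEngine for the given database
+ # address and passes it here (see the updated tests below).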
diff --git a/lib/hashserv/sqlalchemy.py b/lib/hashserv/sqlalchemy.py
new file mode 100644
index 000000000..f7b0226a7
--- /dev/null
+++ b/lib/hashserv/sqlalchemy.py
@@ -0,0 +1,598 @@
+#! /usr/bin/env python3
+#
+# Copyright (C) 2023 Garmin Ltd.
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import logging
+from datetime import datetime
+from . import User
+
+from sqlalchemy.ext.asyncio import create_async_engine
+from sqlalchemy.pool import NullPool
+from sqlalchemy import (
+ MetaData,
+ Column,
+ Table,
+ Text,
+ Integer,
+ UniqueConstraint,
+ DateTime,
+ Index,
+ select,
+ insert,
+ exists,
+ literal,
+ and_,
+ delete,
+ update,
+ func,
+ inspect,
+)
+import sqlalchemy.engine
+from sqlalchemy.orm import declarative_base
+from sqlalchemy.exc import IntegrityError
+from sqlalchemy.dialects.postgresql import insert as postgres_insert
+
+Base = declarative_base()
+
+
+class UnihashesV3(Base):
+ __tablename__ = "unihashes_v3"
+ id = Column(Integer, primary_key=True, autoincrement=True)
+ method = Column(Text, nullable=False)
+ taskhash = Column(Text, nullable=False)
+ unihash = Column(Text, nullable=False)
+ gc_mark = Column(Text, nullable=False)
+
+ __table_args__ = (
+ UniqueConstraint("method", "taskhash"),
+ Index("taskhash_lookup_v4", "method", "taskhash"),
+ Index("unihash_lookup_v1", "unihash"),
+ )
+
+
+class OuthashesV2(Base):
+ __tablename__ = "outhashes_v2"
+ id = Column(Integer, primary_key=True, autoincrement=True)
+ method = Column(Text, nullable=False)
+ taskhash = Column(Text, nullable=False)
+ outhash = Column(Text, nullable=False)
+ created = Column(DateTime)
+ owner = Column(Text)
+ PN = Column(Text)
+ PV = Column(Text)
+ PR = Column(Text)
+ task = Column(Text)
+ outhash_siginfo = Column(Text)
+
+ __table_args__ = (
+ UniqueConstraint("method", "taskhash", "outhash"),
+ Index("outhash_lookup_v3", "method", "outhash"),
+ )
+
+
+class Users(Base):
+ __tablename__ = "users"
+ id = Column(Integer, primary_key=True, autoincrement=True)
+ username = Column(Text, nullable=False)
+ token = Column(Text, nullable=False)
+ permissions = Column(Text)
+
+ __table_args__ = (UniqueConstraint("username"),)
+
+
+class Config(Base):
+ __tablename__ = "config"
+ id = Column(Integer, primary_key=True, autoincrement=True)
+ name = Column(Text, nullable=False)
+ value = Column(Text)
+ __table_args__ = (
+ UniqueConstraint("name"),
+ Index("config_lookup", "name"),
+ )
+
+
+#
+# Old table versions
+#
+DeprecatedBase = declarative_base()
+
+
+class UnihashesV2(DeprecatedBase):
+ __tablename__ = "unihashes_v2"
+ id = Column(Integer, primary_key=True, autoincrement=True)
+ method = Column(Text, nullable=False)
+ taskhash = Column(Text, nullable=False)
+ unihash = Column(Text, nullable=False)
+
+ __table_args__ = (
+ UniqueConstraint("method", "taskhash"),
+ Index("taskhash_lookup_v3", "method", "taskhash"),
+ )
+
+
+class DatabaseEngine(object):
+ def __init__(self, url, username=None, password=None):
+ self.logger = logging.getLogger("hashserv.sqlalchemy")
+ self.url = sqlalchemy.engine.make_url(url)
+
+ if username is not None:
+ self.url = self.url.set(username=username)
+
+ if password is not None:
+ self.url = self.url.set(password=password)
+
+ async def create(self):
+ def check_table_exists(conn, name):
+ return inspect(conn).has_table(name)
+
+ self.logger.info("Using database %s", self.url)
+ if self.url.drivername == 'postgresql+psycopg':
+ # The Psycopg 3 (psycopg) driver can handle async connection pooling
+ self.engine = create_async_engine(self.url, max_overflow=-1)
+ else:
+ self.engine = create_async_engine(self.url, poolclass=NullPool)
+
+ async with self.engine.begin() as conn:
+ # Create tables
+ self.logger.info("Creating tables...")
+ await conn.run_sync(Base.metadata.create_all)
+
+ if await conn.run_sync(check_table_exists, UnihashesV2.__tablename__):
+ self.logger.info("Upgrading Unihashes V2 -> V3...")
+ statement = insert(UnihashesV3).from_select(
+ ["id", "method", "unihash", "taskhash", "gc_mark"],
+ select(
+ UnihashesV2.id,
+ UnihashesV2.method,
+ UnihashesV2.unihash,
+ UnihashesV2.taskhash,
+ literal("").label("gc_mark"),
+ ),
+ )
+ self.logger.debug("%s", statement)
+ await conn.execute(statement)
+
+ await conn.run_sync(Base.metadata.drop_all, [UnihashesV2.__table__])
+ self.logger.info("Upgrade complete")
+
+ def connect(self, logger):
+ return Database(self.engine, logger)
+
+
+def map_row(row):
+ if row is None:
+ return None
+ return dict(**row._mapping)
+
+
+def map_user(row):
+ if row is None:
+ return None
+ return User(
+ username=row.username,
+ permissions=set(row.permissions.split()),
+ )
+
+
+def _make_condition_statement(table, condition):
+ where = {}
+ for c in table.__table__.columns:
+ if c.key in condition and condition[c.key] is not None:
+ where[c] = condition[c.key]
+
+ return [(k == v) for k, v in where.items()]
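+ # For example, {"method": "TestMethod", "bogus": 1} applied to
+ # UnihashesV3 yields [UnihashesV3.method == "TestMethod"]; keys that
+ # are not columns, or whose value is None, are silently dropped.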
+
+
+class Database(object):
+ def __init__(self, engine, logger):
+ self.engine = engine
+ self.db = None
+ self.logger = logger
+
+ async def __aenter__(self):
+ self.db = await self.engine.connect()
+ return self
+
+ async def __aexit__(self, exc_type, exc_value, traceback):
+ await self.close()
+
+ async def close(self):
+ await self.db.close()
+ self.db = None
+
+ async def _execute(self, statement):
+ self.logger.debug("%s", statement)
+ return await self.db.execute(statement)
+
+ async def _set_config(self, name, value):
+ while True:
+ result = await self._execute(
+ update(Config).where(Config.name == name).values(value=value)
+ )
+
+ if result.rowcount == 0:
+ self.logger.debug("Config '%s' not found. Adding it", name)
+ try:
+ await self._execute(insert(Config).values(name=name, value=value))
+ except IntegrityError:
+ # Race. Try again
+ continue
+
+ break
+
+ def _get_config_subquery(self, name, default=None):
+ if default is not None:
+ return func.coalesce(
+ select(Config.value).where(Config.name == name).scalar_subquery(),
+ default,
+ )
+ return select(Config.value).where(Config.name == name).scalar_subquery()
+
+ async def _get_config(self, name):
+ result = await self._execute(select(Config.value).where(Config.name == name))
+ row = result.first()
+ if row is None:
+ return None
+ return row.value
+
+ async def get_unihash_by_taskhash_full(self, method, taskhash):
+ async with self.db.begin():
+ result = await self._execute(
+ select(
+ OuthashesV2,
+ UnihashesV3.unihash.label("unihash"),
+ )
+ .join(
+ UnihashesV3,
+ and_(
+ UnihashesV3.method == OuthashesV2.method,
+ UnihashesV3.taskhash == OuthashesV2.taskhash,
+ ),
+ )
+ .where(
+ OuthashesV2.method == method,
+ OuthashesV2.taskhash == taskhash,
+ )
+ .order_by(
+ OuthashesV2.created.asc(),
+ )
+ .limit(1)
+ )
+ return map_row(result.first())
+
+ async def get_unihash_by_outhash(self, method, outhash):
+ async with self.db.begin():
+ result = await self._execute(
+ select(OuthashesV2, UnihashesV3.unihash.label("unihash"))
+ .join(
+ UnihashesV3,
+ and_(
+ UnihashesV3.method == OuthashesV2.method,
+ UnihashesV3.taskhash == OuthashesV2.taskhash,
+ ),
+ )
+ .where(
+ OuthashesV2.method == method,
+ OuthashesV2.outhash == outhash,
+ )
+ .order_by(
+ OuthashesV2.created.asc(),
+ )
+ .limit(1)
+ )
+ return map_row(result.first())
+
+ async def unihash_exists(self, unihash):
+ async with self.db.begin():
+ result = await self._execute(
+ select(UnihashesV3).where(UnihashesV3.unihash == unihash).limit(1)
+ )
+
+ return result.first() is not None
+
+ async def get_outhash(self, method, outhash):
+ async with self.db.begin():
+ result = await self._execute(
+ select(OuthashesV2)
+ .where(
+ OuthashesV2.method == method,
+ OuthashesV2.outhash == outhash,
+ )
+ .order_by(
+ OuthashesV2.created.asc(),
+ )
+ .limit(1)
+ )
+ return map_row(result.first())
+
+ async def get_equivalent_for_outhash(self, method, outhash, taskhash):
+ async with self.db.begin():
+ result = await self._execute(
+ select(
+ OuthashesV2.taskhash.label("taskhash"),
+ UnihashesV3.unihash.label("unihash"),
+ )
+ .join(
+ UnihashesV3,
+ and_(
+ UnihashesV3.method == OuthashesV2.method,
+ UnihashesV3.taskhash == OuthashesV2.taskhash,
+ ),
+ )
+ .where(
+ OuthashesV2.method == method,
+ OuthashesV2.outhash == outhash,
+ OuthashesV2.taskhash != taskhash,
+ )
+ .order_by(
+ OuthashesV2.created.asc(),
+ )
+ .limit(1)
+ )
+ return map_row(result.first())
+
+ async def get_equivalent(self, method, taskhash):
+ async with self.db.begin():
+ result = await self._execute(
+ select(
+ UnihashesV3.unihash,
+ UnihashesV3.method,
+ UnihashesV3.taskhash,
+ ).where(
+ UnihashesV3.method == method,
+ UnihashesV3.taskhash == taskhash,
+ )
+ )
+ return map_row(result.first())
+
+ async def remove(self, condition):
+ async def do_remove(table):
+ where = _make_condition_statement(table, condition)
+ if where:
+ async with self.db.begin():
+ result = await self._execute(delete(table).where(*where))
+ return result.rowcount
+
+ return 0
+
+ count = 0
+ count += await do_remove(UnihashesV3)
+ count += await do_remove(OuthashesV2)
+
+ return count
+
+ async def get_current_gc_mark(self):
+ async with self.db.begin():
+ return await self._get_config("gc-mark")
+
+ async def gc_status(self):
+ async with self.db.begin():
+ gc_mark_subquery = self._get_config_subquery("gc-mark", "")
+
+ result = await self._execute(
+ select(func.count())
+ .select_from(UnihashesV3)
+ .where(UnihashesV3.gc_mark == gc_mark_subquery)
+ )
+ keep_rows = result.scalar()
+
+ result = await self._execute(
+ select(func.count())
+ .select_from(UnihashesV3)
+ .where(UnihashesV3.gc_mark != gc_mark_subquery)
+ )
+ remove_rows = result.scalar()
+
+ return (keep_rows, remove_rows, await self._get_config("gc-mark"))
+
+ async def gc_mark(self, mark, condition):
+ async with self.db.begin():
+ await self._set_config("gc-mark", mark)
+
+ where = _make_condition_statement(UnihashesV3, condition)
+ if not where:
+ return 0
+
+ result = await self._execute(
+ update(UnihashesV3)
+ .values(gc_mark=self._get_config_subquery("gc-mark", ""))
+ .where(*where)
+ )
+ return result.rowcount
+
+ async def gc_sweep(self):
+ async with self.db.begin():
+ result = await self._execute(
+ delete(UnihashesV3).where(
+ # A sneaky conditional that provides some protection against
+ # errant use: if the config mark is NULL, this will not
+ # match any rows because no default is specified in the
+ # select statement
+ UnihashesV3.gc_mark
+ != self._get_config_subquery("gc-mark")
+ )
+ )
+ await self._set_config("gc-mark", None)
+
+ return result.rowcount
+
+ async def clean_unused(self, oldest):
+ async with self.db.begin():
+ result = await self._execute(
+ delete(OuthashesV2).where(
+ OuthashesV2.created < oldest,
+ ~(
+ select(UnihashesV3.id)
+ .where(
+ UnihashesV3.method == OuthashesV2.method,
+ UnihashesV3.taskhash == OuthashesV2.taskhash,
+ )
+ .limit(1)
+ .exists()
+ ),
+ )
+ )
+ return result.rowcount
+
+ async def insert_unihash(self, method, taskhash, unihash):
+ # Postgres specific ignore on insert duplicate
+ if self.engine.name == "postgresql":
+ statement = (
+ postgres_insert(UnihashesV3)
+ .values(
+ method=method,
+ taskhash=taskhash,
+ unihash=unihash,
+ gc_mark=self._get_config_subquery("gc-mark", ""),
+ )
+ .on_conflict_do_nothing(index_elements=("method", "taskhash"))
+ )
+ else:
+ statement = insert(UnihashesV3).values(
+ method=method,
+ taskhash=taskhash,
+ unihash=unihash,
+ gc_mark=self._get_config_subquery("gc-mark", ""),
+ )
+
+ try:
+ async with self.db.begin():
+ result = await self._execute(statement)
+ return result.rowcount != 0
+ except IntegrityError:
+ self.logger.debug(
+ "%s, %s, %s already in unihash database", method, taskhash, unihash
+ )
+ return False
+
+ async def insert_outhash(self, data):
+ outhash_columns = set(c.key for c in OuthashesV2.__table__.columns)
+
+ data = {k: v for k, v in data.items() if k in outhash_columns}
+
+ if "created" in data and not isinstance(data["created"], datetime):
+ data["created"] = datetime.fromisoformat(data["created"])
+
+ # Postgres specific ignore on insert duplicate
+ if self.engine.name == "postgresql":
+ statement = (
+ postgres_insert(OuthashesV2)
+ .values(**data)
+ .on_conflict_do_nothing(
+ index_elements=("method", "taskhash", "outhash")
+ )
+ )
+ else:
+ statement = insert(OuthashesV2).values(**data)
+
+ try:
+ async with self.db.begin():
+ result = await self._execute(statement)
+ return result.rowcount != 0
+ except IntegrityError:
+ self.logger.debug(
+ "%s, %s already in outhash database", data["method"], data["outhash"]
+ )
+ return False
+
+ async def _get_user(self, username):
+ async with self.db.begin():
+ result = await self._execute(
+ select(
+ Users.username,
+ Users.permissions,
+ Users.token,
+ ).where(
+ Users.username == username,
+ )
+ )
+ return result.first()
+
+ async def lookup_user_token(self, username):
+ row = await self._get_user(username)
+ if not row:
+ return None, None
+ return map_user(row), row.token
+
+ async def lookup_user(self, username):
+ return map_user(await self._get_user(username))
+
+ async def set_user_token(self, username, token):
+ async with self.db.begin():
+ result = await self._execute(
+ update(Users)
+ .where(
+ Users.username == username,
+ )
+ .values(
+ token=token,
+ )
+ )
+ return result.rowcount != 0
+
+ async def set_user_perms(self, username, permissions):
+ async with self.db.begin():
+ result = await self._execute(
+ update(Users)
+ .where(Users.username == username)
+ .values(permissions=" ".join(permissions))
+ )
+ return result.rowcount != 0
+
+ async def get_all_users(self):
+ async with self.db.begin():
+ result = await self._execute(
+ select(
+ Users.username,
+ Users.permissions,
+ )
+ )
+ return [map_user(row) for row in result]
+
+ async def new_user(self, username, permissions, token):
+ try:
+ async with self.db.begin():
+ await self._execute(
+ insert(Users).values(
+ username=username,
+ permissions=" ".join(permissions),
+ token=token,
+ )
+ )
+ return True
+ except IntegrityError as e:
+ self.logger.debug("Cannot create new user %s: %s", username, e)
+ return False
+
+ async def delete_user(self, username):
+ async with self.db.begin():
+ result = await self._execute(
+ delete(Users).where(Users.username == username)
+ )
+ return result.rowcount != 0
+
+ async def get_usage(self):
+ usage = {}
+ async with self.db.begin():
+ for name, table in Base.metadata.tables.items():
+ result = await self._execute(
+ statement=select(func.count()).select_from(table)
+ )
+ usage[name] = {
+ "rows": result.scalar(),
+ }
+
+ return usage
+
+ async def get_query_columns(self):
+ columns = set()
+ for table in (UnihashesV3, OuthashesV2):
+ for c in table.__table__.columns:
+ if not isinstance(c.type, Text):
+ continue
+ columns.add(c.key)
+
+ return list(columns)
diff --git a/lib/hashserv/sqlite.py b/lib/hashserv/sqlite.py
new file mode 100644
index 000000000..da2e844a0
--- /dev/null
+++ b/lib/hashserv/sqlite.py
@@ -0,0 +1,562 @@
+#! /usr/bin/env python3
+#
+# Copyright (C) 2023 Garmin Ltd.
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+import sqlite3
+import logging
+from contextlib import closing
+from . import User
+
+logger = logging.getLogger("hashserv.sqlite")
+
+UNIHASH_TABLE_DEFINITION = (
+ ("method", "TEXT NOT NULL", "UNIQUE"),
+ ("taskhash", "TEXT NOT NULL", "UNIQUE"),
+ ("unihash", "TEXT NOT NULL", ""),
+ ("gc_mark", "TEXT NOT NULL", ""),
+)
+
+UNIHASH_TABLE_COLUMNS = tuple(name for name, _, _ in UNIHASH_TABLE_DEFINITION)
+
+OUTHASH_TABLE_DEFINITION = (
+ ("method", "TEXT NOT NULL", "UNIQUE"),
+ ("taskhash", "TEXT NOT NULL", "UNIQUE"),
+ ("outhash", "TEXT NOT NULL", "UNIQUE"),
+ ("created", "DATETIME", ""),
+ # Optional fields
+ ("owner", "TEXT", ""),
+ ("PN", "TEXT", ""),
+ ("PV", "TEXT", ""),
+ ("PR", "TEXT", ""),
+ ("task", "TEXT", ""),
+ ("outhash_siginfo", "TEXT", ""),
+)
+
+OUTHASH_TABLE_COLUMNS = tuple(name for name, _, _ in OUTHASH_TABLE_DEFINITION)
+
+USERS_TABLE_DEFINITION = (
+ ("username", "TEXT NOT NULL", "UNIQUE"),
+ ("token", "TEXT NOT NULL", ""),
+ ("permissions", "TEXT NOT NULL", ""),
+)
+
+USERS_TABLE_COLUMNS = tuple(name for name, _, _ in USERS_TABLE_DEFINITION)
+
+
+CONFIG_TABLE_DEFINITION = (
+ ("name", "TEXT NOT NULL", "UNIQUE"),
+ ("value", "TEXT", ""),
+)
+
+CONFIG_TABLE_COLUMNS = tuple(name for name, _, _ in CONFIG_TABLE_DEFINITION)
+
+
+def _make_table(cursor, name, definition):
+ cursor.execute(
+ """
+ CREATE TABLE IF NOT EXISTS {name} (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ {fields}
+ UNIQUE({unique})
+ )
+ """.format(
+ name=name,
+ fields=" ".join("%s %s," % (name, typ) for name, typ, _ in definition),
+ unique=", ".join(
+ name for name, _, flags in definition if "UNIQUE" in flags
+ ),
+ )
+ )
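+ # For the config table above this renders roughly:
+ #
+ # CREATE TABLE IF NOT EXISTS config (
+ # id INTEGER PRIMARY KEY AUTOINCREMENT,
+ # name TEXT NOT NULL, value TEXT,
+ # UNIQUE(name)
+ # )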
+
+
+def map_user(row):
+ if row is None:
+ return None
+ return User(
+ username=row["username"],
+ permissions=set(row["permissions"].split()),
+ )
+
+
+def _make_condition_statement(columns, condition):
+ where = {}
+ for c in columns:
+ if c in condition and condition[c] is not None:
+ where[c] = condition[c]
+
+ return where, " AND ".join("%s=:%s" % (k, k) for k in where.keys())
+
+
+def _get_sqlite_version(cursor):
+ cursor.execute("SELECT sqlite_version()")
+
+ version = []
+ for v in cursor.fetchone()[0].split("."):
+ try:
+ version.append(int(v))
+ except ValueError:
+ version.append(v)
+
+ return tuple(version)
+
+
+def _schema_table_name(version):
+ if version >= (3, 33):
+ return "sqlite_schema"
+
+ return "sqlite_master"
+
+
+class DatabaseEngine(object):
+ def __init__(self, dbname, sync):
+ self.dbname = dbname
+ self.logger = logger
+ self.sync = sync
+
+ async def create(self):
+ db = sqlite3.connect(self.dbname)
+ db.row_factory = sqlite3.Row
+
+ with closing(db.cursor()) as cursor:
+ _make_table(cursor, "unihashes_v3", UNIHASH_TABLE_DEFINITION)
+ _make_table(cursor, "outhashes_v2", OUTHASH_TABLE_DEFINITION)
+ _make_table(cursor, "users", USERS_TABLE_DEFINITION)
+ _make_table(cursor, "config", CONFIG_TABLE_DEFINITION)
+
+ cursor.execute("PRAGMA journal_mode = WAL")
+ cursor.execute(
+ "PRAGMA synchronous = %s" % ("NORMAL" if self.sync else "OFF")
+ )
+
+ # Drop old indexes
+ cursor.execute("DROP INDEX IF EXISTS taskhash_lookup")
+ cursor.execute("DROP INDEX IF EXISTS outhash_lookup")
+ cursor.execute("DROP INDEX IF EXISTS taskhash_lookup_v2")
+ cursor.execute("DROP INDEX IF EXISTS outhash_lookup_v2")
+ cursor.execute("DROP INDEX IF EXISTS taskhash_lookup_v3")
+
+ # TODO: Upgrade from tasks_v2?
+ cursor.execute("DROP TABLE IF EXISTS tasks_v2")
+
+ # Create new indexes
+ cursor.execute(
+ "CREATE INDEX IF NOT EXISTS taskhash_lookup_v4 ON unihashes_v3 (method, taskhash)"
+ )
+ cursor.execute(
+ "CREATE INDEX IF NOT EXISTS unihash_lookup_v1 ON unihashes_v3 (unihash)"
+ )
+ cursor.execute(
+ "CREATE INDEX IF NOT EXISTS outhash_lookup_v3 ON outhashes_v2 (method, outhash)"
+ )
+ cursor.execute("CREATE INDEX IF NOT EXISTS config_lookup ON config (name)")
+
+ sqlite_version = _get_sqlite_version(cursor)
+
+ cursor.execute(
+ f"""
+ SELECT name FROM {_schema_table_name(sqlite_version)} WHERE type = 'table' AND name = 'unihashes_v2'
+ """
+ )
+ if cursor.fetchone():
+ self.logger.info("Upgrading Unihashes V2 -> V3...")
+ cursor.execute(
+ """
+ INSERT INTO unihashes_v3 (id, method, unihash, taskhash, gc_mark)
+ SELECT id, method, unihash, taskhash, '' FROM unihashes_v2
+ """
+ )
+ cursor.execute("DROP TABLE unihashes_v2")
+ db.commit()
+ self.logger.info("Upgrade complete")
+
+ def connect(self, logger):
+ return Database(logger, self.dbname, self.sync)
+
+
+class Database(object):
+ def __init__(self, logger, dbname, sync):
+ self.dbname = dbname
+ self.logger = logger
+
+ self.db = sqlite3.connect(self.dbname)
+ self.db.row_factory = sqlite3.Row
+
+ with closing(self.db.cursor()) as cursor:
+ cursor.execute("PRAGMA journal_mode = WAL")
+ cursor.execute(
+ "PRAGMA synchronous = %s" % ("NORMAL" if sync else "OFF")
+ )
+
+ self.sqlite_version = _get_sqlite_version(cursor)
+
+ async def __aenter__(self):
+ return self
+
+ async def __aexit__(self, exc_type, exc_value, traceback):
+ await self.close()
+
+ async def _set_config(self, cursor, name, value):
+ cursor.execute(
+ """
+ INSERT OR REPLACE INTO config (id, name, value) VALUES
+ ((SELECT id FROM config WHERE name=:name), :name, :value)
+ """,
+ {
+ "name": name,
+ "value": value,
+ },
+ )
+
+ async def _get_config(self, cursor, name):
+ cursor.execute(
+ "SELECT value FROM config WHERE name=:name",
+ {
+ "name": name,
+ },
+ )
+ row = cursor.fetchone()
+ if row is None:
+ return None
+ return row["value"]
+
+ async def close(self):
+ self.db.close()
+
+ async def get_unihash_by_taskhash_full(self, method, taskhash):
+ with closing(self.db.cursor()) as cursor:
+ cursor.execute(
+ """
+ SELECT *, unihashes_v3.unihash AS unihash FROM outhashes_v2
+ INNER JOIN unihashes_v3 ON unihashes_v3.method=outhashes_v2.method AND unihashes_v3.taskhash=outhashes_v2.taskhash
+ WHERE outhashes_v2.method=:method AND outhashes_v2.taskhash=:taskhash
+ ORDER BY outhashes_v2.created ASC
+ LIMIT 1
+ """,
+ {
+ "method": method,
+ "taskhash": taskhash,
+ },
+ )
+ return cursor.fetchone()
+
+ async def get_unihash_by_outhash(self, method, outhash):
+ with closing(self.db.cursor()) as cursor:
+ cursor.execute(
+ """
+ SELECT *, unihashes_v3.unihash AS unihash FROM outhashes_v2
+ INNER JOIN unihashes_v3 ON unihashes_v3.method=outhashes_v2.method AND unihashes_v3.taskhash=outhashes_v2.taskhash
+ WHERE outhashes_v2.method=:method AND outhashes_v2.outhash=:outhash
+ ORDER BY outhashes_v2.created ASC
+ LIMIT 1
+ """,
+ {
+ "method": method,
+ "outhash": outhash,
+ },
+ )
+ return cursor.fetchone()
+
+ async def unihash_exists(self, unihash):
+ with closing(self.db.cursor()) as cursor:
+ cursor.execute(
+ """
+ SELECT * FROM unihashes_v3 WHERE unihash=:unihash
+ LIMIT 1
+ """,
+ {
+ "unihash": unihash,
+ },
+ )
+ return cursor.fetchone() is not None
+
+ async def get_outhash(self, method, outhash):
+ with closing(self.db.cursor()) as cursor:
+ cursor.execute(
+ """
+ SELECT * FROM outhashes_v2
+ WHERE outhashes_v2.method=:method AND outhashes_v2.outhash=:outhash
+ ORDER BY outhashes_v2.created ASC
+ LIMIT 1
+ """,
+ {
+ "method": method,
+ "outhash": outhash,
+ },
+ )
+ return cursor.fetchone()
+
+ async def get_equivalent_for_outhash(self, method, outhash, taskhash):
+ with closing(self.db.cursor()) as cursor:
+ cursor.execute(
+ """
+ SELECT outhashes_v2.taskhash AS taskhash, unihashes_v3.unihash AS unihash FROM outhashes_v2
+ INNER JOIN unihashes_v3 ON unihashes_v3.method=outhashes_v2.method AND unihashes_v3.taskhash=outhashes_v2.taskhash
+ -- Select any matching output hash except the one we just inserted
+ WHERE outhashes_v2.method=:method AND outhashes_v2.outhash=:outhash AND outhashes_v2.taskhash!=:taskhash
+ -- Pick the oldest hash
+ ORDER BY outhashes_v2.created ASC
+ LIMIT 1
+ """,
+ {
+ "method": method,
+ "outhash": outhash,
+ "taskhash": taskhash,
+ },
+ )
+ return cursor.fetchone()
+
+ async def get_equivalent(self, method, taskhash):
+ with closing(self.db.cursor()) as cursor:
+ cursor.execute(
+ "SELECT taskhash, method, unihash FROM unihashes_v3 WHERE method=:method AND taskhash=:taskhash",
+ {
+ "method": method,
+ "taskhash": taskhash,
+ },
+ )
+ return cursor.fetchone()
+
+ async def remove(self, condition):
+ def do_remove(columns, table_name, cursor):
+ where, clause = _make_condition_statement(columns, condition)
+ if where:
+ query = f"DELETE FROM {table_name} WHERE {clause}"
+ cursor.execute(query, where)
+ return cursor.rowcount
+
+ return 0
+
+ count = 0
+ with closing(self.db.cursor()) as cursor:
+ count += do_remove(OUTHASH_TABLE_COLUMNS, "outhashes_v2", cursor)
+ count += do_remove(UNIHASH_TABLE_COLUMNS, "unihashes_v3", cursor)
+ self.db.commit()
+
+ return count
+
+ async def get_current_gc_mark(self):
+ with closing(self.db.cursor()) as cursor:
+ return await self._get_config(cursor, "gc-mark")
+
+ async def gc_status(self):
+ with closing(self.db.cursor()) as cursor:
+ cursor.execute(
+ """
+ SELECT COUNT() FROM unihashes_v3 WHERE
+ gc_mark=COALESCE((SELECT value FROM config WHERE name='gc-mark'), '')
+ """
+ )
+ keep_rows = cursor.fetchone()[0]
+
+ cursor.execute(
+ """
+ SELECT COUNT() FROM unihashes_v3 WHERE
+ gc_mark!=COALESCE((SELECT value FROM config WHERE name='gc-mark'), '')
+ """
+ )
+ remove_rows = cursor.fetchone()[0]
+
+ current_mark = await self._get_config(cursor, "gc-mark")
+
+ return (keep_rows, remove_rows, current_mark)
+
+ async def gc_mark(self, mark, condition):
+ with closing(self.db.cursor()) as cursor:
+ await self._set_config(cursor, "gc-mark", mark)
+
+ where, clause = _make_condition_statement(UNIHASH_TABLE_COLUMNS, condition)
+
+ new_rows = 0
+ if where:
+ cursor.execute(
+ f"""
+ UPDATE unihashes_v3 SET
+ gc_mark=COALESCE((SELECT value FROM config WHERE name='gc-mark'), '')
+ WHERE {clause}
+ """,
+ where,
+ )
+ new_rows = cursor.rowcount
+
+ self.db.commit()
+ return new_rows
+
+ async def gc_sweep(self):
+ with closing(self.db.cursor()) as cursor:
+ # NOTE: COALESCE is not used in this query so that if the current
+ # mark is NULL, nothing will happen
+ cursor.execute(
+ """
+ DELETE FROM unihashes_v3 WHERE
+ gc_mark!=(SELECT value FROM config WHERE name='gc-mark')
+ """
+ )
+ count = cursor.rowcount
+ await self._set_config(cursor, "gc-mark", None)
+
+ self.db.commit()
+ return count
+
+ async def clean_unused(self, oldest):
+ with closing(self.db.cursor()) as cursor:
+ cursor.execute(
+ """
+ DELETE FROM outhashes_v2 WHERE created<:oldest AND NOT EXISTS (
+ SELECT unihashes_v3.id FROM unihashes_v3 WHERE unihashes_v3.method=outhashes_v2.method AND unihashes_v3.taskhash=outhashes_v2.taskhash LIMIT 1
+ )
+ """,
+ {
+ "oldest": oldest,
+ },
+ )
+ self.db.commit()
+ return cursor.rowcount
+
+ async def insert_unihash(self, method, taskhash, unihash):
+ with closing(self.db.cursor()) as cursor:
+ prevrowid = cursor.lastrowid
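+ # INSERT OR IGNORE leaves lastrowid unchanged when the row already
+ # exists, so comparing it before and after the execute reveals
+ # whether a new row was actually inserted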
+ cursor.execute(
+ """
+ INSERT OR IGNORE INTO unihashes_v3 (method, taskhash, unihash, gc_mark) VALUES
+ (
+ :method,
+ :taskhash,
+ :unihash,
+ COALESCE((SELECT value FROM config WHERE name='gc-mark'), '')
+ )
+ """,
+ {
+ "method": method,
+ "taskhash": taskhash,
+ "unihash": unihash,
+ },
+ )
+ self.db.commit()
+ return cursor.lastrowid != prevrowid
+
+ async def insert_outhash(self, data):
+ data = {k: v for k, v in data.items() if k in OUTHASH_TABLE_COLUMNS}
+ keys = sorted(data.keys())
+ query = "INSERT OR IGNORE INTO outhashes_v2 ({fields}) VALUES({values})".format(
+ fields=", ".join(keys),
+ values=", ".join(":" + k for k in keys),
+ )
+ with closing(self.db.cursor()) as cursor:
+ prevrowid = cursor.lastrowid
+ cursor.execute(query, data)
+ self.db.commit()
+ return cursor.lastrowid != prevrowid
+
+ def _get_user(self, username):
+ with closing(self.db.cursor()) as cursor:
+ cursor.execute(
+ """
+ SELECT username, permissions, token FROM users WHERE username=:username
+ """,
+ {
+ "username": username,
+ },
+ )
+ return cursor.fetchone()
+
+ async def lookup_user_token(self, username):
+ row = self._get_user(username)
+ if row is None:
+ return None, None
+ return map_user(row), row["token"]
+
+ async def lookup_user(self, username):
+ return map_user(self._get_user(username))
+
+ async def set_user_token(self, username, token):
+ with closing(self.db.cursor()) as cursor:
+ cursor.execute(
+ """
+ UPDATE users SET token=:token WHERE username=:username
+ """,
+ {
+ "username": username,
+ "token": token,
+ },
+ )
+ self.db.commit()
+ return cursor.rowcount != 0
+
+ async def set_user_perms(self, username, permissions):
+ with closing(self.db.cursor()) as cursor:
+ cursor.execute(
+ """
+ UPDATE users SET permissions=:permissions WHERE username=:username
+ """,
+ {
+ "username": username,
+ "permissions": " ".join(permissions),
+ },
+ )
+ self.db.commit()
+ return cursor.rowcount != 0
+
+ async def get_all_users(self):
+ with closing(self.db.cursor()) as cursor:
+ cursor.execute("SELECT username, permissions FROM users")
+ return [map_user(r) for r in cursor.fetchall()]
+
+ async def new_user(self, username, permissions, token):
+ with closing(self.db.cursor()) as cursor:
+ try:
+ cursor.execute(
+ """
+ INSERT INTO users (username, token, permissions) VALUES (:username, :token, :permissions)
+ """,
+ {
+ "username": username,
+ "token": token,
+ "permissions": " ".join(permissions),
+ },
+ )
+ self.db.commit()
+ return True
+ except sqlite3.IntegrityError:
+ return False
+
+ async def delete_user(self, username):
+ with closing(self.db.cursor()) as cursor:
+ cursor.execute(
+ """
+ DELETE FROM users WHERE username=:username
+ """,
+ {
+ "username": username,
+ },
+ )
+ self.db.commit()
+ return cursor.rowcount != 0
+
+ async def get_usage(self):
+ usage = {}
+ with closing(self.db.cursor()) as cursor:
+ cursor.execute(
+ f"""
+ SELECT name FROM {_schema_table_name(self.sqlite_version)} WHERE type = 'table' AND name NOT LIKE 'sqlite_%'
+ """
+ )
+ for row in cursor.fetchall():
+ cursor.execute(
+ """
+ SELECT COUNT() FROM %s
+ """
+ % row["name"],
+ )
+ usage[row["name"]] = {
+ "rows": cursor.fetchone()[0],
+ }
+ return usage
+
+ async def get_query_columns(self):
+ columns = set()
+ for name, typ, _ in UNIHASH_TABLE_DEFINITION + OUTHASH_TABLE_DEFINITION:
+ if typ.startswith("TEXT"):
+ columns.add(name)
+ return list(columns)
diff --git a/lib/hashserv/tests.py b/lib/hashserv/tests.py
index e2b762dbf..0809453cf 100644
--- a/lib/hashserv/tests.py
+++ b/lib/hashserv/tests.py
@@ -6,6 +6,9 @@
#
from . import create_server, create_client
+from .server import DEFAULT_ANON_PERMS, ALL_PERMISSIONS
+from bb.asyncrpc import InvokeError
+from .client import ClientPool
import hashlib
import logging
import multiprocessing
@@ -15,46 +18,80 @@ import tempfile
import threading
import unittest
import socket
-
-def _run_server(server, idx):
- # logging.basicConfig(level=logging.DEBUG, filename='bbhashserv.log', filemode='w',
- # format='%(levelname)s %(filename)s:%(lineno)d %(message)s')
- sys.stdout = open('bbhashserv-%d.log' % idx, 'w')
+import time
+import signal
+import subprocess
+import json
+import re
+from pathlib import Path
+
+
+THIS_DIR = Path(__file__).parent
+BIN_DIR = THIS_DIR.parent.parent / "bin"
+
+def server_prefunc(server, idx):
+ logging.basicConfig(level=logging.DEBUG, filename='bbhashserv-%d.log' % idx, filemode='w',
+ format='%(levelname)s %(filename)s:%(lineno)d %(message)s')
+ server.logger.debug("Running server %d" % idx)
+ sys.stdout = open('bbhashserv-stdout-%d.log' % idx, 'w')
sys.stderr = sys.stdout
- server.serve_forever()
-
class HashEquivalenceTestSetup(object):
METHOD = 'TestMethod'
server_index = 0
+ client_index = 0
- def start_server(self, dbpath=None, upstream=None, read_only=False):
+ def start_server(self, dbpath=None, upstream=None, read_only=False, prefunc=server_prefunc, anon_perms=DEFAULT_ANON_PERMS, admin_username=None, admin_password=None):
self.server_index += 1
if dbpath is None:
- dbpath = os.path.join(self.temp_dir.name, "db%d.sqlite" % self.server_index)
+ dbpath = self.make_dbpath()
+
+ def cleanup_server(server):
+ if server.process.exitcode is not None:
+ return
- def cleanup_thread(thread):
- thread.terminate()
- thread.join()
+ server.process.terminate()
+ server.process.join()
server = create_server(self.get_server_addr(self.server_index),
dbpath,
upstream=upstream,
- read_only=read_only)
+ read_only=read_only,
+ anon_perms=anon_perms,
+ admin_username=admin_username,
+ admin_password=admin_password)
server.dbpath = dbpath
- server.thread = multiprocessing.Process(target=_run_server, args=(server, self.server_index))
- server.thread.start()
- self.addCleanup(cleanup_thread, server.thread)
+ server.serve_as_process(prefunc=prefunc, args=(self.server_index,))
+ self.addCleanup(cleanup_server, server)
+
+ return server
+
+ def make_dbpath(self):
+ return os.path.join(self.temp_dir.name, "db%d.sqlite" % self.server_index)
+ def start_client(self, server_address, username=None, password=None):
def cleanup_client(client):
client.close()
- client = create_client(server.address)
+ client = create_client(server_address, username=username, password=password)
self.addCleanup(cleanup_client, client)
- return (client, server)
+ return client
+
+ def start_test_server(self):
+ self.server = self.start_server()
+ return self.server.address
+
+ def start_auth_server(self):
+ auth_server = self.start_server(self.server.dbpath, anon_perms=[], admin_username="admin", admin_password="password")
+ self.auth_server_address = auth_server.address
+ self.admin_client = self.start_client(auth_server.address, username="admin", password="password")
+ return self.admin_client
+
+ def auth_client(self, user):
+ return self.start_client(self.auth_server_address, user["username"], user["token"])
def setUp(self):
if sys.version_info < (3, 5, 0):
@@ -63,24 +100,82 @@ class HashEquivalenceTestSetup(object):
self.temp_dir = tempfile.TemporaryDirectory(prefix='bb-hashserv')
self.addCleanup(self.temp_dir.cleanup)
- (self.client, self.server) = self.start_server()
+ self.server_address = self.start_test_server()
+
+ self.client = self.start_client(self.server_address)
def assertClientGetHash(self, client, taskhash, unihash):
result = client.get_unihash(self.METHOD, taskhash)
self.assertEqual(result, unihash)
+ def assertUserPerms(self, user, permissions):
+ with self.auth_client(user) as client:
+ info = client.get_user()
+ self.assertEqual(info, {
+ "username": user["username"],
+ "permissions": permissions,
+ })
-class HashEquivalenceCommonTests(object):
- def test_create_hash(self):
+ def assertUserCanAuth(self, user):
+ with self.start_client(self.auth_server_address) as client:
+ client.auth(user["username"], user["token"])
+
+ def assertUserCannotAuth(self, user):
+ with self.start_client(self.auth_server_address) as client, self.assertRaises(InvokeError):
+ client.auth(user["username"], user["token"])
+
+ def create_test_hash(self, client):
# Simple test that hashes can be created
taskhash = '35788efcb8dfb0a02659d81cf2bfd695fb30faf9'
outhash = '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f'
unihash = 'f46d3fbb439bd9b921095da657a4de906510d2cd'
- self.assertClientGetHash(self.client, taskhash, None)
+ self.assertClientGetHash(client, taskhash, None)
- result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
+ result = client.report_unihash(taskhash, self.METHOD, outhash, unihash)
self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash')
+ return taskhash, outhash, unihash
+
+ def run_hashclient(self, args, **kwargs):
+ try:
+ p = subprocess.run(
+ [BIN_DIR / "bitbake-hashclient"] + args,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ encoding="utf-8",
+ **kwargs
+ )
+ except subprocess.CalledProcessError as e:
+ print(e.output)
+ raise e
+
+ print(p.stdout)
+ return p
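+ # Note that subprocess.run() raises CalledProcessError only when
+ # check=True is forwarded through **kwargs; otherwise callers must
+ # inspect p.returncode themselves.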
+
+
+class HashEquivalenceCommonTests(object):
+ def auth_perms(self, *permissions):
+ self.client_index += 1
+ user = self.create_user(f"user-{self.client_index}", permissions)
+ return self.auth_client(user)
+
+ def create_user(self, username, permissions, *, client=None):
+ def remove_user(username):
+ try:
+ self.admin_client.delete_user(username)
+ except bb.asyncrpc.InvokeError:
+ pass
+
+ if client is None:
+ client = self.admin_client
+
+ user = client.new_user(username, permissions)
+ self.addCleanup(remove_user, username)
+
+ return user
+
+ def test_create_hash(self):
+ return self.create_test_hash(self.client)
def test_create_equivalent(self):
# Tests that a second reported task with the same outhash will be
@@ -122,6 +217,57 @@ class HashEquivalenceCommonTests(object):
self.assertClientGetHash(self.client, taskhash, unihash)
+ def test_remove_taskhash(self):
+ taskhash, outhash, unihash = self.create_test_hash(self.client)
+ result = self.client.remove({"taskhash": taskhash})
+ self.assertGreater(result["count"], 0)
+ self.assertClientGetHash(self.client, taskhash, None)
+
+ result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash)
+ self.assertIsNone(result_outhash)
+
+ def test_remove_unihash(self):
+ taskhash, outhash, unihash = self.create_test_hash(self.client)
+ result = self.client.remove({"unihash": unihash})
+ self.assertGreater(result["count"], 0)
+ self.assertClientGetHash(self.client, taskhash, None)
+
+ def test_remove_outhash(self):
+ taskhash, outhash, unihash = self.create_test_hash(self.client)
+ result = self.client.remove({"outhash": outhash})
+ self.assertGreater(result["count"], 0)
+
+ result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash)
+ self.assertIsNone(result_outhash)
+
+ def test_remove_method(self):
+ taskhash, outhash, unihash = self.create_test_hash(self.client)
+ result = self.client.remove({"method": self.METHOD})
+ self.assertGreater(result["count"], 0)
+ self.assertClientGetHash(self.client, taskhash, None)
+
+ result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash)
+ self.assertIsNone(result_outhash)
+
+ def test_clean_unused(self):
+ taskhash, outhash, unihash = self.create_test_hash(self.client)
+
+ # Clean the database, which should not remove anything because all hashes are in use
+ result = self.client.clean_unused(0)
+ self.assertEqual(result["count"], 0)
+ self.assertClientGetHash(self.client, taskhash, unihash)
+
+ # Remove the unihash. The row in the outhash table should still be present
+ self.client.remove({"unihash": unihash})
+ result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash, False)
+ self.assertIsNotNone(result_outhash)
+
+ # Now clean with no minimum age which will remove the outhash
+ result = self.client.clean_unused(0)
+ self.assertEqual(result["count"], 1)
+ result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash, False)
+ self.assertIsNone(result_outhash)
+
def test_huge_message(self):
# Simple test that hashes can be created
taskhash = 'c665584ee6817aa99edfc77a44dd853828279370'
@@ -137,16 +283,21 @@ class HashEquivalenceCommonTests(object):
})
self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash')
- result = self.client.get_taskhash(self.METHOD, taskhash, True)
- self.assertEqual(result['taskhash'], taskhash)
- self.assertEqual(result['unihash'], unihash)
- self.assertEqual(result['method'], self.METHOD)
- self.assertEqual(result['outhash'], outhash)
- self.assertEqual(result['outhash_siginfo'], siginfo)
+ result_unihash = self.client.get_taskhash(self.METHOD, taskhash, True)
+ self.assertEqual(result_unihash['taskhash'], taskhash)
+ self.assertEqual(result_unihash['unihash'], unihash)
+ self.assertEqual(result_unihash['method'], self.METHOD)
+
+ result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash)
+ self.assertEqual(result_outhash['taskhash'], taskhash)
+ self.assertEqual(result_outhash['method'], self.METHOD)
+ self.assertEqual(result_outhash['unihash'], unihash)
+ self.assertEqual(result_outhash['outhash'], outhash)
+ self.assertEqual(result_outhash['outhash_siginfo'], siginfo)
def test_stress(self):
def query_server(failures):
- client = Client(self.server.address)
+ client = Client(self.server_address)
try:
for i in range(1000):
taskhash = hashlib.sha256()
@@ -185,8 +336,10 @@ class HashEquivalenceCommonTests(object):
# the side client. It also verifies that the results are pulled into
# the downstream database by checking that the downstream and side servers
# match after the downstream is done waiting for all backfill tasks
- (down_client, down_server) = self.start_server(upstream=self.server.address)
- (side_client, side_server) = self.start_server(dbpath=down_server.dbpath)
+ down_server = self.start_server(upstream=self.server_address)
+ down_client = self.start_client(down_server.address)
+ side_server = self.start_server(dbpath=down_server.dbpath)
+ side_client = self.start_client(side_server.address)
def check_hash(taskhash, unihash, old_sidehash):
nonlocal down_client
@@ -257,15 +410,57 @@ class HashEquivalenceCommonTests(object):
result = down_client.report_unihash(taskhash6, self.METHOD, outhash5, unihash6)
self.assertEqual(result['unihash'], unihash5, 'Server failed to copy unihash from upstream')
+ # Tests read-through of taskhash and outhash queries from the upstream server
+ taskhash7 = '9d81d76242cc7cfaf7bf74b94b9cd2e29324ed74'
+ outhash7 = '8470d56547eea6236d7c81a644ce74670ca0bbda998e13c629ef6bb3f0d60b69'
+ unihash7 = '05d2a63c81e32f0a36542ca677e8ad852365c538'
+ self.client.report_unihash(taskhash7, self.METHOD, outhash7, unihash7)
+
+ result = down_client.get_taskhash(self.METHOD, taskhash7, True)
+ self.assertEqual(result['unihash'], unihash7, 'Server failed to copy unihash from upstream')
+ self.assertEqual(result['outhash'], outhash7, 'Server failed to copy outhash from upstream')
+ self.assertEqual(result['taskhash'], taskhash7, 'Server failed to copy taskhash from upstream')
+ self.assertEqual(result['method'], self.METHOD)
+
+ taskhash8 = '86978a4c8c71b9b487330b0152aade10c1ee58aa'
+ outhash8 = 'ca8c128e9d9e4a28ef24d0508aa20b5cf880604eacd8f65c0e366f7e0cc5fbcf'
+ unihash8 = 'd8bcf25369d40590ad7d08c84d538982f2023e01'
+ self.client.report_unihash(taskhash8, self.METHOD, outhash8, unihash8)
+
+ result = down_client.get_outhash(self.METHOD, outhash8, taskhash8)
+ self.assertEqual(result['unihash'], unihash8, 'Server failed to copy unihash from upstream')
+ self.assertEqual(result['outhash'], outhash8, 'Server failed to copy outhash from upstream')
+ self.assertEqual(result['taskhash'], taskhash8, 'Server failed to copy taskhash from upstream')
+ self.assertEqual(result['method'], self.METHOD)
+
+ taskhash9 = 'ae6339531895ddf5b67e663e6a374ad8ec71d81c'
+ outhash9 = 'afc78172c81880ae10a1fec994b5b4ee33d196a001a1b66212a15ebe573e00b5'
+ unihash9 = '6662e699d6e3d894b24408ff9a4031ef9b038ee8'
+ self.client.report_unihash(taskhash9, self.METHOD, outhash9, unihash9)
+
+ result = down_client.get_taskhash(self.METHOD, taskhash9, False)
+ self.assertEqual(result['unihash'], unihash9, 'Server failed to copy unihash from upstream')
+ self.assertEqual(result['taskhash'], taskhash9, 'Server failed to copy taskhash from upstream')
+ self.assertEqual(result['method'], self.METHOD)
+
+ def test_unihash_exists(self):
+ taskhash, outhash, unihash = self.create_test_hash(self.client)
+ self.assertTrue(self.client.unihash_exists(unihash))
+ self.assertFalse(self.client.unihash_exists('6662e699d6e3d894b24408ff9a4031ef9b038ee8'))
+
def test_ro_server(self):
- (ro_client, ro_server) = self.start_server(dbpath=self.server.dbpath, read_only=True)
+ rw_server = self.start_server()
+ rw_client = self.start_client(rw_server.address)
+
+ ro_server = self.start_server(dbpath=rw_server.dbpath, read_only=True)
+ ro_client = self.start_client(ro_server.address)
# Report a hash via the read-write server
taskhash = '35788efcb8dfb0a02659d81cf2bfd695fb30faf9'
outhash = '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f'
unihash = 'f46d3fbb439bd9b921095da657a4de906510d2cd'
- result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
+ result = rw_client.report_unihash(taskhash, self.METHOD, outhash, unihash)
self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash')
# Check the hash via the read-only server
@@ -276,11 +471,940 @@ class HashEquivalenceCommonTests(object):
outhash2 = '3c979c3db45c569f51ab7626a4651074be3a9d11a84b1db076f5b14f7d39db44'
unihash2 = '90e9bc1d1f094c51824adca7f8ea79a048d68824'
- with self.assertRaises(ConnectionError):
- ro_client.report_unihash(taskhash2, self.METHOD, outhash2, unihash2)
+ result = ro_client.report_unihash(taskhash2, self.METHOD, outhash2, unihash2)
+ self.assertEqual(result['unihash'], unihash2)
# Ensure that the database was not modified
+ self.assertClientGetHash(rw_client, taskhash2, None)
+
+
+ def test_slow_server_start(self):
+ # Ensures that the server will exit correctly even if it gets a SIGTERM
+ # before entering the main loop
+
+ event = multiprocessing.Event()
+
+ def prefunc(server, idx):
+ nonlocal event
+ server_prefunc(server, idx)
+ event.wait()
+
+ def do_nothing(signum, frame):
+ pass
+
+ old_signal = signal.signal(signal.SIGTERM, do_nothing)
+ self.addCleanup(signal.signal, signal.SIGTERM, old_signal)
+
+ server = self.start_server(prefunc=prefunc)
+ server.process.terminate()
+ time.sleep(30)
+ event.set()
+ server.process.join(300)
+ self.assertIsNotNone(server.process.exitcode, "Server did not exit in a timely manner!")
+
+ def test_diverging_report_race(self):
+ # Tests that a reported task will correctly pick up an updated unihash
+
+ # This is a baseline report added to the database to ensure that there
+ # is something to match against as equivalent
+ outhash1 = 'afd11c366050bcd75ad763e898e4430e2a60659b26f83fbb22201a60672019fa'
+ taskhash1 = '3bde230c743fc45ab61a065d7a1815fbfa01c4740e4c895af2eb8dc0f684a4ab'
+ unihash1 = '3bde230c743fc45ab61a065d7a1815fbfa01c4740e4c895af2eb8dc0f684a4ab'
+ result = self.client.report_unihash(taskhash1, self.METHOD, outhash1, unihash1)
+
+ # Add a report that is equivalent to Task 1. It should ignore the
+ # provided unihash and report the unihash from task 1
+ taskhash2 = '6259ae8263bd94d454c086f501c37e64c4e83cae806902ca95b4ab513546b273'
+ unihash2 = taskhash2
+ result = self.client.report_unihash(taskhash2, self.METHOD, outhash1, unihash2)
+ self.assertEqual(result['unihash'], unihash1)
+
+ # Add another report for Task 2, but with a different outhash (e.g. the
+ # task is non-deterministic). It should still be marked with the Task 1
+ # unihash because it has the Task 2 taskhash, which is equivalent to
+ # Task 1
+ outhash3 = 'd2187ee3a8966db10b34fe0e863482288d9a6185cb8ef58a6c1c6ace87a2f24c'
+ result = self.client.report_unihash(taskhash2, self.METHOD, outhash3, unihash2)
+ self.assertEqual(result['unihash'], unihash1)
+
+
+ def test_diverging_report_reverse_race(self):
+ # Same idea as the previous test, but Tasks 2 and 3 are reported in
+ # the opposite order
+
+ outhash1 = 'afd11c366050bcd75ad763e898e4430e2a60659b26f83fbb22201a60672019fa'
+ taskhash1 = '3bde230c743fc45ab61a065d7a1815fbfa01c4740e4c895af2eb8dc0f684a4ab'
+ unihash1 = '3bde230c743fc45ab61a065d7a1815fbfa01c4740e4c895af2eb8dc0f684a4ab'
+ result = self.client.report_unihash(taskhash1, self.METHOD, outhash1, unihash1)
+
+ taskhash2 = '6259ae8263bd94d454c086f501c37e64c4e83cae806902ca95b4ab513546b273'
+ unihash2 = taskhash2
+
+ # Report Task 3 first. Since there is nothing else in the database it
+ # will use the client provided unihash
+ outhash3 = 'd2187ee3a8966db10b34fe0e863482288d9a6185cb8ef58a6c1c6ace87a2f24c'
+ result = self.client.report_unihash(taskhash2, self.METHOD, outhash3, unihash2)
+ self.assertEqual(result['unihash'], unihash2)
+
+ # Report Task 2. This is equivalent to Task 1 but there is already a mapping for
+ # taskhash2 so it will report unihash2
+ result = self.client.report_unihash(taskhash2, self.METHOD, outhash1, unihash2)
+ self.assertEqual(result['unihash'], unihash2)
+
+ # The originally reported unihash for Task 3 should be unchanged even if it
+ # shares a taskhash with Task 2
+ self.assertClientGetHash(self.client, taskhash2, unihash2)
+
+
+ def test_client_pool_get_unihashes(self):
+ TEST_INPUT = (
+ # taskhash outhash unihash
+ ('8aa96fcffb5831b3c2c0cb75f0431e3f8b20554a', 'afe240a439959ce86f5e322f8c208e1fedefea9e813f2140c81af866cc9edf7e','218e57509998197d570e2c98512d0105985dffc9'),
+ # Duplicated taskhash with multiple output hashes and unihashes.
+ ('8aa96fcffb5831b3c2c0cb75f0431e3f8b20554a', '0904a7fe3dc712d9fd8a74a616ddca2a825a8ee97adf0bd3fc86082c7639914d', 'ae9a7d252735f0dafcdb10e2e02561ca3a47314c'),
+ # Equivalent hash
+ ("044c2ec8aaf480685a00ff6ff49e6162e6ad34e1", '0904a7fe3dc712d9fd8a74a616ddca2a825a8ee97adf0bd3fc86082c7639914d', "def64766090d28f627e816454ed46894bb3aab36"),
+ ("e3da00593d6a7fb435c7e2114976c59c5fd6d561", "1cf8713e645f491eb9c959d20b5cae1c47133a292626dda9b10709857cbe688a", "3b5d3d83f07f259e9086fcb422c855286e18a57d"),
+ ('35788efcb8dfb0a02659d81cf2bfd695fb30faf9', '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f', 'f46d3fbb439bd9b921095da657a4de906510d2cd'),
+ ('35788efcb8dfb0a02659d81cf2bfd695fb30fafa', '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f', 'f46d3fbb439bd9b921095da657a4de906510d2ce'),
+ ('9d81d76242cc7cfaf7bf74b94b9cd2e29324ed74', '8470d56547eea6236d7c81a644ce74670ca0bbda998e13c629ef6bb3f0d60b69', '05d2a63c81e32f0a36542ca677e8ad852365c538'),
+ )
+ EXTRA_QUERIES = (
+ "6b6be7a84ab179b4240c4302518dc3f6",
+ )
+
+ with ClientPool(self.server_address, 10) as client_pool:
+ for taskhash, outhash, unihash in TEST_INPUT:
+ self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
+
+ query = {idx: (self.METHOD, data[0]) for idx, data in enumerate(TEST_INPUT)}
+ for idx, taskhash in enumerate(EXTRA_QUERIES):
+ query[idx + len(TEST_INPUT)] = (self.METHOD, taskhash)
+
+ result = client_pool.get_unihashes(query)
+
+ self.assertDictEqual(result, {
+ 0: "218e57509998197d570e2c98512d0105985dffc9",
+ 1: "218e57509998197d570e2c98512d0105985dffc9",
+ 2: "218e57509998197d570e2c98512d0105985dffc9",
+ 3: "3b5d3d83f07f259e9086fcb422c855286e18a57d",
+ 4: "f46d3fbb439bd9b921095da657a4de906510d2cd",
+ 5: "f46d3fbb439bd9b921095da657a4de906510d2cd",
+ 6: "05d2a63c81e32f0a36542ca677e8ad852365c538",
+ 7: None,
+ })
+
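The pool pattern above generalizes to any batch of lookups. A minimal sketch, assuming ClientPool is importable from hashserv.client as these tests construct it; the address, keys and hashes below are placeholders.

    from hashserv.client import ClientPool  # assumed import path

    METHOD = "TestMethod"

    # Map arbitrary keys to (method, taskhash) pairs; the result dict
    # reuses the same keys and maps unknown taskhashes to None.
    queries = {
        "task-a": (METHOD, "8aa96fcffb5831b3c2c0cb75f0431e3f8b20554a"),
        "task-b": (METHOD, "6b6be7a84ab179b4240c4302518dc3f6"),
    }

    # Up to 10 connections are used to resolve the queries concurrently.
    with ClientPool("localhost:8686", 10) as pool:
        unihashes = pool.get_unihashes(queries)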
+ def test_client_pool_unihash_exists(self):
+ TEST_INPUT = (
+ # taskhash outhash unihash
+ ('8aa96fcffb5831b3c2c0cb75f0431e3f8b20554a', 'afe240a439959ce86f5e322f8c208e1fedefea9e813f2140c81af866cc9edf7e','218e57509998197d570e2c98512d0105985dffc9'),
+ # Duplicated taskhash with multiple output hashes and unihashes.
+ ('8aa96fcffb5831b3c2c0cb75f0431e3f8b20554a', '0904a7fe3dc712d9fd8a74a616ddca2a825a8ee97adf0bd3fc86082c7639914d', 'ae9a7d252735f0dafcdb10e2e02561ca3a47314c'),
+ # Equivalent hash
+ ("044c2ec8aaf480685a00ff6ff49e6162e6ad34e1", '0904a7fe3dc712d9fd8a74a616ddca2a825a8ee97adf0bd3fc86082c7639914d', "def64766090d28f627e816454ed46894bb3aab36"),
+ ("e3da00593d6a7fb435c7e2114976c59c5fd6d561", "1cf8713e645f491eb9c959d20b5cae1c47133a292626dda9b10709857cbe688a", "3b5d3d83f07f259e9086fcb422c855286e18a57d"),
+ ('35788efcb8dfb0a02659d81cf2bfd695fb30faf9', '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f', 'f46d3fbb439bd9b921095da657a4de906510d2cd'),
+ ('35788efcb8dfb0a02659d81cf2bfd695fb30fafa', '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f', 'f46d3fbb439bd9b921095da657a4de906510d2ce'),
+ ('9d81d76242cc7cfaf7bf74b94b9cd2e29324ed74', '8470d56547eea6236d7c81a644ce74670ca0bbda998e13c629ef6bb3f0d60b69', '05d2a63c81e32f0a36542ca677e8ad852365c538'),
+ )
+ EXTRA_QUERIES = (
+ "6b6be7a84ab179b4240c4302518dc3f6",
+ )
+
+ result_unihashes = set()
+
+
+ with ClientPool(self.server_address, 10) as client_pool:
+ for taskhash, outhash, unihash in TEST_INPUT:
+ result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
+ result_unihashes.add(result["unihash"])
+
+ query = {}
+ expected = {}
+
+ for _, _, unihash in TEST_INPUT:
+ idx = len(query)
+ query[idx] = unihash
+ expected[idx] = unihash in result_unihashes
+
+
+ for unihash in EXTRA_QUERIES:
+ idx = len(query)
+ query[idx] = unihash
+ expected[idx] = False
+
+ result = client_pool.unihashes_exist(query)
+ self.assertDictEqual(result, expected)
+
+
+ def test_auth_read_perms(self):
+ admin_client = self.start_auth_server()
+
+ # Create hashes with non-authenticated server
+ taskhash, outhash, unihash = self.create_test_hash(self.client)
+
+ # Validate hash can be retrieved using authenticated client
+ with self.auth_perms("@read") as client:
+ self.assertClientGetHash(client, taskhash, unihash)
+
+ with self.auth_perms() as client, self.assertRaises(InvokeError):
+ self.assertClientGetHash(client, taskhash, unihash)
+
+ def test_auth_report_perms(self):
+ admin_client = self.start_auth_server()
+
+ # Without read permission, the user is completely denied
+ with self.auth_perms() as client, self.assertRaises(InvokeError):
+ self.create_test_hash(client)
+
+ # Read permission allows the call to succeed, but it doesn't record
+ # anything in the database
+ with self.auth_perms("@read") as client:
+ taskhash, outhash, unihash = self.create_test_hash(client)
+ self.assertClientGetHash(client, taskhash, None)
+
+ # Report permission alone is insufficient
+ with self.auth_perms("@report") as client, self.assertRaises(InvokeError):
+ self.create_test_hash(client)
+
+ # Read and report permission actually modify the database
+ with self.auth_perms("@read", "@report") as client:
+ taskhash, outhash, unihash = self.create_test_hash(client)
+ self.assertClientGetHash(client, taskhash, unihash)
+
+ def test_auth_no_token_refresh_from_anon_user(self):
+ self.start_auth_server()
+
+ with self.start_client(self.auth_server_address) as client, self.assertRaises(InvokeError):
+ client.refresh_token()
+
+ def test_auth_self_token_refresh(self):
+ admin_client = self.start_auth_server()
+
+ # Create a new user with no permissions
+ user = self.create_user("test-user", [])
+
+ with self.auth_client(user) as client:
+ new_user = client.refresh_token()
+
+ self.assertEqual(user["username"], new_user["username"])
+ self.assertNotEqual(user["token"], new_user["token"])
+ self.assertUserCanAuth(new_user)
+ self.assertUserCannotAuth(user)
+
+ # Explicitly specifying with your own username is fine also
+ with self.auth_client(new_user) as client:
+ new_user2 = client.refresh_token(user["username"])
+
+ self.assertEqual(user["username"], new_user2["username"])
+ self.assertNotEqual(user["token"], new_user2["token"])
+ self.assertUserCanAuth(new_user2)
+ self.assertUserCannotAuth(new_user)
+ self.assertUserCannotAuth(user)
+
+ def test_auth_token_refresh(self):
+ admin_client = self.start_auth_server()
+
+ user = self.create_user("test-user", [])
+
+ with self.auth_perms() as client, self.assertRaises(InvokeError):
+ client.refresh_token(user["username"])
+
+ with self.auth_perms("@user-admin") as client:
+ new_user = client.refresh_token(user["username"])
+
+ self.assertEqual(user["username"], new_user["username"])
+ self.assertNotEqual(user["token"], new_user["token"])
+ self.assertUserCanAuth(new_user)
+ self.assertUserCannotAuth(user)
+
+ def test_auth_self_get_user(self):
+ admin_client = self.start_auth_server()
+
+ user = self.create_user("test-user", [])
+ user_info = user.copy()
+ del user_info["token"]
+
+ with self.auth_client(user) as client:
+ info = client.get_user()
+ self.assertEqual(info, user_info)
+
+ # Explicitly asking for your own username is fine also
+ info = client.get_user(user["username"])
+ self.assertEqual(info, user_info)
+
+ def test_auth_get_user(self):
+ admin_client = self.start_auth_server()
+
+ user = self.create_user("test-user", [])
+ user_info = user.copy()
+ del user_info["token"]
+
+ with self.auth_perms() as client, self.assertRaises(InvokeError):
+ client.get_user(user["username"])
+
+ with self.auth_perms("@user-admin") as client:
+ info = client.get_user(user["username"])
+ self.assertEqual(info, user_info)
+
+ info = client.get_user("nonexist-user")
+ self.assertIsNone(info)
+
+ def test_auth_reconnect(self):
+ admin_client = self.start_auth_server()
+
+ user = self.create_user("test-user", [])
+ user_info = user.copy()
+ del user_info["token"]
+
+ with self.auth_client(user) as client:
+ info = client.get_user()
+ self.assertEqual(info, user_info)
+
+ client.disconnect()
+
+ info = client.get_user()
+ self.assertEqual(info, user_info)
+
+ def test_auth_delete_user(self):
+ admin_client = self.start_auth_server()
+
+ user = self.create_user("test-user", [])
+
+ # Self-service: users can delete their own account
+ with self.auth_client(user) as client:
+ client.delete_user(user["username"])
+
+ self.assertIsNone(admin_client.get_user(user["username"]))
+ user = self.create_user("test-user", [])
+
+ with self.auth_perms() as client, self.assertRaises(InvokeError):
+ client.delete_user(user["username"])
+
+ with self.auth_perms("@user-admin") as client:
+ client.delete_user(user["username"])
+
+ # User doesn't exist, so even though the permission is correct, it's an
+ # error
+ with self.auth_perms("@user-admin") as client, self.assertRaises(InvokeError):
+ client.delete_user(user["username"])
+
+ def test_auth_set_user_perms(self):
+ admin_client = self.start_auth_server()
+
+ user = self.create_user("test-user", [])
+
+ self.assertUserPerms(user, [])
+
+ # No self service to change permissions
+ with self.auth_client(user) as client, self.assertRaises(InvokeError):
+ client.set_user_perms(user["username"], ["@all"])
+ self.assertUserPerms(user, [])
+
+ with self.auth_perms() as client, self.assertRaises(InvokeError):
+ client.set_user_perms(user["username"], ["@all"])
+ self.assertUserPerms(user, [])
+
+ with self.auth_perms("@user-admin") as client:
+ client.set_user_perms(user["username"], ["@all"])
+ self.assertUserPerms(user, sorted(list(ALL_PERMISSIONS)))
+
+ # Bad permissions
+ with self.auth_perms("@user-admin") as client, self.assertRaises(InvokeError):
+ client.set_user_perms(user["username"], ["@this-is-not-a-permission"])
+ self.assertUserPerms(user, sorted(list(ALL_PERMISSIONS)))
+
+ def test_auth_get_all_users(self):
+ admin_client = self.start_auth_server()
+
+ user = self.create_user("test-user", [])
+
+ with self.auth_client(user) as client, self.assertRaises(InvokeError):
+ client.get_all_users()
+
+ # Give the test user the correct permission
+ admin_client.set_user_perms(user["username"], ["@user-admin"])
+
+ with self.auth_client(user) as client:
+ all_users = client.get_all_users()
+
+ # Convert to a dictionary for easier comparison
+ all_users = {u["username"]: u for u in all_users}
+
+ self.assertEqual(all_users,
+ {
+ "admin": {
+ "username": "admin",
+ "permissions": sorted(list(ALL_PERMISSIONS)),
+ },
+ "test-user": {
+ "username": "test-user",
+ "permissions": ["@user-admin"],
+ }
+ }
+ )
+
+ def test_auth_new_user(self):
+ self.start_auth_server()
+
+ permissions = ["@read", "@report", "@db-admin", "@user-admin"]
+ permissions.sort()
+
+ with self.auth_perms() as client, self.assertRaises(InvokeError):
+ self.create_user("test-user", permissions, client=client)
+
+ with self.auth_perms("@user-admin") as client:
+ user = self.create_user("test-user", permissions, client=client)
+ self.assertIn("token", user)
+ self.assertEqual(user["username"], "test-user")
+ self.assertEqual(user["permissions"], permissions)
+
+ def test_auth_become_user(self):
+ admin_client = self.start_auth_server()
+
+ user = self.create_user("test-user", ["@read", "@report"])
+ user_info = user.copy()
+ del user_info["token"]
+
+ with self.auth_perms() as client, self.assertRaises(InvokeError):
+ client.become_user(user["username"])
+
+ with self.auth_perms("@user-admin") as client:
+ become = client.become_user(user["username"])
+ self.assertEqual(become, user_info)
+
+ info = client.get_user()
+ self.assertEqual(info, user_info)
+
+ # Verify become user is preserved across disconnect
+ client.disconnect()
+
+ info = client.get_user()
+ self.assertEqual(info, user_info)
+
+ # test-user doesn't have become_user permissions, so this should
+ # not work
+ with self.assertRaises(InvokeError):
+ client.become_user(user["username"])
+
+ # No self-service of become
+ with self.auth_client(user) as client, self.assertRaises(InvokeError):
+ client.become_user(user["username"])
+
+ # Give test user permissions to become
+ admin_client.set_user_perms(user["username"], ["@user-admin"])
+
+ # It's possible to become yourself (effectively a noop)
+ with self.auth_perms("@user-admin") as client:
+ become = client.become_user(client.username)
+
+ def test_auth_gc(self):
+ admin_client = self.start_auth_server()
+
+ with self.auth_perms() as client, self.assertRaises(InvokeError):
+ client.gc_mark("ABC", {"unihash": "123"})
+
+ with self.auth_perms() as client, self.assertRaises(InvokeError):
+ client.gc_status()
+
+ with self.auth_perms() as client, self.assertRaises(InvokeError):
+ client.gc_sweep("ABC")
+
+ with self.auth_perms("@db-admin") as client:
+ client.gc_mark("ABC", {"unihash": "123"})
+
+ with self.auth_perms("@db-admin") as client:
+ client.gc_status()
+
+ with self.auth_perms("@db-admin") as client:
+ client.gc_sweep("ABC")
+
+ def test_get_db_usage(self):
+ usage = self.client.get_db_usage()
+
+ self.assertTrue(isinstance(usage, dict))
+ for name in usage.keys():
+ self.assertTrue(isinstance(usage[name], dict))
+ self.assertIn("rows", usage[name])
+ self.assertTrue(isinstance(usage[name]["rows"], int))
+
+ def test_get_db_query_columns(self):
+ columns = self.client.get_db_query_columns()
+
+ self.assertTrue(isinstance(columns, list))
+ self.assertTrue(len(columns) > 0)
+
+ for col in columns:
+ self.client.remove({col: ""})
+
+ def test_auth_is_owner(self):
+ admin_client = self.start_auth_server()
+
+ user = self.create_user("test-user", ["@read", "@report"])
+ with self.auth_client(user) as client:
+ taskhash, outhash, unihash = self.create_test_hash(client)
+ data = client.get_taskhash(self.METHOD, taskhash, True)
+ self.assertEqual(data["owner"], user["username"])
+
+ def test_gc(self):
+ taskhash = '53b8dce672cb6d0c73170be43f540460bfc347b4'
+ outhash = '5a9cb1649625f0bf41fc7791b635cd9c2d7118c7f021ba87dcd03f72b67ce7a8'
+ unihash = 'f37918cc02eb5a520b1aff86faacbc0a38124646'
+
+ result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
+ self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash')
+
+ taskhash2 = '3bf6f1e89d26205aec90da04854fbdbf73afe6b4'
+ outhash2 = '77623a549b5b1a31e3732dfa8fe61d7ce5d44b3370f253c5360e136b852967b4'
+ unihash2 = 'af36b199320e611fbb16f1f277d3ee1d619ca58b'
+
+ result = self.client.report_unihash(taskhash2, self.METHOD, outhash2, unihash2)
+ self.assertClientGetHash(self.client, taskhash2, unihash2)
+
+ # Mark the first unihash to be kept
+ ret = self.client.gc_mark("ABC", {"unihash": unihash, "method": self.METHOD})
+ self.assertEqual(ret, {"count": 1})
+
+ ret = self.client.gc_status()
+ self.assertEqual(ret, {"mark": "ABC", "keep": 1, "remove": 1})
+
+ # Second hash is still there; mark doesn't delete hashes
+ self.assertClientGetHash(self.client, taskhash2, unihash2)
+
+ ret = self.client.gc_sweep("ABC")
+ self.assertEqual(ret, {"count": 1})
+
+ # Hash is gone. Taskhash is returned for second hash
self.assertClientGetHash(self.client, taskhash2, None)
+ # First hash is still present
+ self.assertClientGetHash(self.client, taskhash, unihash)
+
+ def test_gc_switch_mark(self):
+ taskhash = '53b8dce672cb6d0c73170be43f540460bfc347b4'
+ outhash = '5a9cb1649625f0bf41fc7791b635cd9c2d7118c7f021ba87dcd03f72b67ce7a8'
+ unihash = 'f37918cc02eb5a520b1aff86faacbc0a38124646'
+
+ result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
+ self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash')
+
+ taskhash2 = '3bf6f1e89d26205aec90da04854fbdbf73afe6b4'
+ outhash2 = '77623a549b5b1a31e3732dfa8fe61d7ce5d44b3370f253c5360e136b852967b4'
+ unihash2 = 'af36b199320e611fbb16f1f277d3ee1d619ca58b'
+
+ result = self.client.report_unihash(taskhash2, self.METHOD, outhash2, unihash2)
+ self.assertClientGetHash(self.client, taskhash2, unihash2)
+
+ # Mark the first unihash to be kept
+ ret = self.client.gc_mark("ABC", {"unihash": unihash, "method": self.METHOD})
+ self.assertEqual(ret, {"count": 1})
+
+ ret = self.client.gc_status()
+ self.assertEqual(ret, {"mark": "ABC", "keep": 1, "remove": 1})
+
+ # Second hash is still there; mark doesn't delete hashes
+ self.assertClientGetHash(self.client, taskhash2, unihash2)
+
+ # Switch to a different mark and mark the second hash. This will start
+ # a new collection cycle
+ ret = self.client.gc_mark("DEF", {"unihash": unihash2, "method": self.METHOD})
+ self.assertEqual(ret, {"count": 1})
+
+ ret = self.client.gc_status()
+ self.assertEqual(ret, {"mark": "DEF", "keep": 1, "remove": 1})
+
+ # Both hashes are still present
+ self.assertClientGetHash(self.client, taskhash2, unihash2)
+ self.assertClientGetHash(self.client, taskhash, unihash)
+
+ # Sweep with the new mark
+ ret = self.client.gc_sweep("DEF")
+ self.assertEqual(ret, {"count": 1})
+
+ # First hash is gone, second is kept
+ self.assertClientGetHash(self.client, taskhash2, unihash2)
+ self.assertClientGetHash(self.client, taskhash, None)
+
+ def test_gc_switch_sweep_mark(self):
+ taskhash = '53b8dce672cb6d0c73170be43f540460bfc347b4'
+ outhash = '5a9cb1649625f0bf41fc7791b635cd9c2d7118c7f021ba87dcd03f72b67ce7a8'
+ unihash = 'f37918cc02eb5a520b1aff86faacbc0a38124646'
+
+ result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
+ self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash')
+
+ taskhash2 = '3bf6f1e89d26205aec90da04854fbdbf73afe6b4'
+ outhash2 = '77623a549b5b1a31e3732dfa8fe61d7ce5d44b3370f253c5360e136b852967b4'
+ unihash2 = 'af36b199320e611fbb16f1f277d3ee1d619ca58b'
+
+ result = self.client.report_unihash(taskhash2, self.METHOD, outhash2, unihash2)
+ self.assertClientGetHash(self.client, taskhash2, unihash2)
+
+ # Mark the first unihash to be kept
+ ret = self.client.gc_mark("ABC", {"unihash": unihash, "method": self.METHOD})
+ self.assertEqual(ret, {"count": 1})
+
+ ret = self.client.gc_status()
+ self.assertEqual(ret, {"mark": "ABC", "keep": 1, "remove": 1})
+
+ # Sweeping with a different mark raises an error
+ with self.assertRaises(InvokeError):
+ self.client.gc_sweep("DEF")
+
+ # Both hashes are present
+ self.assertClientGetHash(self.client, taskhash2, unihash2)
+ self.assertClientGetHash(self.client, taskhash, unihash)
+
+ def test_gc_new_hashes(self):
+ taskhash = '53b8dce672cb6d0c73170be43f540460bfc347b4'
+ outhash = '5a9cb1649625f0bf41fc7791b635cd9c2d7118c7f021ba87dcd03f72b67ce7a8'
+ unihash = 'f37918cc02eb5a520b1aff86faacbc0a38124646'
+
+ result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
+ self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash')
+
+ # Start a new garbage collection
+ ret = self.client.gc_mark("ABC", {"unihash": unihash, "method": self.METHOD})
+ self.assertEqual(ret, {"count": 1})
+
+ ret = self.client.gc_status()
+ self.assertEqual(ret, {"mark": "ABC", "keep": 1, "remove": 0})
+
+ # Add second hash. It should inherit the mark from the current garbage
+ # collection operation
+
+ taskhash2 = '3bf6f1e89d26205aec90da04854fbdbf73afe6b4'
+ outhash2 = '77623a549b5b1a31e3732dfa8fe61d7ce5d44b3370f253c5360e136b852967b4'
+ unihash2 = 'af36b199320e611fbb16f1f277d3ee1d619ca58b'
+
+ result = self.client.report_unihash(taskhash2, self.METHOD, outhash2, unihash2)
+ self.assertClientGetHash(self.client, taskhash2, unihash2)
+
+ # Sweep should remove nothing
+ ret = self.client.gc_sweep("ABC")
+ self.assertEqual(ret, {"count": 0})
+
+ # Both hashes are present
+ self.assertClientGetHash(self.client, taskhash2, unihash2)
+ self.assertClientGetHash(self.client, taskhash, unihash)
+
+
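Read together, the three gc tests above pin down a mark-and-sweep protocol. A condensed sketch using only the calls exercised here; the address and filter values are placeholders.

    import hashserv

    client = hashserv.create_client("localhost:8686")

    # Mark everything matching the filter under mark "ABC". Issuing a
    # different mark starts a new cycle, and hashes reported while a
    # cycle is open inherit its mark and survive the sweep.
    client.gc_mark("ABC", {"method": "TestMethod"})

    # {"mark": "ABC", "keep": <marked rows>, "remove": <unmarked rows>}
    status = client.gc_status()

    # Sweeping with a mismatched mark raises InvokeError; with the
    # current mark it deletes every unmarked row.
    client.gc_sweep("ABC")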
+class TestHashEquivalenceClient(HashEquivalenceTestSetup, unittest.TestCase):
+ def get_server_addr(self, server_idx):
+ return "unix://" + os.path.join(self.temp_dir.name, 'sock%d' % server_idx)
+
+ def test_get(self):
+ taskhash, outhash, unihash = self.create_test_hash(self.client)
+
+ p = self.run_hashclient(["--address", self.server_address, "get", self.METHOD, taskhash])
+ data = json.loads(p.stdout)
+ self.assertEqual(data["unihash"], unihash)
+ self.assertEqual(data["outhash"], outhash)
+ self.assertEqual(data["taskhash"], taskhash)
+ self.assertEqual(data["method"], self.METHOD)
+
+ def test_get_outhash(self):
+ taskhash, outhash, unihash = self.create_test_hash(self.client)
+
+ p = self.run_hashclient(["--address", self.server_address, "get-outhash", self.METHOD, outhash, taskhash])
+ data = json.loads(p.stdout)
+ self.assertEqual(data["unihash"], unihash)
+ self.assertEqual(data["outhash"], outhash)
+ self.assertEqual(data["taskhash"], taskhash)
+ self.assertEqual(data["method"], self.METHOD)
+
+ def test_stats(self):
+ p = self.run_hashclient(["--address", self.server_address, "stats"], check=True)
+ json.loads(p.stdout)
+
+ def test_stress(self):
+ self.run_hashclient(["--address", self.server_address, "stress"], check=True)
+
+ def test_unihash_exists(self):
+ taskhash, outhash, unihash = self.create_test_hash(self.client)
+
+ p = self.run_hashclient([
+ "--address", self.server_address,
+ "unihash-exists", unihash,
+ ], check=True)
+ self.assertEqual(p.stdout.strip(), "true")
+
+ p = self.run_hashclient([
+ "--address", self.server_address,
+ "unihash-exists", '6662e699d6e3d894b24408ff9a4031ef9b038ee8',
+ ], check=True)
+ self.assertEqual(p.stdout.strip(), "false")
+
+ def test_unihash_exists_quiet(self):
+ taskhash, outhash, unihash = self.create_test_hash(self.client)
+
+ p = self.run_hashclient([
+ "--address", self.server_address,
+ "unihash-exists", unihash,
+ "--quiet",
+ ])
+ self.assertEqual(p.returncode, 0)
+ self.assertEqual(p.stdout.strip(), "")
+
+ p = self.run_hashclient([
+ "--address", self.server_address,
+ "unihash-exists", '6662e699d6e3d894b24408ff9a4031ef9b038ee8',
+ "--quiet",
+ ])
+ self.assertEqual(p.returncode, 1)
+ self.assertEqual(p.stdout.strip(), "")
+
+ def test_remove_taskhash(self):
+ taskhash, outhash, unihash = self.create_test_hash(self.client)
+ self.run_hashclient([
+ "--address", self.server_address,
+ "remove",
+ "--where", "taskhash", taskhash,
+ ], check=True)
+ self.assertClientGetHash(self.client, taskhash, None)
+
+ result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash)
+ self.assertIsNone(result_outhash)
+
+ def test_remove_unihash(self):
+ taskhash, outhash, unihash = self.create_test_hash(self.client)
+ self.run_hashclient([
+ "--address", self.server_address,
+ "remove",
+ "--where", "unihash", unihash,
+ ], check=True)
+ self.assertClientGetHash(self.client, taskhash, None)
+
+ def test_remove_outhash(self):
+ taskhash, outhash, unihash = self.create_test_hash(self.client)
+ self.run_hashclient([
+ "--address", self.server_address,
+ "remove",
+ "--where", "outhash", outhash,
+ ], check=True)
+
+ result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash)
+ self.assertIsNone(result_outhash)
+
+ def test_remove_method(self):
+ taskhash, outhash, unihash = self.create_test_hash(self.client)
+ self.run_hashclient([
+ "--address", self.server_address,
+ "remove",
+ "--where", "method", self.METHOD,
+ ], check=True)
+ self.assertClientGetHash(self.client, taskhash, None)
+
+ result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash)
+ self.assertIsNone(result_outhash)
+
+ def test_clean_unused(self):
+ taskhash, outhash, unihash = self.create_test_hash(self.client)
+
+ # Clean the database, which should not remove anything because all hashes are in use
+ self.run_hashclient([
+ "--address", self.server_address,
+ "clean-unused", "0",
+ ], check=True)
+ self.assertClientGetHash(self.client, taskhash, unihash)
+
+ # Remove the unihash. The row in the outhash table should still be present
+ self.run_hashclient([
+ "--address", self.server_address,
+ "remove",
+ "--where", "unihash", unihash,
+ ], check=True)
+ result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash, False)
+ self.assertIsNotNone(result_outhash)
+
+ # Now clean with no minimum age which will remove the outhash
+ self.run_hashclient([
+ "--address", self.server_address,
+ "clean-unused", "0",
+ ], check=True)
+ result_outhash = self.client.get_outhash(self.METHOD, outhash, taskhash, False)
+ self.assertIsNone(result_outhash)
+
+ def test_refresh_token(self):
+ admin_client = self.start_auth_server()
+
+ user = admin_client.new_user("test-user", ["@read", "@report"])
+
+ p = self.run_hashclient([
+ "--address", self.auth_server_address,
+ "--login", user["username"],
+ "--password", user["token"],
+ "refresh-token"
+ ], check=True)
+
+ new_token = None
+ for l in p.stdout.splitlines():
+ l = l.rstrip()
+ m = re.match(r'Token: +(.*)$', l)
+ if m is not None:
+ new_token = m.group(1)
+
+ self.assertTrue(new_token)
+
+ print("New token is %r" % new_token)
+
+ self.run_hashclient([
+ "--address", self.auth_server_address,
+ "--login", user["username"],
+ "--password", new_token,
+ "get-user"
+ ], check=True)
+
+ def test_set_user_perms(self):
+ admin_client = self.start_auth_server()
+
+ user = admin_client.new_user("test-user", ["@read"])
+
+ self.run_hashclient([
+ "--address", self.auth_server_address,
+ "--login", admin_client.username,
+ "--password", admin_client.password,
+ "set-user-perms",
+ "-u", user["username"],
+ "@read", "@report",
+ ], check=True)
+
+ new_user = admin_client.get_user(user["username"])
+
+ self.assertEqual(set(new_user["permissions"]), {"@read", "@report"})
+
+ def test_get_user(self):
+ admin_client = self.start_auth_server()
+
+ user = admin_client.new_user("test-user", ["@read"])
+
+ p = self.run_hashclient([
+ "--address", self.auth_server_address,
+ "--login", admin_client.username,
+ "--password", admin_client.password,
+ "get-user",
+ "-u", user["username"],
+ ], check=True)
+
+ self.assertIn("Username:", p.stdout)
+ self.assertIn("Permissions:", p.stdout)
+
+ p = self.run_hashclient([
+ "--address", self.auth_server_address,
+ "--login", user["username"],
+ "--password", user["token"],
+ "get-user",
+ ], check=True)
+
+ self.assertIn("Username:", p.stdout)
+ self.assertIn("Permissions:", p.stdout)
+
+ def test_get_all_users(self):
+ admin_client = self.start_auth_server()
+
+ admin_client.new_user("test-user1", ["@read"])
+ admin_client.new_user("test-user2", ["@read"])
+
+ p = self.run_hashclient([
+ "--address", self.auth_server_address,
+ "--login", admin_client.username,
+ "--password", admin_client.password,
+ "get-all-users",
+ ], check=True)
+
+ self.assertIn("admin", p.stdout)
+ self.assertIn("test-user1", p.stdout)
+ self.assertIn("test-user2", p.stdout)
+
+ def test_new_user(self):
+ admin_client = self.start_auth_server()
+
+ p = self.run_hashclient([
+ "--address", self.auth_server_address,
+ "--login", admin_client.username,
+ "--password", admin_client.password,
+ "new-user",
+ "-u", "test-user",
+ "@read", "@report",
+ ], check=True)
+
+ new_token = None
+ for l in p.stdout.splitlines():
+ l = l.rstrip()
+ m = re.match(r'Token: +(.*)$', l)
+ if m is not None:
+ new_token = m.group(1)
+
+ self.assertTrue(new_token)
+
+ user = {
+ "username": "test-user",
+ "token": new_token,
+ }
+
+ self.assertUserPerms(user, ["@read", "@report"])
+
+ def test_delete_user(self):
+ admin_client = self.start_auth_server()
+
+ user = admin_client.new_user("test-user", ["@read"])
+
+ p = self.run_hashclient([
+ "--address", self.auth_server_address,
+ "--login", admin_client.username,
+ "--password", admin_client.password,
+ "delete-user",
+ "-u", user["username"],
+ ], check=True)
+
+ self.assertIsNone(admin_client.get_user(user["username"]))
+
+ def test_get_db_usage(self):
+ p = self.run_hashclient([
+ "--address", self.server_address,
+ "get-db-usage",
+ ], check=True)
+
+ def test_get_db_query_columns(self):
+ p = self.run_hashclient([
+ "--address", self.server_address,
+ "get-db-query-columns",
+ ], check=True)
+
+ def test_gc(self):
+ taskhash = '53b8dce672cb6d0c73170be43f540460bfc347b4'
+ outhash = '5a9cb1649625f0bf41fc7791b635cd9c2d7118c7f021ba87dcd03f72b67ce7a8'
+ unihash = 'f37918cc02eb5a520b1aff86faacbc0a38124646'
+
+ result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
+ self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash')
+
+ taskhash2 = '3bf6f1e89d26205aec90da04854fbdbf73afe6b4'
+ outhash2 = '77623a549b5b1a31e3732dfa8fe61d7ce5d44b3370f253c5360e136b852967b4'
+ unihash2 = 'af36b199320e611fbb16f1f277d3ee1d619ca58b'
+
+ result = self.client.report_unihash(taskhash2, self.METHOD, outhash2, unihash2)
+ self.assertClientGetHash(self.client, taskhash2, unihash2)
+
+ # Mark the first unihash to be kept
+ self.run_hashclient([
+ "--address", self.server_address,
+ "gc-mark", "ABC",
+ "--where", "unihash", unihash,
+ "--where", "method", self.METHOD
+ ], check=True)
+
+ # Second hash is still there; mark doesn't delete hashes
+ self.assertClientGetHash(self.client, taskhash2, unihash2)
+
+ self.run_hashclient([
+ "--address", self.server_address,
+ "gc-sweep", "ABC",
+ ], check=True)
+
+ # Hash is gone. Taskhash is returned for second hash
+ self.assertClientGetHash(self.client, taskhash2, None)
+ # First hash is still present
+ self.assertClientGetHash(self.client, taskhash, unihash)
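The same cycle is scriptable through the CLI. A sketch mirroring the run_hashclient() wrapper above with plain subprocess calls; the address and unihash are placeholders, and the subcommands and flags are those used by the tests.

    import subprocess

    base = ["bitbake-hashclient", "--address", "localhost:8686"]

    # Repeated --where flags are ANDed together, as in the test above.
    subprocess.run(base + [
        "gc-mark", "ABC",
        "--where", "unihash", "f37918cc02eb5a520b1aff86faacbc0a38124646",
        "--where", "method", "TestMethod",
    ], check=True)

    subprocess.run(base + ["gc-sweep", "ABC"], check=True)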
class TestHashEquivalenceUnixServer(HashEquivalenceTestSetup, HashEquivalenceCommonTests, unittest.TestCase):
@@ -313,3 +1437,77 @@ class TestHashEquivalenceTCPServer(HashEquivalenceTestSetup, HashEquivalenceComm
# If IPv6 is enabled, it should be safe to use localhost directly, in general
# case it is more reliable to resolve the IP address explicitly.
return socket.gethostbyname("localhost") + ":0"
+
+
+class TestHashEquivalenceWebsocketServer(HashEquivalenceTestSetup, HashEquivalenceCommonTests, unittest.TestCase):
+ def setUp(self):
+ try:
+ import websockets
+ except ImportError as e:
+ self.skipTest(str(e))
+
+ super().setUp()
+
+ def get_server_addr(self, server_idx):
+ # Some hosts cause the asyncio module to misbehave when IPv6 is not enabled.
+ # If IPv6 is enabled, it should be safe to use localhost directly, in general
+ # case it is more reliable to resolve the IP address explicitly.
+ host = socket.gethostbyname("localhost")
+ return "ws://%s:0" % host
+
+
+class TestHashEquivalenceWebsocketsSQLAlchemyServer(TestHashEquivalenceWebsocketServer):
+ def setUp(self):
+ try:
+ import sqlalchemy
+ import aiosqlite
+ except ImportError as e:
+ self.skipTest(str(e))
+
+ super().setUp()
+
+ def make_dbpath(self):
+ return "sqlite+aiosqlite:///%s" % os.path.join(self.temp_dir.name, "db%d.sqlite" % self.server_index)
+
+
+class TestHashEquivalenceExternalServer(HashEquivalenceTestSetup, HashEquivalenceCommonTests, unittest.TestCase):
+ def get_env(self, name):
+ v = os.environ.get(name)
+ if not v:
+ self.skipTest(f'{name} not defined to test an external server')
+ return v
+
+ def start_test_server(self):
+ return self.get_env('BB_TEST_HASHSERV')
+
+ def start_server(self, *args, **kwargs):
+ self.skipTest('Cannot start local server when testing external servers')
+
+ def start_auth_server(self):
+
+ self.auth_server_address = self.server_address
+ self.admin_client = self.start_client(
+ self.server_address,
+ username=self.get_env('BB_TEST_HASHSERV_USERNAME'),
+ password=self.get_env('BB_TEST_HASHSERV_PASSWORD'),
+ )
+ return self.admin_client
+
+ def setUp(self):
+ super().setUp()
+ if "BB_TEST_HASHSERV_USERNAME" in os.environ:
+ self.client = self.start_client(
+ self.server_address,
+ username=os.environ["BB_TEST_HASHSERV_USERNAME"],
+ password=os.environ["BB_TEST_HASHSERV_PASSWORD"],
+ )
+ self.client.remove({"method": self.METHOD})
+
+ def tearDown(self):
+ self.client.remove({"method": self.METHOD})
+ super().tearDown()
+
+
+ def test_auth_get_all_users(self):
+ self.skipTest("Cannot test all users with external server")
+
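The environment contract for this class boils down to three variables. A sketch with placeholder values; the username and password are only needed for the auth tests, and without BB_TEST_HASHSERV the external-server tests skip themselves.

    import os

    os.environ["BB_TEST_HASHSERV"] = "localhost:8686"
    os.environ["BB_TEST_HASHSERV_USERNAME"] = "admin"
    os.environ["BB_TEST_HASHSERV_PASSWORD"] = "password"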
diff --git a/lib/layerindexlib/__init__.py b/lib/layerindexlib/__init__.py
index 9ca127b9d..c3265ddaa 100644
--- a/lib/layerindexlib/__init__.py
+++ b/lib/layerindexlib/__init__.py
@@ -6,7 +6,6 @@
import datetime
import logging
-import imp
import os
from collections import OrderedDict
@@ -179,9 +178,9 @@ class LayerIndex():
'''Load the layerindex.
indexURI - An index to load. (Use multiple calls to load multiple indexes)
-
+
reload - If reload is True, then any previously loaded indexes will be forgotten.
-
+
load - List of elements to load. Default loads all items.
Note: plugs may ignore this.
@@ -199,7 +198,7 @@ The format of the indexURI:
For example:
- http://layers.openembedded.org/layerindex/api/;branch=master;desc=OpenEmbedded%20Layer%20Index
+ https://layers.openembedded.org/layerindex/api/;branch=master;desc=OpenEmbedded%20Layer%20Index
cooker://
'''
if reload:
@@ -384,7 +383,14 @@ layerBranches set. If not, they are effectively blank.'''
# Get a list of dependencies and then recursively process them
for layerdependency in layerbranch.index.layerDependencies_layerBranchId[layerbranch.id]:
- deplayerbranch = layerdependency.dependency_layerBranch
+ try:
+ deplayerbranch = layerdependency.dependency_layerBranch
+ except AttributeError as e:
+ logger.error('LayerBranch does not exist for dependent layer {}:{}\n' \
+ ' Cannot continue successfully.\n' \
+ ' You might be able to resolve this by checking out the layer locally.\n' \
+ ' Consider reaching out to the layer maintainers or the layerindex admins' \
+ .format(layerdependency.dependency.name, layerbranch.branch.name))
+ continue
if ignores and deplayerbranch.layer.name in ignores:
continue
@@ -577,7 +583,7 @@ This function is used to implement debugging and provide the user info.
# index['config'] - configuration data for this index
# index['branches'] - dictionary of Branch objects, by id number
# index['layerItems'] - dictionary of layerItem objects, by id number
-# ...etc... (See: http://layers.openembedded.org/layerindex/api/)
+# ...etc... (See: https://layers.openembedded.org/layerindex/api/)
#
# The class needs to manage the 'index' entries and allow easily adding
# of new items, as well as simply loading of the items.
@@ -847,7 +853,7 @@ class LayerIndexObj():
continue
for layerdependency in layerbranch.index.layerDependencies_layerBranchId[layerbranch.id]:
- deplayerbranch = layerdependency.dependency_layerBranch
+ deplayerbranch = layerdependency.dependency_layerBranch or None
if ignores and deplayerbranch.layer.name in ignores:
continue
@@ -1279,7 +1285,7 @@ class Recipe(LayerIndexItemObj_LayerBranch):
filename, filepath, pn, pv, layerbranch,
summary="", description="", section="", license="",
homepage="", bugtracker="", provides="", bbclassextend="",
- inherits="", blacklisted="", updated=None):
+ inherits="", disallowed="", updated=None):
self.id = id
self.filename = filename
self.filepath = filepath
@@ -1295,7 +1301,7 @@ class Recipe(LayerIndexItemObj_LayerBranch):
self.bbclassextend = bbclassextend
self.inherits = inherits
self.updated = updated or datetime.datetime.today().isoformat()
- self.blacklisted = blacklisted
+ self.disallowed = disallowed
if isinstance(layerbranch, LayerBranch):
self.layerbranch = layerbranch
else:
diff --git a/lib/layerindexlib/cooker.py b/lib/layerindexlib/cooker.py
index 2de6e5faa..ced3e0636 100644
--- a/lib/layerindexlib/cooker.py
+++ b/lib/layerindexlib/cooker.py
@@ -279,7 +279,7 @@ class CookerPlugin(layerindexlib.plugin.IndexPlugin):
summary=pn, description=pn, section='?',
license='?', homepage='?', bugtracker='?',
provides='?', bbclassextend='?', inherits='?',
- blacklisted='?', layerbranch=depBranchId)
+ disallowed='?', layerbranch=depBranchId)
index = addElement("recipes", [recipe], index)
diff --git a/lib/layerindexlib/restapi.py b/lib/layerindexlib/restapi.py
index 26a1c9674..81d99b02e 100644
--- a/lib/layerindexlib/restapi.py
+++ b/lib/layerindexlib/restapi.py
@@ -31,7 +31,7 @@ class RestApiPlugin(layerindexlib.plugin.IndexPlugin):
The return value is a LayerIndexObj.
url is the url to the rest api of the layer index, such as:
- http://layers.openembedded.org/layerindex/api/
+ https://layers.openembedded.org/layerindex/api/
Or a local file...
"""
@@ -138,7 +138,7 @@ class RestApiPlugin(layerindexlib.plugin.IndexPlugin):
The return value is a LayerIndexObj.
ud is the parsed url to the rest api of the layer index, such as:
- http://layers.openembedded.org/layerindex/api/
+ https://layers.openembedded.org/layerindex/api/
"""
def _get_json_response(apiurl=None, username=None, password=None, retry=True):
diff --git a/lib/layerindexlib/tests/restapi.py b/lib/layerindexlib/tests/restapi.py
index 33b5c1c4c..71f0ae8a9 100644
--- a/lib/layerindexlib/tests/restapi.py
+++ b/lib/layerindexlib/tests/restapi.py
@@ -22,7 +22,7 @@ class LayerIndexWebRestApiTest(LayersTest):
self.assertFalse(os.environ.get("BB_SKIP_NETTESTS") == "yes", msg="BB_SKIP_NETTESTS set, but we tried to test anyway")
LayersTest.setUp(self)
self.layerindex = layerindexlib.LayerIndex(self.d)
- self.layerindex.load_layerindex('http://layers.openembedded.org/layerindex/api/;branch=sumo', load=['layerDependencies'])
+ self.layerindex.load_layerindex('https://layers.openembedded.org/layerindex/api/;branch=sumo', load=['layerDependencies'])
@skipIfNoNetwork()
def test_layerindex_is_empty(self):
diff --git a/lib/ply/yacc.py b/lib/ply/yacc.py
index 46e7dc96f..381b50cf0 100644
--- a/lib/ply/yacc.py
+++ b/lib/ply/yacc.py
@@ -2797,11 +2797,15 @@ class ParserReflect(object):
# Compute a signature over the grammar
def signature(self):
try:
- from hashlib import md5
+ import hashlib
except ImportError:
- from md5 import md5
+ raise RuntimeError("Unable to import hashlib")
+ try:
+ sig = hashlib.new('MD5', usedforsecurity=False)
+ except TypeError:
+ # Some configurations don't appear to support two arguments
+ sig = hashlib.new('MD5')
try:
- sig = md5()
if self.start:
sig.update(self.start.encode('latin-1'))
if self.prec:
diff --git a/lib/progressbar/progressbar.py b/lib/progressbar/progressbar.py
index e2b6ba108..d4da10ab7 100644
--- a/lib/progressbar/progressbar.py
+++ b/lib/progressbar/progressbar.py
@@ -253,7 +253,7 @@ class ProgressBar(object):
if (self.maxval is not UnknownLength
and not 0 <= value <= self.maxval):
- raise ValueError('Value out of range')
+ self.maxval = value
self.currval = value
diff --git a/lib/prserv/__init__.py b/lib/prserv/__init__.py
index 9961040b5..0e0aa34d0 100644
--- a/lib/prserv/__init__.py
+++ b/lib/prserv/__init__.py
@@ -1,17 +1,19 @@
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
__version__ = "1.0.0"
import os, time
-import sys,logging
+import sys, logging
def init_logger(logfile, loglevel):
numeric_level = getattr(logging, loglevel.upper(), None)
if not isinstance(numeric_level, int):
- raise ValueError('Invalid log level: %s' % loglevel)
- FORMAT = '%(asctime)-15s %(message)s'
+ raise ValueError("Invalid log level: %s" % loglevel)
+ FORMAT = "%(asctime)-15s %(message)s"
logging.basicConfig(level=numeric_level, filename=logfile, format=FORMAT)
class NotFoundError(Exception):
diff --git a/lib/prserv/client.py b/lib/prserv/client.py
new file mode 100644
index 000000000..8471ee304
--- /dev/null
+++ b/lib/prserv/client.py
@@ -0,0 +1,71 @@
+#
+# Copyright BitBake Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import logging
+import bb.asyncrpc
+
+logger = logging.getLogger("BitBake.PRserv")
+
+class PRAsyncClient(bb.asyncrpc.AsyncClient):
+ def __init__(self):
+ super().__init__("PRSERVICE", "1.0", logger)
+
+ async def getPR(self, version, pkgarch, checksum):
+ response = await self.invoke(
+ {"get-pr": {"version": version, "pkgarch": pkgarch, "checksum": checksum}}
+ )
+ if response:
+ return response["value"]
+
+ async def test_pr(self, version, pkgarch, checksum):
+ response = await self.invoke(
+ {"test-pr": {"version": version, "pkgarch": pkgarch, "checksum": checksum}}
+ )
+ if response:
+ return response["value"]
+
+ async def test_package(self, version, pkgarch):
+ response = await self.invoke(
+ {"test-package": {"version": version, "pkgarch": pkgarch}}
+ )
+ if response:
+ return response["value"]
+
+ async def max_package_pr(self, version, pkgarch):
+ response = await self.invoke(
+ {"max-package-pr": {"version": version, "pkgarch": pkgarch}}
+ )
+ if response:
+ return response["value"]
+
+ async def importone(self, version, pkgarch, checksum, value):
+ response = await self.invoke(
+ {"import-one": {"version": version, "pkgarch": pkgarch, "checksum": checksum, "value": value}}
+ )
+ if response:
+ return response["value"]
+
+ async def export(self, version, pkgarch, checksum, colinfo):
+ response = await self.invoke(
+ {"export": {"version": version, "pkgarch": pkgarch, "checksum": checksum, "colinfo": colinfo}}
+ )
+ if response:
+ return (response["metainfo"], response["datainfo"])
+
+ async def is_readonly(self):
+ response = await self.invoke(
+ {"is-readonly": {}}
+ )
+ if response:
+ return response["readonly"]
+
+class PRClient(bb.asyncrpc.Client):
+ def __init__(self):
+ super().__init__()
+ self._add_methods("getPR", "test_pr", "test_package", "importone", "export", "is_readonly")
+
+ def _get_async_client(self):
+ return PRAsyncClient()
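A synchronous usage sketch for the new client, assuming the connect_tcp() helper the bb.asyncrpc.Client base class provides elsewhere in BitBake; the host, port and key values below are placeholders.

    import prserv.client

    client = prserv.client.PRClient()
    client.connect_tcp("localhost", 8585)  # assumed bb.asyncrpc.Client method

    # Read-only servers answer queries but do not assign new values.
    if not client.is_readonly():
        # (version, pkgarch, checksum) identify one package build.
        pr = client.getPR("1.0-r0", "core2-64", "deadbeef")
        print("PR value:", pr)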
diff --git a/lib/prserv/db.py b/lib/prserv/db.py
index cb2a2461e..eb4150819 100644
--- a/lib/prserv/db.py
+++ b/lib/prserv/db.py
@@ -1,4 +1,6 @@
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
@@ -30,21 +32,29 @@ if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
#
class PRTable(object):
- def __init__(self, conn, table, nohist):
+ def __init__(self, conn, table, nohist, read_only):
self.conn = conn
self.nohist = nohist
+ self.read_only = read_only
self.dirty = False
if nohist:
- self.table = "%s_nohist" % table
+ self.table = "%s_nohist" % table
else:
- self.table = "%s_hist" % table
+ self.table = "%s_hist" % table
- self._execute("CREATE TABLE IF NOT EXISTS %s \
- (version TEXT NOT NULL, \
- pkgarch TEXT NOT NULL, \
- checksum TEXT NOT NULL, \
- value INTEGER, \
- PRIMARY KEY (version, pkgarch, checksum));" % self.table)
+ if self.read_only:
+ table_exists = self._execute(
+ "SELECT count(*) FROM sqlite_master \
+ WHERE type='table' AND name='%s'" % (self.table)).fetchone()[0]
+ if not table_exists:
+ raise prserv.NotFoundError
+ else:
+ self._execute("CREATE TABLE IF NOT EXISTS %s \
+ (version TEXT NOT NULL, \
+ pkgarch TEXT NOT NULL, \
+ checksum TEXT NOT NULL, \
+ value INTEGER, \
+ PRIMARY KEY (version, pkgarch, checksum));" % self.table)
def _execute(self, *query):
"""Execute a query, waiting to acquire a lock if necessary"""
@@ -54,20 +64,67 @@ class PRTable(object):
try:
return self.conn.execute(*query)
except sqlite3.OperationalError as exc:
- if 'is locked' in str(exc) and end > time.time():
+ if "is locked" in str(exc) and end > time.time():
continue
raise exc
def sync(self):
- self.conn.commit()
- self._execute("BEGIN EXCLUSIVE TRANSACTION")
+ if not self.read_only:
+ self.conn.commit()
+ self._execute("BEGIN EXCLUSIVE TRANSACTION")
def sync_if_dirty(self):
if self.dirty:
self.sync()
self.dirty = False
- def _getValueHist(self, version, pkgarch, checksum):
+ def test_package(self, version, pkgarch):
+ """Returns whether the specified package version is found in the database for the specified architecture"""
+
+ # Returns True if a matching row exists, False otherwise
+ data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=?;" % self.table,
+ (version, pkgarch))
+ row=data.fetchone()
+ if row is not None:
+ return True
+ else:
+ return False
+
+ def test_value(self, version, pkgarch, value):
+ """Returns whether the specified value is found in the database for the specified package and architecture"""
+
+ # Returns True if a matching row exists, False otherwise
+ data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? and value=?;" % self.table,
+ (version, pkgarch, value))
+ row=data.fetchone()
+ if row is not None:
+ return True
+ else:
+ return False
+
+ def find_value(self, version, pkgarch, checksum):
+ """Returns the value for the specified checksum if found or None otherwise."""
+
+ data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table,
+ (version, pkgarch, checksum))
+ row=data.fetchone()
+ if row is not None:
+ return row[0]
+ else:
+ return None
+
+ def find_max_value(self, version, pkgarch):
+ """Returns the greatest value for (version, pkgarch), or None if not found. Doesn't create a new value"""
+
+ data = self._execute("SELECT max(value) FROM %s where version=? AND pkgarch=?;" % (self.table),
+ (version, pkgarch))
+ row = data.fetchone()
+ if row is not None:
+ return row[0]
+ else:
+ return None
+
+ def _get_value_hist(self, version, pkgarch, checksum):
data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table,
(version, pkgarch, checksum))
row=data.fetchone()
@@ -75,10 +132,19 @@ class PRTable(object):
return row[0]
else:
#no value found, try to insert
+ if self.read_only:
+ data = self._execute("SELECT ifnull(max(value)+1, 0) FROM %s where version=? AND pkgarch=?;" % (self.table),
+ (version, pkgarch))
+ row = data.fetchone()
+ if row is not None:
+ return row[0]
+ else:
+ return 0
+
try:
- self._execute("INSERT INTO %s VALUES (?, ?, ?, (select ifnull(max(value)+1,0) from %s where version=? AND pkgarch=?));"
- % (self.table,self.table),
- (version,pkgarch, checksum,version, pkgarch))
+ self._execute("INSERT INTO %s VALUES (?, ?, ?, (select ifnull(max(value)+1, 0) from %s where version=? AND pkgarch=?));"
+ % (self.table, self.table),
+ (version, pkgarch, checksum, version, pkgarch))
except sqlite3.IntegrityError as exc:
logger.error(str(exc))
@@ -92,10 +158,10 @@ class PRTable(object):
else:
raise prserv.NotFoundError
- def _getValueNohist(self, version, pkgarch, checksum):
+ def _get_value_no_hist(self, version, pkgarch, checksum):
data=self._execute("SELECT value FROM %s \
WHERE version=? AND pkgarch=? AND checksum=? AND \
- value >= (select max(value) from %s where version=? AND pkgarch=?);"
+ value >= (select max(value) from %s where version=? AND pkgarch=?);"
% (self.table, self.table),
(version, pkgarch, checksum, version, pkgarch))
row=data.fetchone()
@@ -103,9 +169,14 @@ class PRTable(object):
return row[0]
else:
#no value found, try to insert
+ if self.read_only:
+ data = self._execute("SELECT ifnull(max(value)+1, 0) FROM %s where version=? AND pkgarch=?;" % (self.table),
+ (version, pkgarch))
+ return data.fetchone()[0]
+
try:
- self._execute("INSERT OR REPLACE INTO %s VALUES (?, ?, ?, (select ifnull(max(value)+1,0) from %s where version=? AND pkgarch=?));"
- % (self.table,self.table),
+ self._execute("INSERT OR REPLACE INTO %s VALUES (?, ?, ?, (select ifnull(max(value)+1, 0) from %s where version=? AND pkgarch=?));"
+ % (self.table, self.table),
(version, pkgarch, checksum, version, pkgarch))
except sqlite3.IntegrityError as exc:
logger.error(str(exc))
@@ -121,14 +192,17 @@ class PRTable(object):
else:
raise prserv.NotFoundError
- def getValue(self, version, pkgarch, checksum):
+ def get_value(self, version, pkgarch, checksum):
if self.nohist:
- return self._getValueNohist(version, pkgarch, checksum)
+ return self._get_value_no_hist(version, pkgarch, checksum)
else:
- return self._getValueHist(version, pkgarch, checksum)
+ return self._get_value_hist(version, pkgarch, checksum)
- def _importHist(self, version, pkgarch, checksum, value):
- val = None
+ def _import_hist(self, version, pkgarch, checksum, value):
+ if self.read_only:
+ return None
+
+ val = None
data = self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table,
(version, pkgarch, checksum))
row = data.fetchone()
@@ -151,24 +225,27 @@ class PRTable(object):
val = row[0]
return val
- def _importNohist(self, version, pkgarch, checksum, value):
+ def _import_no_hist(self, version, pkgarch, checksum, value):
+ if self.read_only:
+ return None
+
try:
#try to insert
self._execute("INSERT INTO %s VALUES (?, ?, ?, ?);" % (self.table),
- (version, pkgarch, checksum,value))
+ (version, pkgarch, checksum, value))
except sqlite3.IntegrityError as exc:
#already have the record, try to update
try:
- self._execute("UPDATE %s SET value=? WHERE version=? AND pkgarch=? AND checksum=? AND value<?"
+ self._execute("UPDATE %s SET value=? WHERE version=? AND pkgarch=? AND checksum=? AND value<?"
% (self.table),
- (value,version,pkgarch,checksum,value))
+ (value, version, pkgarch, checksum, value))
except sqlite3.IntegrityError as exc:
logger.error(str(exc))
self.dirty = True
data = self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=? AND value>=?;" % self.table,
- (version,pkgarch,checksum,value))
+ (version, pkgarch, checksum, value))
row=data.fetchone()
if row is not None:
return row[0]
@@ -177,33 +254,33 @@ class PRTable(object):
def importone(self, version, pkgarch, checksum, value):
if self.nohist:
- return self._importNohist(version, pkgarch, checksum, value)
+ return self._import_no_hist(version, pkgarch, checksum, value)
else:
- return self._importHist(version, pkgarch, checksum, value)
+ return self._import_hist(version, pkgarch, checksum, value)
def export(self, version, pkgarch, checksum, colinfo):
metainfo = {}
- #column info
+ #column info
if colinfo:
- metainfo['tbl_name'] = self.table
- metainfo['core_ver'] = prserv.__version__
- metainfo['col_info'] = []
+ metainfo["tbl_name"] = self.table
+ metainfo["core_ver"] = prserv.__version__
+ metainfo["col_info"] = []
data = self._execute("PRAGMA table_info(%s);" % self.table)
for row in data:
col = {}
- col['name'] = row['name']
- col['type'] = row['type']
- col['notnull'] = row['notnull']
- col['dflt_value'] = row['dflt_value']
- col['pk'] = row['pk']
- metainfo['col_info'].append(col)
+ col["name"] = row["name"]
+ col["type"] = row["type"]
+ col["notnull"] = row["notnull"]
+ col["dflt_value"] = row["dflt_value"]
+ col["pk"] = row["pk"]
+ metainfo["col_info"].append(col)
#data info
datainfo = []
if self.nohist:
sqlstmt = "SELECT T1.version, T1.pkgarch, T1.checksum, T1.value FROM %s as T1, \
- (SELECT version,pkgarch,max(value) as maxvalue FROM %s GROUP BY version,pkgarch) as T2 \
+ (SELECT version, pkgarch, max(value) as maxvalue FROM %s GROUP BY version, pkgarch) as T2 \
WHERE T1.version=T2.version AND T1.pkgarch=T2.pkgarch AND T1.value=T2.maxvalue " % (self.table, self.table)
else:
sqlstmt = "SELECT * FROM %s as T1 WHERE 1=1 " % self.table
@@ -226,12 +303,12 @@ class PRTable(object):
else:
data = self._execute(sqlstmt)
for row in data:
- if row['version']:
+ if row["version"]:
col = {}
- col['version'] = row['version']
- col['pkgarch'] = row['pkgarch']
- col['checksum'] = row['checksum']
- col['value'] = row['value']
+ col["version"] = row["version"]
+ col["pkgarch"] = row["pkgarch"]
+ col["checksum"] = row["checksum"]
+ col["value"] = row["value"]
datainfo.append(col)
return (metainfo, datainfo)
@@ -240,41 +317,45 @@ class PRTable(object):
for line in self.conn.iterdump():
writeCount = writeCount + len(line) + 1
fd.write(line)
- fd.write('\n')
+ fd.write("\n")
return writeCount
class PRData(object):
"""Object representing the PR database"""
- def __init__(self, filename, nohist=True):
+ def __init__(self, filename, nohist=True, read_only=False):
self.filename=os.path.abspath(filename)
self.nohist=nohist
+ self.read_only = read_only
#build directory hierarchy
try:
os.makedirs(os.path.dirname(self.filename))
except OSError as e:
if e.errno != errno.EEXIST:
raise e
- self.connection=sqlite3.connect(self.filename, isolation_level="EXCLUSIVE", check_same_thread = False)
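+ # Use an SQLite URI so the database can be opened read-only (?mode=ro)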
+ uri = "file:%s%s" % (self.filename, "?mode=ro" if self.read_only else "")
+ logger.debug("Opening PRServ database '%s'" % (uri))
+ self.connection=sqlite3.connect(uri, uri=True, isolation_level="EXCLUSIVE", check_same_thread=False)
self.connection.row_factory=sqlite3.Row
- self.connection.execute("pragma synchronous = off;")
- self.connection.execute("PRAGMA journal_mode = MEMORY;")
+ if not self.read_only:
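+ # These pragmas trade durability for speed and only matter for writers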
+ self.connection.execute("pragma synchronous = off;")
+ self.connection.execute("PRAGMA journal_mode = MEMORY;")
self._tables={}
def disconnect(self):
self.connection.close()
- def __getitem__(self,tblname):
+ def __getitem__(self, tblname):
if not isinstance(tblname, str):
raise TypeError("tblname argument must be a string, not '%s'" %
type(tblname))
if tblname in self._tables:
return self._tables[tblname]
else:
- tableobj = self._tables[tblname] = PRTable(self.connection, tblname, self.nohist)
+ tableobj = self._tables[tblname] = PRTable(self.connection, tblname, self.nohist, self.read_only)
return tableobj
def __delitem__(self, tblname):
if tblname in self._tables:
del self._tables[tblname]
logger.info("drop table %s" % (tblname))
- self.connection.execute("DROP TABLE IF EXISTS %s;" % tblname)
+ self.connection.execute("DROP TABLE IF EXISTS %s;" % tblname)
diff --git a/lib/prserv/serv.py b/lib/prserv/serv.py
index 5e322bf83..dc4be5b62 100644
--- a/lib/prserv/serv.py
+++ b/lib/prserv/serv.py
@@ -1,160 +1,167 @@
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
import os,sys,logging
import signal, time
-from xmlrpc.server import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
import socket
import io
import sqlite3
-import bb.server.xmlrpcclient
import prserv
import prserv.db
import errno
-import multiprocessing
+import bb.asyncrpc
logger = logging.getLogger("BitBake.PRserv")
-class Handler(SimpleXMLRPCRequestHandler):
- def _dispatch(self,method,params):
+PIDPREFIX = "/tmp/PRServer_%s_%s.pid"
+singleton = None
+
+class PRServerClient(bb.asyncrpc.AsyncServerConnection):
+ def __init__(self, socket, server):
+ super().__init__(socket, "PRSERVICE", server.logger)
+ self.server = server
+
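+ # Register the RPC message types this connection can service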
+ self.handlers.update({
+ "get-pr": self.handle_get_pr,
+ "test-pr": self.handle_test_pr,
+ "test-package": self.handle_test_package,
+ "max-package-pr": self.handle_max_package_pr,
+ "import-one": self.handle_import_one,
+ "export": self.handle_export,
+ "is-readonly": self.handle_is_readonly,
+ })
+
+ def validate_proto_version(self):
+ return (self.proto_version == (1, 0))
+
+ async def dispatch_message(self, msg):
try:
- value=self.server.funcs[method](*params)
+ return await super().dispatch_message(msg)
except:
- import traceback
- traceback.print_exc()
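+ # Flush pending PR values to the database before propagating the error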
+ self.server.table.sync()
raise
- return value
+ else:
+ self.server.table.sync_if_dirty()
-PIDPREFIX = "/tmp/PRServer_%s_%s.pid"
-singleton = None
+ async def handle_test_pr(self, request):
+ '''Finds the PR value corresponding to the request. If not found, returns None and doesn't insert a new value'''
+ version = request["version"]
+ pkgarch = request["pkgarch"]
+ checksum = request["checksum"]
+ value = self.server.table.find_value(version, pkgarch, checksum)
+ return {"value": value}
-class PRServer(SimpleXMLRPCServer):
- def __init__(self, dbfile, logfile, interface):
- ''' constructor '''
- try:
- SimpleXMLRPCServer.__init__(self, interface,
- logRequests=False, allow_none=True)
- except socket.error:
- ip=socket.gethostbyname(interface[0])
- port=interface[1]
- msg="PR Server unable to bind to %s:%s\n" % (ip, port)
- sys.stderr.write(msg)
- raise PRServiceConfigError
-
- self.dbfile=dbfile
- self.logfile=logfile
- self.host, self.port = self.socket.getsockname()
+ async def handle_test_package(self, request):
+ '''Tells whether there are entries for (version, pkgarch) in the db. Returns True or False'''
+ version = request["version"]
+ pkgarch = request["pkgarch"]
- self.register_function(self.getPR, "getPR")
- self.register_function(self.ping, "ping")
- self.register_function(self.export, "export")
- self.register_function(self.importone, "importone")
- self.register_introspection_functions()
+ value = self.server.table.test_package(version, pkgarch)
+ return {"value": value}
- self.iter_count = 0
- # 60 iterations between syncs or sync if dirty every ~30 seconds
- self.iterations_between_sync = 60
+ async def handle_max_package_pr(self, request):
+ '''Finds the greatest PR value for (version, pkgarch) in the db. Returns None if no entry was found'''
+ version = request["version"]
+ pkgarch = request["pkgarch"]
- def sigint_handler(self, signum, stack):
- if self.table:
- self.table.sync()
+ value = self.server.table.find_max_value(version, pkgarch)
+ return {"value": value}
- def sigterm_handler(self, signum, stack):
- if self.table:
- self.table.sync()
- raise(SystemExit)
+ async def handle_get_pr(self, request):
+ version = request["version"]
+ pkgarch = request["pkgarch"]
+ checksum = request["checksum"]
- def process_request(self, request, client_address):
- if request is None:
- return
+ response = None
try:
- self.finish_request(request, client_address)
- self.shutdown_request(request)
- self.iter_count = (self.iter_count + 1) % self.iterations_between_sync
- if self.iter_count == 0:
- self.table.sync_if_dirty()
- except:
- self.handle_error(request, client_address)
- self.shutdown_request(request)
- self.table.sync()
- self.table.sync_if_dirty()
-
- def serve_forever(self, poll_interval=0.5):
- signal.signal(signal.SIGINT, self.sigint_handler)
- signal.signal(signal.SIGTERM, self.sigterm_handler)
+ value = self.server.table.get_value(version, pkgarch, checksum)
+ response = {"value": value}
+ except prserv.NotFoundError:
+ self.logger.error("failure storing value in database for (%s, %s)",version, checksum)
- self.db = prserv.db.PRData(self.dbfile)
- self.table = self.db["PRMAIN"]
- return super().serve_forever(poll_interval)
+ return response
- def export(self, version=None, pkgarch=None, checksum=None, colinfo=True):
- try:
- return self.table.export(version, pkgarch, checksum, colinfo)
- except sqlite3.Error as exc:
- logger.error(str(exc))
- return None
+ async def handle_import_one(self, request):
+ response = None
+ if not self.server.read_only:
+ version = request["version"]
+ pkgarch = request["pkgarch"]
+ checksum = request["checksum"]
+ value = request["value"]
- def importone(self, version, pkgarch, checksum, value):
- return self.table.importone(version, pkgarch, checksum, value)
+ value = self.server.table.importone(version, pkgarch, checksum, value)
+ if value is not None:
+ response = {"value": value}
- def ping(self):
- return True
+ return response
- def getinfo(self):
- return (self.host, self.port)
+ async def handle_export(self, request):
+ version = request["version"]
+ pkgarch = request["pkgarch"]
+ checksum = request["checksum"]
+ colinfo = request["colinfo"]
- def getPR(self, version, pkgarch, checksum):
try:
- return self.table.getValue(version, pkgarch, checksum)
- except prserv.NotFoundError:
- logger.error("can not find value for (%s, %s)",version, checksum)
- return None
+ (metainfo, datainfo) = self.server.table.export(version, pkgarch, checksum, colinfo)
except sqlite3.Error as exc:
- logger.error(str(exc))
- return None
+ self.logger.error(str(exc))
+ metainfo = datainfo = None
-class PRServSingleton(object):
- def __init__(self, dbfile, logfile, interface):
+ return {"metainfo": metainfo, "datainfo": datainfo}
+
+ async def handle_is_readonly(self, request):
+ return {"readonly": self.server.read_only}
+
+class PRServer(bb.asyncrpc.AsyncServer):
+ def __init__(self, dbfile, read_only=False):
+ super().__init__(logger)
self.dbfile = dbfile
- self.logfile = logfile
- self.interface = interface
- self.host = None
- self.port = None
+ self.table = None
+ self.read_only = read_only
- def start(self):
- self.prserv = PRServer(self.dbfile, self.logfile, self.interface)
- self.process = multiprocessing.Process(target=self.prserv.serve_forever)
- self.process.start()
+ def accept_client(self, socket):
+ return PRServerClient(socket, self)
- self.host, self.port = self.prserv.getinfo()
+ def start(self):
+ tasks = super().start()
+ self.db = prserv.db.PRData(self.dbfile, read_only=self.read_only)
+ self.table = self.db["PRMAIN"]
- def getinfo(self):
- return (self.host, self.port)
+ self.logger.info("Started PRServer with DBfile: %s, Address: %s, PID: %s" %
+ (self.dbfile, self.address, str(os.getpid())))
-class PRServerConnection(object):
- def __init__(self, host, port):
- if is_local_special(host, port):
- host, port = singleton.getinfo()
- self.host = host
- self.port = port
- self.connection, self.transport = bb.server.xmlrpcclient._create_server(self.host, self.port)
+ return tasks
- def getPR(self, version, pkgarch, checksum):
- return self.connection.getPR(version, pkgarch, checksum)
+ async def stop(self):
+ self.table.sync_if_dirty()
+ self.db.disconnect()
+ await super().stop()
- def ping(self):
- return self.connection.ping()
+ def signal_handler(self):
+ super().signal_handler()
+ if self.table:
+ self.table.sync()
- def export(self,version=None, pkgarch=None, checksum=None, colinfo=True):
- return self.connection.export(version, pkgarch, checksum, colinfo)
+class PRServSingleton(object):
+ def __init__(self, dbfile, logfile, host, port):
+ self.dbfile = dbfile
+ self.logfile = logfile
+ self.host = host
+ self.port = port
- def importone(self, version, pkgarch, checksum, value):
- return self.connection.importone(version, pkgarch, checksum, value)
+ def start(self):
+ self.prserv = PRServer(self.dbfile)
+ self.prserv.start_tcp_server(socket.gethostbyname(self.host), self.port)
+ self.process = self.prserv.serve_as_process(log_level=logging.WARNING)
- def getinfo(self):
- return self.host, self.port
+ if not self.prserv.address:
+ raise PRServiceConfigError
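+ # A requested port of 0 means "any"; recover the port actually bound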
+ if not self.port:
+ self.port = int(self.prserv.address.rsplit(":", 1)[1])
def run_as_daemon(func, pidfile, logfile):
"""
@@ -190,18 +197,18 @@ def run_as_daemon(func, pidfile, logfile):
# stdout/stderr or it could be 'real' unix fd forking where we need
# to physically close the fds to prevent the program launching us from
# potentially hanging on a pipe. Handle both cases.
- si = open('/dev/null', 'r')
+ si = open("/dev/null", "r")
try:
- os.dup2(si.fileno(),sys.stdin.fileno())
+ os.dup2(si.fileno(), sys.stdin.fileno())
except (AttributeError, io.UnsupportedOperation):
sys.stdin = si
- so = open(logfile, 'a+')
+ so = open(logfile, "a+")
try:
- os.dup2(so.fileno(),sys.stdout.fileno())
+ os.dup2(so.fileno(), sys.stdout.fileno())
except (AttributeError, io.UnsupportedOperation):
sys.stdout = so
try:
- os.dup2(so.fileno(),sys.stderr.fileno())
+ os.dup2(so.fileno(), sys.stderr.fileno())
except (AttributeError, io.UnsupportedOperation):
sys.stderr = so
@@ -219,14 +226,14 @@ def run_as_daemon(func, pidfile, logfile):
# write pidfile
pid = str(os.getpid())
- with open(pidfile, 'w') as pf:
+ with open(pidfile, "w") as pf:
pf.write("%s\n" % pid)
func()
os.remove(pidfile)
os._exit(0)
-def start_daemon(dbfile, host, port, logfile):
+def start_daemon(dbfile, host, port, logfile, read_only=False):
ip = socket.gethostbyname(host)
pidfile = PIDPREFIX % (ip, port)
try:
@@ -240,15 +247,13 @@ def start_daemon(dbfile, host, port, logfile):
% pidfile)
return 1
- server = PRServer(os.path.abspath(dbfile), os.path.abspath(logfile), (ip,port))
- run_as_daemon(server.serve_forever, pidfile, os.path.abspath(logfile))
+ dbfile = os.path.abspath(dbfile)
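+ # Construct and run the server inside the daemonized child process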
+ def daemon_main():
+ server = PRServer(dbfile, read_only=read_only)
+ server.start_tcp_server(ip, port)
+ server.serve_forever()
- # Sometimes, the port (i.e. localhost:0) indicated by the user does not match with
- # the one the server actually is listening, so at least warn the user about it
- _,rport = server.getinfo()
- if port != rport:
- sys.stdout.write("Server is listening at port %s instead of %s\n"
- % (rport,port))
+ run_as_daemon(daemon_main, pidfile, os.path.abspath(logfile))
return 0
def stop_daemon(host, port):
@@ -266,15 +271,15 @@ def stop_daemon(host, port):
# so at least advise the user which ports the corresponding server is listening
ports = []
portstr = ""
- for pf in glob.glob(PIDPREFIX % (ip,'*')):
+ for pf in glob.glob(PIDPREFIX % (ip, "*")):
bn = os.path.basename(pf)
root, _ = os.path.splitext(bn)
- ports.append(root.split('_')[-1])
+ ports.append(root.split("_")[-1])
if len(ports):
- portstr = "Wrong port? Other ports listening at %s: %s" % (host, ' '.join(ports))
+ portstr = "Wrong port? Other ports listening at %s: %s" % (host, " ".join(ports))
sys.stderr.write("pidfile %s does not exist. Daemon not running? %s\n"
- % (pidfile,portstr))
+ % (pidfile, portstr))
return 1
try:
@@ -283,8 +288,11 @@ def stop_daemon(host, port):
os.kill(pid, signal.SIGTERM)
time.sleep(0.1)
- if os.path.exists(pidfile):
+ try:
os.remove(pidfile)
+ except FileNotFoundError:
+ # The PID file might have been removed by the exiting process
+ pass
except OSError as e:
err = str(e)
@@ -302,7 +310,7 @@ def is_running(pid):
return True
def is_local_special(host, port):
- if host.strip().upper() == 'localhost'.upper() and (not port):
+ if (host == "localhost" or host == "127.0.0.1") and not port:
return True
else:
return False
@@ -313,7 +321,7 @@ class PRServiceConfigError(Exception):
def auto_start(d):
global singleton
- host_params = list(filter(None, (d.getVar('PRSERV_HOST') or '').split(':')))
+ host_params = list(filter(None, (d.getVar("PRSERV_HOST") or "").split(":")))
if not host_params:
# Shutdown any existing PR Server
auto_shutdown()
@@ -322,11 +330,13 @@ def auto_start(d):
if len(host_params) != 2:
# Shutdown any existing PR Server
auto_shutdown()
- logger.critical('\n'.join(['PRSERV_HOST: incorrect format',
+ logger.critical("\n".join(["PRSERV_HOST: incorrect format",
'Usage: PRSERV_HOST = "<hostname>:<port>"']))
raise PRServiceConfigError
- if is_local_special(host_params[0], int(host_params[1])):
+ host = host_params[0].strip().lower()
+ port = int(host_params[1])
+ if is_local_special(host, port):
import bb.utils
cachedir = (d.getVar("PERSISTENT_DIR") or d.getVar("CACHE"))
if not cachedir:
@@ -340,20 +350,16 @@ def auto_start(d):
auto_shutdown()
if not singleton:
bb.utils.mkdirhier(cachedir)
- singleton = PRServSingleton(os.path.abspath(dbfile), os.path.abspath(logfile), ("localhost",0))
+ singleton = PRServSingleton(os.path.abspath(dbfile), os.path.abspath(logfile), host, port)
singleton.start()
if singleton:
- host, port = singleton.getinfo()
- else:
- host = host_params[0]
- port = int(host_params[1])
+ host = singleton.host
+ port = singleton.port
try:
- connection = PRServerConnection(host,port)
- connection.ping()
- realhost, realport = connection.getinfo()
- return str(realhost) + ":" + str(realport)
-
+ ping(host, port)
+ return str(host) + ":" + str(port)
+
except Exception:
logger.critical("PRservice %s:%d not available" % (host, port))
raise PRServiceConfigError
@@ -366,8 +372,21 @@ def auto_shutdown():
singleton = None
def ping(host, port):
- conn=PRServerConnection(host, port)
- return conn.ping()
+ from . import client
+
+ with client.PRClient() as conn:
+ conn.connect_tcp(host, port)
+ return conn.ping()
def connect(host, port):
- return PRServerConnection(host, port)
+ from . import client
+
+ global singleton
+
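+ # "localhost" with no port refers to the auto-started singleton server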
+ if host.strip().lower() == "localhost" and not port:
+ host = "localhost"
+ port = singleton.port
+
+ conn = client.PRClient()
+ conn.connect_tcp(host, port)
+ return conn
diff --git a/lib/pyinotify.py b/lib/pyinotify.py
index 6ae40a2d7..3c5dab031 100644
--- a/lib/pyinotify.py
+++ b/lib/pyinotify.py
@@ -52,7 +52,6 @@ from collections import deque
from datetime import datetime, timedelta
import time
import re
-import asyncore
import glob
import locale
import subprocess
@@ -596,14 +595,24 @@ class _ProcessEvent:
@type event: Event object
@return: By convention when used from the ProcessEvent class:
- Returning False or None (default value) means keep on
- executing next chained functors (see chain.py example).
+ executing next chained functors (see chain.py example).
- Returning True instead means do not execute next
processing functions.
@rtype: bool
@raise ProcessEventError: Event object undispatchable,
unknown event.
"""
- stripped_mask = event.mask - (event.mask & IN_ISDIR)
+ stripped_mask = event.mask & ~IN_ISDIR
+ # Bitbake hack - we see event masks of 0x6, i.e., IN_MODIFY & IN_ATTRIB.
+ # The kernel inotify code can set more than one of the bits in the mask,
+ # fsnotify_change() in linux/fsnotify.h is quite clear that IN_ATTRIB,
+ # IN_MODIFY and IN_ACCESS can arrive together.
+ # This breaks the code below which assume only one mask bit is ever
+ # set in an event. We don't care about attrib or access in bitbake so
+ # drop those.
+ if stripped_mask & IN_MODIFY:
+ stripped_mask &= ~(IN_ATTRIB | IN_ACCESS)
+
maskname = EventsCodes.ALL_VALUES.get(stripped_mask)
if maskname is None:
raise ProcessEventError("Unknown mask 0x%08x" % stripped_mask)
@@ -1475,35 +1484,6 @@ class ThreadedNotifier(threading.Thread, Notifier):
self.loop()
-class AsyncNotifier(asyncore.file_dispatcher, Notifier):
- """
- This notifier inherits from asyncore.file_dispatcher in order to be able to
- use pyinotify along with the asyncore framework.
-
- """
- def __init__(self, watch_manager, default_proc_fun=None, read_freq=0,
- threshold=0, timeout=None, channel_map=None):
- """
- Initializes the async notifier. The only additional parameter is
- 'channel_map' which is the optional asyncore private map. See
- Notifier class for the meaning of the others parameters.
-
- """
- Notifier.__init__(self, watch_manager, default_proc_fun, read_freq,
- threshold, timeout)
- asyncore.file_dispatcher.__init__(self, self._fd, channel_map)
-
- def handle_read(self):
- """
- When asyncore tells us we can read from the fd, we proceed processing
- events. This method can be overridden for handling a notification
- differently.
-
- """
- self.read_events()
- self.process_events()
-
-
class TornadoAsyncNotifier(Notifier):
"""
Tornado ioloop adapter.
diff --git a/lib/toaster/bldcollector/urls.py b/lib/toaster/bldcollector/urls.py
index efd67a81a..3c3407035 100644
--- a/lib/toaster/bldcollector/urls.py
+++ b/lib/toaster/bldcollector/urls.py
@@ -6,7 +6,7 @@
# SPDX-License-Identifier: GPL-2.0-only
#
-from django.conf.urls import url
+from django.urls import re_path as url
import bldcollector.views
diff --git a/lib/toaster/bldcollector/views.py b/lib/toaster/bldcollector/views.py
index 04cd8b3dd..bdf38ae6e 100644
--- a/lib/toaster/bldcollector/views.py
+++ b/lib/toaster/bldcollector/views.py
@@ -14,8 +14,11 @@ import subprocess
import toastermain
from django.views.decorators.csrf import csrf_exempt
+from toastermain.logs import log_view_mixin
+
@csrf_exempt
+@log_view_mixin
def eventfile(request):
""" Receives a file by POST, and runs toaster-eventreply on this file """
if request.method != "POST":
diff --git a/lib/toaster/bldcontrol/localhostbecontroller.py b/lib/toaster/bldcontrol/localhostbecontroller.py
index 75674ccbf..577e765f1 100644
--- a/lib/toaster/bldcontrol/localhostbecontroller.py
+++ b/lib/toaster/bldcontrol/localhostbecontroller.py
@@ -200,7 +200,7 @@ class LocalhostBEController(BuildEnvironmentController):
localdirpath = os.path.join(localdirname, dirpath)
logger.debug("localhostbecontroller: localdirpath expects '%s'" % localdirpath)
if not os.path.exists(localdirpath):
- raise BuildSetupException("Cannot find layer git path '%s' in checked out repository '%s:%s'. Aborting." % (localdirpath, giturl, commit))
+ raise BuildSetupException("Cannot find layer git path '%s' in checked out repository '%s:%s'. Exiting." % (localdirpath, giturl, commit))
if name != "bitbake":
layerlist.append("%03d:%s" % (index,localdirpath.rstrip("/")))
@@ -467,7 +467,7 @@ class LocalhostBEController(BuildEnvironmentController):
logger.debug("localhostbecontroller: waiting for bblock content to appear")
time.sleep(1)
else:
- raise BuildSetupException("Cannot find bitbake server lock file '%s'. Aborting." % bblock)
+ raise BuildSetupException("Cannot find bitbake server lock file '%s'. Exiting." % bblock)
with open(bblock) as fplock:
for line in fplock:
diff --git a/lib/toaster/bldcontrol/management/commands/runbuilds.py b/lib/toaster/bldcontrol/management/commands/runbuilds.py
index 19f659ec4..834e32b36 100644
--- a/lib/toaster/bldcontrol/management/commands/runbuilds.py
+++ b/lib/toaster/bldcontrol/management/commands/runbuilds.py
@@ -180,6 +180,77 @@ class Command(BaseCommand):
except Exception as e:
logger.warning("runbuilds: schedule exception %s" % str(e))
+ # Test to see if a build prematurely died due to a bitbake crash
+ def check_dead_builds(self):
+ do_cleanup = False
+ try:
+ for br in BuildRequest.objects.filter(state=BuildRequest.REQ_INPROGRESS):
+ # Get the build directory
+ if br.project.builddir:
+ builddir = br.project.builddir
+ else:
+ builddir = '%s-toaster-%d' % (br.environment.builddir, br.project.id)
+ # Check log to see if there is a recent traceback
+ toaster_ui_log = os.path.join(builddir, 'toaster_ui.log')
+ test_file = os.path.join(builddir, '._toaster_check.txt')
+ os.system("tail -n 50 %s > %s" % (os.path.join(builddir, 'toaster_ui.log'),test_file))
+ traceback_text = ''
+ is_traceback = False
+ with open(test_file, 'r') as test_file_fd:
+ test_file_tail = test_file_fd.readlines()
+ for line in test_file_tail:
+ if line.startswith('Traceback (most recent call last):'):
+ traceback_text = line
+ is_traceback = True
+ elif line.startswith('NOTE: ToasterUI waiting for events'):
+ # Ignore any traceback from before the new build started
+ traceback_text = ''
+ is_traceback = False
+ elif line.startswith('Note: Toaster traceback auto-stop'):
+ # Ignore any traceback already handled by a previous auto-stop
+ traceback_text = ''
+ is_traceback = False
+ elif is_traceback:
+ traceback_text += line
+ # Test the results
+ is_stop = False
+ if is_traceback:
+ # Found a traceback
+ errtype = 'Bitbake crash'
+ errmsg = 'Bitbake crash\n' + traceback_text
+ state = BuildRequest.REQ_FAILED
+ # Clean up bitbake files
+ bitbake_lock = os.path.join(builddir, 'bitbake.lock')
+ if os.path.isfile(bitbake_lock):
+ os.remove(bitbake_lock)
+ bitbake_sock = os.path.join(builddir, 'bitbake.sock')
+ if os.path.isfile(bitbake_sock):
+ os.remove(bitbake_sock)
+ if os.path.isfile(test_file):
+ os.remove(test_file)
+ # Add note to ignore this traceback on next check
+ os.system('echo "Note: Toaster traceback auto-stop" >> %s' % toaster_ui_log)
+ is_stop = True
+ # Add more tests here
+ #elif ...
+ # Stop the build request?
+ if is_stop:
+ brerror = BRError(
+ req = br,
+ errtype = errtype,
+ errmsg = errmsg,
+ traceback = traceback_text,
+ )
+ brerror.save()
+ br.state = state
+ br.save()
+ do_cleanup = True
+ # Do cleanup
+ if do_cleanup:
+ self.cleanup()
+ except Exception as e:
+ logger.error("runbuilds: Error in check_dead_builds %s" % e)
+
def handle(self, **options):
pidfile_path = os.path.join(os.environ.get("BUILDDIR", "."),
".runbuilds.pid")
@@ -187,10 +258,18 @@ class Command(BaseCommand):
with open(pidfile_path, 'w') as pidfile:
pidfile.write("%s" % os.getpid())
+ # Clean up any stale/failed builds from previous Toaster run
self.runbuild()
signal.signal(signal.SIGUSR1, lambda sig, frame: None)
while True:
- signal.pause()
- self.runbuild()
+ siginfo = signal.sigtimedwait([signal.SIGUSR1], 5)
+ if siginfo:
+ # Consume the captured pending event
+ self.runbuild()
+ else:
+ # Timed out with no signal: check for build exceptions
+ self.check_dead_builds()
+
diff --git a/lib/toaster/bldcontrol/migrations/0008_models_bigautofield.py b/lib/toaster/bldcontrol/migrations/0008_models_bigautofield.py
new file mode 100644
index 000000000..45b477d02
--- /dev/null
+++ b/lib/toaster/bldcontrol/migrations/0008_models_bigautofield.py
@@ -0,0 +1,48 @@
+# Generated by Django 3.2.12 on 2022-03-06 03:28
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('bldcontrol', '0007_brlayers_optional_gitinfo'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='brbitbake',
+ name='id',
+ field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+ ),
+ migrations.AlterField(
+ model_name='brerror',
+ name='id',
+ field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+ ),
+ migrations.AlterField(
+ model_name='brlayer',
+ name='id',
+ field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+ ),
+ migrations.AlterField(
+ model_name='brtarget',
+ name='id',
+ field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+ ),
+ migrations.AlterField(
+ model_name='brvariable',
+ name='id',
+ field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+ ),
+ migrations.AlterField(
+ model_name='buildenvironment',
+ name='id',
+ field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+ ),
+ migrations.AlterField(
+ model_name='buildrequest',
+ name='id',
+ field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+ ),
+ ]
diff --git a/lib/toaster/bldcontrol/models.py b/lib/toaster/bldcontrol/models.py
index c2f302da2..42750e718 100644
--- a/lib/toaster/bldcontrol/models.py
+++ b/lib/toaster/bldcontrol/models.py
@@ -4,7 +4,7 @@
from __future__ import unicode_literals
from django.db import models
-from django.utils.encoding import force_text
+from django.utils.encoding import force_str
from orm.models import Project, Build, Layer_Version
import logging
@@ -124,7 +124,7 @@ class BuildRequest(models.Model):
return self.brvariable_set.get(name="MACHINE").value
def __str__(self):
- return force_text('%s %s' % (self.project, self.get_state_display()))
+ return force_str('%s %s' % (self.project, self.get_state_display()))
# These tables specify the settings for running an actual build.
# They MUST be kept in sync with the tables in orm.models.Project*
diff --git a/lib/toaster/logs/.gitignore b/lib/toaster/logs/.gitignore
new file mode 100644
index 000000000..e5ebf25a4
--- /dev/null
+++ b/lib/toaster/logs/.gitignore
@@ -0,0 +1 @@
+*.log*
diff --git a/lib/toaster/manage.py b/lib/toaster/manage.py
index ae32619d1..f8de49c26 100755
--- a/lib/toaster/manage.py
+++ b/lib/toaster/manage.py
@@ -1,5 +1,7 @@
#!/usr/bin/env python3
#
+# Copyright BitBake Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
diff --git a/lib/toaster/orm/fixtures/README b/lib/toaster/orm/fixtures/README
index 1b1c660aa..7cd745e26 100644
--- a/lib/toaster/orm/fixtures/README
+++ b/lib/toaster/orm/fixtures/README
@@ -27,4 +27,4 @@ Data can be provided in XML, JSON and if installed YAML formats.
Use the django management command manage.py loaddata <your fixture file>
For further information see the Django command documentation at:
-https://docs.djangoproject.com/en/1.8/ref/django-admin/#django-admin-loaddata
+https://docs.djangoproject.com/en/3.2/ref/django-admin/#django-admin-loaddata
diff --git a/lib/toaster/orm/fixtures/gen_fixtures.py b/lib/toaster/orm/fixtures/gen_fixtures.py
new file mode 100755
index 000000000..71afe3914
--- /dev/null
+++ b/lib/toaster/orm/fixtures/gen_fixtures.py
@@ -0,0 +1,447 @@
+#!/usr/bin/env python3
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Generate Toaster Fixtures for 'poky.xml' and 'oe-core.xml'
+#
+# Copyright (C) 2022 Wind River Systems
+# SPDX-License-Identifier: GPL-2.0-only
+#
+# Edit the 'current_releases' table for each new release cycle
+#
+# Usage: ./gen_fixtures.py all
+#
+
+import os
+import sys
+import argparse
+
+verbose = False
+
+####################################
+# Releases
+#
+# https://wiki.yoctoproject.org/wiki/Releases
+#
+# NOTE: for the current releases table, it helps to keep continuing releases
+# in the same table positions since this minimizes the patch diff for review.
+# The order of the table does not matter since Toaster presents them sorted.
+#
+# Traditionally, the two most current releases are included in addition to the
+# 'master' branch and the local installation's 'HEAD'.
+# It is also policy to include all active LTS releases.
+#
+
+# [Codename, Yocto Project Version, Release Date, Current Version, Support Level, Poky Version, BitBake branch]
+current_releases = [
+ # Release slot #1
+ ['Kirkstone','4.0','April 2022','4.0.8 (March 2023)','Stable - Long Term Support (until Apr. 2024)','','2.0'],
+ # Release slot #2 'local'
+ ['HEAD','HEAD','','Local Yocto Project','HEAD','','HEAD'],
+ # Release slot #3 'master'
+ ['Master','master','','Yocto Project master','master','','master'],
+ # Release slot #4
+ ['Mickledore','4.2','April 2023','4.2.0 (April 2023)','Support for 7 months (until October 2023)','','2.4'],
+# ['Langdale','4.1','October 2022','4.1.2 (January 2023)','Support for 7 months (until May 2023)','','2.2'],
+# ['Honister','3.4','October 2021','3.4.2 (February 2022)','Support for 7 months (until May 2022)','26.0','1.52'],
+# ['Hardknott','3.3','April 2021','3.3.5 (March 2022)','Stable - Support for 13 months (until Apr. 2022)','25.0','1.50'],
+# ['Gatesgarth','3.2','Oct 2020','3.2.4 (May 2021)','EOL','24.0','1.48'],
+ # Optional Release slot #5
+ ['Dunfell','3.1','April 2020','3.1.23 (February 2023)','Stable - Long Term Support (until Apr. 2024)','23.0','1.46'],
+]
+
+default_poky_layers = [
+ 'openembedded-core',
+ 'meta-poky',
+ 'meta-yocto-bsp',
+]
+
+default_oe_core_layers = [
+ 'openembedded-core',
+]
+
+####################################
+# Templates
+
+prolog_template = '''\
+<?xml version="1.0" encoding="utf-8"?>
+<django-objects version="1.0">
+ <!-- Set the project default value for DISTRO -->
+ <object model="orm.toastersetting" pk="1">
+ <field type="CharField" name="name">DEFCONF_DISTRO</field>
+ <field type="CharField" name="value">{{distro}}</field>
+ </object>
+'''
+
+#<!-- Bitbake versions which correspond to the metadata release -->')
+bitbakeversion_poky_template = '''\
+ <object model="orm.bitbakeversion" pk="{{bitbake_id}}">
+ <field type="CharField" name="name">{{name}}</field>
+ <field type="CharField" name="giturl">git://git.yoctoproject.org/poky</field>
+ <field type="CharField" name="branch">{{branch}}</field>
+ <field type="CharField" name="dirpath">bitbake</field>
+ </object>
+'''
+bitbakeversion_oecore_template = '''\
+ <object model="orm.bitbakeversion" pk="{{bitbake_id}}">
+ <field type="CharField" name="name">{{name}}</field>
+ <field type="CharField" name="giturl">git://git.openembedded.org/bitbake</field>
+ <field type="CharField" name="branch">{{bitbakeversion}}</field>
+ </object>
+'''
+
+# <!-- Releases available -->
+releases_available_template = '''\
+ <object model="orm.release" pk="{{ra_count}}">
+ <field type="CharField" name="name">{{name}}</field>
+ <field type="CharField" name="description">{{description}}</field>
+ <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">{{ra_count}}</field>
+ <field type="CharField" name="branch_name">{{release}}</field>
+ <field type="TextField" name="helptext">Toaster will run your builds {{help_source}}.</field>
+ </object>
+'''
+
+# <!-- Default project layers for each release -->
+default_layers_template = '''\
+ <object model="orm.releasedefaultlayer" pk="{{rdl_count}}">
+ <field rel="ManyToOneRel" to="orm.release" name="release">{{release_id}}</field>
+ <field type="CharField" name="layer_name">{{layer}}</field>
+ </object>
+'''
+
+default_layers_preface = '''\
+ <!-- Default layers provided by poky
+ openembedded-core
+ meta-poky
+ meta-yocto-bsp
+ -->
+'''
+
+layer_poky_template = '''\
+ <object model="orm.layer" pk="{{layer_id}}">
+ <field type="CharField" name="name">{{layer}}</field>
+ <field type="CharField" name="layer_index_url"></field>
+ <field type="CharField" name="vcs_url">{{vcs_url}}</field>
+ <field type="CharField" name="vcs_web_url">{{vcs_web_url}}</field>
+ <field type="CharField" name="vcs_web_tree_base_url">{{vcs_web_tree_base_url}}</field>
+ <field type="CharField" name="vcs_web_file_base_url">{{vcs_web_file_base_url}}</field>
+ </object>
+'''
+
+layer_oe_core_template = '''\
+ <object model="orm.layer" pk="{{layer_id}}">
+ <field type="CharField" name="name">{{layer}}</field>
+ <field type="CharField" name="vcs_url">{{vcs_url}}</field>
+ <field type="CharField" name="vcs_web_url">{{vcs_web_url}}</field>
+ <field type="CharField" name="vcs_web_tree_base_url">{{vcs_web_tree_base_url}}</field>
+ <field type="CharField" name="vcs_web_file_base_url">{{vcs_web_file_base_url}}</field>
+ </object>
+'''
+
+layer_version_template = '''\
+ <object model="orm.layer_version" pk="{{lv_count}}">
+ <field rel="ManyToOneRel" to="orm.layer" name="layer">{{layer_id}}</field>
+ <field type="IntegerField" name="layer_source">0</field>
+ <field rel="ManyToOneRel" to="orm.release" name="release">{{release_id}}</field>
+ <field type="CharField" name="branch">{{branch}}</field>
+ <field type="CharField" name="dirpath">{{dirpath}}</field>
+ </object>
+'''
+
+layer_version_HEAD_template = '''\
+ <object model="orm.layer_version" pk="{{lv_count}}">
+ <field rel="ManyToOneRel" to="orm.layer" name="layer">{{layer_id}}</field>
+ <field type="IntegerField" name="layer_source">0</field>
+ <field rel="ManyToOneRel" to="orm.release" name="release">{{release_id}}</field>
+ <field type="CharField" name="branch">{{branch}}</field>
+ <field type="CharField" name="commit">{{commit}}</field>
+ <field type="CharField" name="dirpath">{{dirpath}}</field>
+ </object>
+'''
+
+layer_version_oe_core_template = '''\
+ <object model="orm.layer_version" pk="1">
+ <field rel="ManyToOneRel" to="orm.layer" name="layer">1</field>
+ <field rel="ManyToOneRel" to="orm.release" name="release">2</field>
+ <field type="CharField" name="local_path">OE-CORE-LAYER-DIR</field>
+ <field type="CharField" name="branch">HEAD</field>
+ <field type="CharField" name="dirpath">meta</field>
+ <field type="IntegerField" name="layer_source">0</field>
+ </object>
+'''
+
+epilog_template = '''\
+</django-objects>
+'''
+
+#################################
+# Helper Routines
+#
+
+def print_str(str,fd):
+ # Avoid extra newline at end
+ if str and (str[-1] == '\n'):
+ str = str[0:-1]
+ print(str,file=fd)
+
+def print_template(template,params,fd):
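+ # Substitute each {{key}} placeholder from 'params'; unknown keys
+ # render as ?key? so missing values are easy to spot in the output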
+ for line in template.split('\n'):
+ p = line.find('{{')
+ while p > 0:
+ q = line.find('}}')
+ key = line[p+2:q]
+ if key in params:
+ line = line[0:p] + params[key] + line[q+2:]
+ else:
+ line = line[0:p] + '?' + key + '?' + line[q+2:]
+ p = line.find('{{')
+ if line:
+ print(line,file=fd)
+
+#################################
+# Generate poky.xml
+#
+
+def generate_poky():
+ fd = open('poky.xml','w')
+
+ params = {}
+ params['distro'] = 'poky'
+ print_template(prolog_template,params,fd)
+ print_str('',fd)
+
+ print_str(' <!-- Bitbake versions which correspond to the metadata release -->',fd)
+ for i,release in enumerate(current_releases):
+ params = {}
+ params['release'] = release[0]
+ params['Release'] = release[0]
+ params['release_version'] = release[1]
+ if not (params['release'] in ('HEAD',)): # 'master',
+ params['release'] = params['release'][0].lower() + params['release'][1:]
+ params['name'] = params['release']
+ params['bitbake_id'] = str(i+1)
+ params['branch'] = params['release']
+ print_template(bitbakeversion_poky_template,params,fd)
+ print_str('',fd)
+
+ print_str('',fd)
+ print_str(' <!-- Releases available -->',fd)
+ for i,release in enumerate(current_releases):
+ params = {}
+ params['release'] = release[0]
+ params['Release'] = release[0]
+ params['release_version'] = release[1]
+ if not (params['release'] in ('HEAD',)): #'master',
+ params['release'] = params['release'][0].lower() + params['release'][1:]
+ params['h_release'] = '?h={{release}}'
+ params['name'] = params['release']
+ params['ra_count'] = str(i+1)
+ params['branch'] = params['release']
+
+ if 'HEAD' == params['release']:
+ params['help_source'] = 'with the version of the Yocto Project you have cloned or downloaded to your computer'
+ params['description'] = 'Local Yocto Project'
+ params['name'] = 'local'
+ else:
+ params['help_source'] = 'using the tip of the &lt;a href="https://git.yoctoproject.org/cgit/cgit.cgi/poky/log/{{h_release}}"&gt;Yocto Project {{Release}} branch&lt;/a&gt;'
+ params['description'] = 'Yocto Project {{release_version}} "{{Release}}"'
+ if 'master' == params['release']:
+ params['h_release'] = ''
+ params['description'] = 'Yocto Project master'
+
+ print_template(releases_available_template,params,fd)
+ print_str('',fd)
+
+ print_str(' <!-- Default project layers for each release -->',fd)
+ rdl_count = 1
+ for i,release in enumerate(current_releases):
+ for j,layer in enumerate(default_poky_layers):
+ params = {}
+ params['layer'] = layer
+ params['release'] = release[0]
+ params['Release'] = release[0]
+ params['release_version'] = release[1]
+ if not (params['release'] in ('master','HEAD')):
+ params['release'] = params['release'][0].lower() + params['release'][1:]
+ params['release_id'] = str(i+1)
+ params['rdl_count'] = str(rdl_count)
+ params['branch'] = params['release']
+ print_template(default_layers_template,params,fd)
+ rdl_count += 1
+ print_str('',fd)
+
+ print_str(default_layers_preface,fd)
+ lv_count = 1
+ for i,layer in enumerate(default_poky_layers):
+ params = {}
+ params['layer'] = layer
+ params['layer_id'] = str(i+1)
+ params['vcs_url'] = 'git://git.yoctoproject.org/poky'
+ params['vcs_web_url'] = 'https://git.yoctoproject.org/cgit/cgit.cgi/poky'
+ params['vcs_web_tree_base_url'] = 'https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%'
+ params['vcs_web_file_base_url'] = 'https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%'
+
+ if i:
+ print_str('',fd)
+ print_template(layer_poky_template,params,fd)
+ for j,release in enumerate(current_releases):
+ params['release'] = release[0]
+ params['Release'] = release[0]
+ params['release_version'] = release[1]
+ if not (params['release'] in ('master','HEAD')):
+ params['release'] = params['release'][0].lower() + params['release'][1:]
+ params['release_id'] = str(j+1)
+ params['lv_count'] = str(lv_count)
+ params['branch'] = params['release']
+ params['commit'] = params['release']
+
+ params['dirpath'] = params['layer']
+ if params['layer'] in ('openembedded-core',): #'openembedded-core',
+ params['dirpath'] = 'meta'
+
+ if 'HEAD' == params['release']:
+ print_template(layer_version_HEAD_template,params,fd)
+ else:
+ print_template(layer_version_template,params,fd)
+ lv_count += 1
+
+ print_str(epilog_template,fd)
+ fd.close()
+
+#################################
+# Generate oe-core.xml
+#
+
+def generate_oe_core():
+ fd = open('oe-core.xml','w')
+
+ params = {}
+ params['distro'] = 'nodistro'
+ print_template(prolog_template,params,fd)
+ print_str('',fd)
+
+ print_str(' <!-- Bitbake versions which correspond to the metadata release -->',fd)
+ for i,release in enumerate(current_releases):
+ params = {}
+ params['release'] = release[0]
+ params['Release'] = release[0]
+ params['bitbakeversion'] = release[6]
+ params['release_version'] = release[1]
+ if not (params['release'] in ('HEAD',)): # 'master',
+ params['release'] = params['release'][0].lower() + params['release'][1:]
+ params['name'] = params['release']
+ params['bitbake_id'] = str(i+1)
+ params['branch'] = params['release']
+ print_template(bitbakeversion_oecore_template,params,fd)
+ print_str('',fd)
+
+ print_str(' <!-- Releases available -->',fd)
+ for i,release in enumerate(current_releases):
+ params = {}
+ params['release'] = release[0]
+ params['Release'] = release[0]
+ params['release_version'] = release[1]
+ if not (params['release'] in ('HEAD',)): #'master',
+ params['release'] = params['release'][0].lower() + params['release'][1:]
+ params['h_release'] = '?h={{release}}'
+ params['name'] = params['release']
+ params['ra_count'] = str(i+1)
+ params['branch'] = params['release']
+
+ if 'HEAD' == params['release']:
+ params['help_source'] = 'with the version of OpenEmbedded that you have cloned or downloaded to your computer'
+ params['description'] = 'Local Openembedded'
+ params['name'] = 'local'
+ else:
+ params['help_source'] = 'using the tip of the &lt;a href=\\"https://cgit.openembedded.org/openembedded-core/log/{{h_release}}\\"&gt;OpenEmbedded {{Release}}&lt;/a&gt; branch'
+ params['description'] = 'Openembedded {{Release}}'
+ if 'master' == params['release']:
+ params['h_release'] = ''
+ params['description'] = 'OpenEmbedded core master'
+ params['Release'] = params['release']
+
+ print_template(releases_available_template,params,fd)
+ print_str('',fd)
+
+ print_str(' <!-- Default layers for each release -->',fd)
+ rdl_count = 1
+ for i,release in enumerate(current_releases):
+ for j,layer in enumerate(default_oe_core_layers):
+ params = {}
+ params['layer'] = layer
+ params['release'] = release[0]
+ params['Release'] = release[0]
+ params['release_version'] = release[1]
+ if not (params['release'] in ('master','HEAD')):
+ params['release'] = params['release'][0].lower() + params['release'][1:]
+ params['release_id'] = str(i+1)
+ params['rdl_count'] = str(rdl_count)
+ params['branch'] = params['release']
+ print_template(default_layers_template,params,fd)
+ rdl_count += 1
+ print_str('',fd)
+
+ print_str('',fd)
+ print_str(' <!-- Layer for the Local release -->',fd)
+ lv_count = 1
+ for i,layer in enumerate(default_oe_core_layers):
+ params = {}
+ params['layer'] = layer
+ params['layer_id'] = str(i+1)
+ params['vcs_url'] = 'git://git.openembedded.org/openembedded-core'
+ params['vcs_web_url'] = 'https://cgit.openembedded.org/openembedded-core'
+ params['vcs_web_tree_base_url'] = 'https://cgit.openembedded.org/openembedded-core/tree/%path%?h=%branch%'
+ params['vcs_web_file_base_url'] = 'https://cgit.openembedded.org/openembedded-core/tree/%path%?h=%branch%'
+ if i:
+ print_str('',fd)
+ print_template(layer_oe_core_template,params,fd)
+
+ print_template(layer_version_oe_core_template,params,fd)
+ print_str('',fd)
+
+ print_str(epilog_template,fd)
+ fd.close()
+
+#################################
+# Help
+#
+
+def list_releases():
+ print("Release ReleaseVer BitbakeVer Support Level")
+ print("========== =========== ========== ==============================================")
+ for release in current_releases:
+ print("%10s %10s %11s %s" % (release[0],release[1],release[6],release[4]))
+
+#################################
+# main
+#
+
+def main(argv):
+ global verbose
+
+ parser = argparse.ArgumentParser(description='gen_fixtures.py: generate the Toaster fixture files')
+ parser.add_argument('--poky', '-p', action='store_const', const='poky', dest='command', help='Generate the poky.xml file')
+ parser.add_argument('--oe-core', '-o', action='store_const', const='oe_core', dest='command', help='Generate the oe-core.xml file')
+ parser.add_argument('--all', '-a', action='store_const', const='all', dest='command', help='Generate all fixture files')
+ parser.add_argument('--list', '-l', action='store_const', const='list', dest='command', help='List the release table')
+ parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Enable verbose debugging output')
+ args = parser.parse_args()
+
+ verbose = args.verbose
+ if 'poky' == args.command:
+ generate_poky()
+ elif 'oe_core' == args.command:
+ generate_oe_core()
+ elif 'all' == args.command:
+ generate_poky()
+ generate_oe_core()
+ elif 'list' == args.command:
+ list_releases()
+
+ else:
+ print("No command for 'gen_fixtures.py' selected")
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/lib/toaster/orm/fixtures/oe-core.xml b/lib/toaster/orm/fixtures/oe-core.xml
index 026d94869..950f2a98a 100644
--- a/lib/toaster/orm/fixtures/oe-core.xml
+++ b/lib/toaster/orm/fixtures/oe-core.xml
@@ -8,9 +8,9 @@
<!-- Bitbake versions which correspond to the metadata release -->
<object model="orm.bitbakeversion" pk="1">
- <field type="CharField" name="name">dunfell</field>
+ <field type="CharField" name="name">kirkstone</field>
<field type="CharField" name="giturl">git://git.openembedded.org/bitbake</field>
- <field type="CharField" name="branch">1.46</field>
+ <field type="CharField" name="branch">2.0</field>
</object>
<object model="orm.bitbakeversion" pk="2">
<field type="CharField" name="name">HEAD</field>
@@ -23,18 +23,23 @@
<field type="CharField" name="branch">master</field>
</object>
<object model="orm.bitbakeversion" pk="4">
- <field type="CharField" name="name">gatesgarth</field>
+ <field type="CharField" name="name">mickledore</field>
+ <field type="CharField" name="giturl">git://git.openembedded.org/bitbake</field>
+ <field type="CharField" name="branch">2.4</field>
+ </object>
+ <object model="orm.bitbakeversion" pk="5">
+ <field type="CharField" name="name">dunfell</field>
<field type="CharField" name="giturl">git://git.openembedded.org/bitbake</field>
- <field type="CharField" name="branch">1.48</field>
+ <field type="CharField" name="branch">1.46</field>
</object>
<!-- Releases available -->
<object model="orm.release" pk="1">
- <field type="CharField" name="name">dunfell</field>
- <field type="CharField" name="description">Openembedded Dunfell</field>
+ <field type="CharField" name="name">kirkstone</field>
+ <field type="CharField" name="description">Openembedded Kirkstone</field>
<field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">1</field>
- <field type="CharField" name="branch_name">dunfell</field>
- <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href=\"http://cgit.openembedded.org/openembedded-core/log/?h=dunfell\"&gt;OpenEmbedded Dunfell&lt;/a&gt; branch.</field>
+ <field type="CharField" name="branch_name">kirkstone</field>
+ <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href=\"https://cgit.openembedded.org/openembedded-core/log/?h=kirkstone\"&gt;OpenEmbedded Kirkstone&lt;/a&gt; branch.</field>
</object>
<object model="orm.release" pk="2">
<field type="CharField" name="name">local</field>
@@ -48,14 +53,21 @@
<field type="CharField" name="description">OpenEmbedded core master</field>
<field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">3</field>
<field type="CharField" name="branch_name">master</field>
- <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href=\"http://cgit.openembedded.org/openembedded-core/log/\"&gt;OpenEmbedded master&lt;/a&gt; branch.</field>
+ <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href=\"https://cgit.openembedded.org/openembedded-core/log/\"&gt;OpenEmbedded master&lt;/a&gt; branch.</field>
</object>
<object model="orm.release" pk="4">
- <field type="CharField" name="name">gatesgarth</field>
- <field type="CharField" name="description">Openembedded Gatesgarth</field>
+ <field type="CharField" name="name">mickledore</field>
+ <field type="CharField" name="description">Openembedded Mickledore</field>
<field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">4</field>
- <field type="CharField" name="branch_name">gatesgarth</field>
- <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href=\"http://cgit.openembedded.org/openembedded-core/log/?h=gatesgarth\"&gt;OpenEmbedded Gatesgarth&lt;/a&gt; branch.</field>
+ <field type="CharField" name="branch_name">mickledore</field>
+ <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href=\"https://cgit.openembedded.org/openembedded-core/log/?h=mickledore\"&gt;OpenEmbedded Mickledore&lt;/a&gt; branch.</field>
+ </object>
+ <object model="orm.release" pk="5">
+ <field type="CharField" name="name">dunfell</field>
+ <field type="CharField" name="description">Openembedded Dunfell</field>
+ <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">5</field>
+ <field type="CharField" name="branch_name">dunfell</field>
+ <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href=\"https://cgit.openembedded.org/openembedded-core/log/?h=dunfell\"&gt;OpenEmbedded Dunfell&lt;/a&gt; branch.</field>
</object>
<!-- Default layers for each release -->
@@ -75,15 +87,19 @@
<field rel="ManyToOneRel" to="orm.release" name="release">4</field>
<field type="CharField" name="layer_name">openembedded-core</field>
</object>
+ <object model="orm.releasedefaultlayer" pk="5">
+ <field rel="ManyToOneRel" to="orm.release" name="release">5</field>
+ <field type="CharField" name="layer_name">openembedded-core</field>
+ </object>
<!-- Layer for the Local release -->
<object model="orm.layer" pk="1">
<field type="CharField" name="name">openembedded-core</field>
<field type="CharField" name="vcs_url">git://git.openembedded.org/openembedded-core</field>
- <field type="CharField" name="vcs_web_url">http://cgit.openembedded.org/openembedded-core</field>
- <field type="CharField" name="vcs_web_tree_base_url">http://cgit.openembedded.org/openembedded-core/tree/%path%?h=%branch%</field>
- <field type="CharField" name="vcs_web_file_base_url">http://cgit.openembedded.org/openembedded-core/tree/%path%?h=%branch%</field>
+ <field type="CharField" name="vcs_web_url">https://cgit.openembedded.org/openembedded-core</field>
+ <field type="CharField" name="vcs_web_tree_base_url">https://cgit.openembedded.org/openembedded-core/tree/%path%?h=%branch%</field>
+ <field type="CharField" name="vcs_web_file_base_url">https://cgit.openembedded.org/openembedded-core/tree/%path%?h=%branch%</field>
</object>
<object model="orm.layer_version" pk="1">
<field rel="ManyToOneRel" to="orm.layer" name="layer">1</field>
diff --git a/lib/toaster/orm/fixtures/poky.xml b/lib/toaster/orm/fixtures/poky.xml
index a468a54c4..121e52fd4 100644
--- a/lib/toaster/orm/fixtures/poky.xml
+++ b/lib/toaster/orm/fixtures/poky.xml
@@ -8,9 +8,9 @@
<!-- Bitbake versions which correspond to the metadata release -->
<object model="orm.bitbakeversion" pk="1">
- <field type="CharField" name="name">dunfell</field>
+ <field type="CharField" name="name">kirkstone</field>
<field type="CharField" name="giturl">git://git.yoctoproject.org/poky</field>
- <field type="CharField" name="branch">dunfell</field>
+ <field type="CharField" name="branch">kirkstone</field>
<field type="CharField" name="dirpath">bitbake</field>
</object>
<object model="orm.bitbakeversion" pk="2">
@@ -26,20 +26,26 @@
<field type="CharField" name="dirpath">bitbake</field>
</object>
<object model="orm.bitbakeversion" pk="4">
- <field type="CharField" name="name">gatesgarth</field>
+ <field type="CharField" name="name">mickledore</field>
<field type="CharField" name="giturl">git://git.yoctoproject.org/poky</field>
- <field type="CharField" name="branch">gatesgarth</field>
+ <field type="CharField" name="branch">mickledore</field>
+ <field type="CharField" name="dirpath">bitbake</field>
+ </object>
+ <object model="orm.bitbakeversion" pk="5">
+ <field type="CharField" name="name">dunfell</field>
+ <field type="CharField" name="giturl">git://git.yoctoproject.org/poky</field>
+ <field type="CharField" name="branch">dunfell</field>
<field type="CharField" name="dirpath">bitbake</field>
</object>
<!-- Releases available -->
<object model="orm.release" pk="1">
- <field type="CharField" name="name">dunfell</field>
- <field type="CharField" name="description">Yocto Project 3.1 "Dunfell"</field>
+ <field type="CharField" name="name">kirkstone</field>
+ <field type="CharField" name="description">Yocto Project 4.0 "Kirkstone"</field>
<field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">1</field>
- <field type="CharField" name="branch_name">dunfell</field>
- <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href="http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=dunfell"&gt;Yocto Project Dunfell branch&lt;/a&gt;.</field>
+ <field type="CharField" name="branch_name">kirkstone</field>
+ <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href="https://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=kirkstone"&gt;Yocto Project Kirkstone branch&lt;/a&gt;.</field>
</object>
<object model="orm.release" pk="2">
<field type="CharField" name="name">local</field>
@@ -53,14 +59,21 @@
<field type="CharField" name="description">Yocto Project master</field>
<field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">3</field>
<field type="CharField" name="branch_name">master</field>
- <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href="http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/"&gt;Yocto Project Master branch&lt;/a&gt;.</field>
+ <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href="https://git.yoctoproject.org/cgit/cgit.cgi/poky/log/"&gt;Yocto Project Master branch&lt;/a&gt;.</field>
</object>
<object model="orm.release" pk="4">
- <field type="CharField" name="name">gatesgarth</field>
- <field type="CharField" name="description">Yocto Project 3.2 "Gatesgarth"</field>
+ <field type="CharField" name="name">mickledore</field>
+ <field type="CharField" name="description">Yocto Project 4.2 "Mickledore"</field>
<field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">4</field>
- <field type="CharField" name="branch_name">gatesgarth</field>
- <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href="http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=gatesgarth"&gt;Yocto Project Gatesgarth branch&lt;/a&gt;.</field>
+ <field type="CharField" name="branch_name">mickledore</field>
+ <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href="https://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=mickledore"&gt;Yocto Project Mickledore branch&lt;/a&gt;.</field>
+ </object>
+ <object model="orm.release" pk="5">
+ <field type="CharField" name="name">dunfell</field>
+ <field type="CharField" name="description">Yocto Project 3.1 "Dunfell"</field>
+ <field rel="ManyToOneRel" to="orm.bitbakeversion" name="bitbake_version">5</field>
+ <field type="CharField" name="branch_name">dunfell</field>
+ <field type="TextField" name="helptext">Toaster will run your builds using the tip of the &lt;a href="https://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=dunfell"&gt;Yocto Project Dunfell branch&lt;/a&gt;.</field>
</object>
<!-- Default project layers for each release -->
@@ -112,6 +125,18 @@
<field rel="ManyToOneRel" to="orm.release" name="release">4</field>
<field type="CharField" name="layer_name">meta-yocto-bsp</field>
</object>
+ <object model="orm.releasedefaultlayer" pk="13">
+ <field rel="ManyToOneRel" to="orm.release" name="release">5</field>
+ <field type="CharField" name="layer_name">openembedded-core</field>
+ </object>
+ <object model="orm.releasedefaultlayer" pk="14">
+ <field rel="ManyToOneRel" to="orm.release" name="release">5</field>
+ <field type="CharField" name="layer_name">meta-poky</field>
+ </object>
+ <object model="orm.releasedefaultlayer" pk="15">
+ <field rel="ManyToOneRel" to="orm.release" name="release">5</field>
+ <field type="CharField" name="layer_name">meta-yocto-bsp</field>
+ </object>
<!-- Default layers provided by poky
openembedded-core
@@ -122,15 +147,15 @@
<field type="CharField" name="name">openembedded-core</field>
<field type="CharField" name="layer_index_url"></field>
<field type="CharField" name="vcs_url">git://git.yoctoproject.org/poky</field>
- <field type="CharField" name="vcs_web_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky</field>
- <field type="CharField" name="vcs_web_tree_base_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field>
- <field type="CharField" name="vcs_web_file_base_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field>
+ <field type="CharField" name="vcs_web_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky</field>
+ <field type="CharField" name="vcs_web_tree_base_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field>
+ <field type="CharField" name="vcs_web_file_base_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field>
</object>
<object model="orm.layer_version" pk="1">
<field rel="ManyToOneRel" to="orm.layer" name="layer">1</field>
<field type="IntegerField" name="layer_source">0</field>
<field rel="ManyToOneRel" to="orm.release" name="release">1</field>
- <field type="CharField" name="branch">dunfell</field>
+ <field type="CharField" name="branch">kirkstone</field>
<field type="CharField" name="dirpath">meta</field>
</object>
<object model="orm.layer_version" pk="2">
@@ -152,7 +177,14 @@
<field rel="ManyToOneRel" to="orm.layer" name="layer">1</field>
<field type="IntegerField" name="layer_source">0</field>
<field rel="ManyToOneRel" to="orm.release" name="release">4</field>
- <field type="CharField" name="branch">gatesgarth</field>
+ <field type="CharField" name="branch">mickledore</field>
+ <field type="CharField" name="dirpath">meta</field>
+ </object>
+ <object model="orm.layer_version" pk="5">
+ <field rel="ManyToOneRel" to="orm.layer" name="layer">1</field>
+ <field type="IntegerField" name="layer_source">0</field>
+ <field rel="ManyToOneRel" to="orm.release" name="release">5</field>
+ <field type="CharField" name="branch">dunfell</field>
<field type="CharField" name="dirpath">meta</field>
</object>
@@ -160,18 +192,18 @@
<field type="CharField" name="name">meta-poky</field>
<field type="CharField" name="layer_index_url"></field>
<field type="CharField" name="vcs_url">git://git.yoctoproject.org/poky</field>
- <field type="CharField" name="vcs_web_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky</field>
- <field type="CharField" name="vcs_web_tree_base_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field>
- <field type="CharField" name="vcs_web_file_base_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field>
+ <field type="CharField" name="vcs_web_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky</field>
+ <field type="CharField" name="vcs_web_tree_base_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field>
+ <field type="CharField" name="vcs_web_file_base_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field>
</object>
- <object model="orm.layer_version" pk="5">
+ <object model="orm.layer_version" pk="6">
<field rel="ManyToOneRel" to="orm.layer" name="layer">2</field>
<field type="IntegerField" name="layer_source">0</field>
<field rel="ManyToOneRel" to="orm.release" name="release">1</field>
- <field type="CharField" name="branch">dunfell</field>
+ <field type="CharField" name="branch">kirkstone</field>
<field type="CharField" name="dirpath">meta-poky</field>
</object>
- <object model="orm.layer_version" pk="6">
+ <object model="orm.layer_version" pk="7">
<field rel="ManyToOneRel" to="orm.layer" name="layer">2</field>
<field type="IntegerField" name="layer_source">0</field>
<field rel="ManyToOneRel" to="orm.release" name="release">2</field>
@@ -179,18 +211,25 @@
<field type="CharField" name="commit">HEAD</field>
<field type="CharField" name="dirpath">meta-poky</field>
</object>
- <object model="orm.layer_version" pk="7">
+ <object model="orm.layer_version" pk="8">
<field rel="ManyToOneRel" to="orm.layer" name="layer">2</field>
<field type="IntegerField" name="layer_source">0</field>
<field rel="ManyToOneRel" to="orm.release" name="release">3</field>
<field type="CharField" name="branch">master</field>
<field type="CharField" name="dirpath">meta-poky</field>
</object>
- <object model="orm.layer_version" pk="8">
+ <object model="orm.layer_version" pk="9">
<field rel="ManyToOneRel" to="orm.layer" name="layer">2</field>
<field type="IntegerField" name="layer_source">0</field>
<field rel="ManyToOneRel" to="orm.release" name="release">4</field>
- <field type="CharField" name="branch">gatesgarth</field>
+ <field type="CharField" name="branch">mickledore</field>
+ <field type="CharField" name="dirpath">meta-poky</field>
+ </object>
+ <object model="orm.layer_version" pk="10">
+ <field rel="ManyToOneRel" to="orm.layer" name="layer">2</field>
+ <field type="IntegerField" name="layer_source">0</field>
+ <field rel="ManyToOneRel" to="orm.release" name="release">5</field>
+ <field type="CharField" name="branch">dunfell</field>
<field type="CharField" name="dirpath">meta-poky</field>
</object>
@@ -198,18 +237,18 @@
<field type="CharField" name="name">meta-yocto-bsp</field>
<field type="CharField" name="layer_index_url"></field>
<field type="CharField" name="vcs_url">git://git.yoctoproject.org/poky</field>
- <field type="CharField" name="vcs_web_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky</field>
- <field type="CharField" name="vcs_web_tree_base_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field>
- <field type="CharField" name="vcs_web_file_base_url">http://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field>
+ <field type="CharField" name="vcs_web_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky</field>
+ <field type="CharField" name="vcs_web_tree_base_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field>
+ <field type="CharField" name="vcs_web_file_base_url">https://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch%</field>
</object>
- <object model="orm.layer_version" pk="9">
+ <object model="orm.layer_version" pk="11">
<field rel="ManyToOneRel" to="orm.layer" name="layer">3</field>
<field type="IntegerField" name="layer_source">0</field>
<field rel="ManyToOneRel" to="orm.release" name="release">1</field>
- <field type="CharField" name="branch">dunfell</field>
+ <field type="CharField" name="branch">kirkstone</field>
<field type="CharField" name="dirpath">meta-yocto-bsp</field>
</object>
- <object model="orm.layer_version" pk="10">
+ <object model="orm.layer_version" pk="12">
<field rel="ManyToOneRel" to="orm.layer" name="layer">3</field>
<field type="IntegerField" name="layer_source">0</field>
<field rel="ManyToOneRel" to="orm.release" name="release">2</field>
@@ -217,18 +256,25 @@
<field type="CharField" name="commit">HEAD</field>
<field type="CharField" name="dirpath">meta-yocto-bsp</field>
</object>
- <object model="orm.layer_version" pk="11">
+ <object model="orm.layer_version" pk="13">
<field rel="ManyToOneRel" to="orm.layer" name="layer">3</field>
<field type="IntegerField" name="layer_source">0</field>
<field rel="ManyToOneRel" to="orm.release" name="release">3</field>
<field type="CharField" name="branch">master</field>
<field type="CharField" name="dirpath">meta-yocto-bsp</field>
</object>
- <object model="orm.layer_version" pk="12">
+ <object model="orm.layer_version" pk="14">
<field rel="ManyToOneRel" to="orm.layer" name="layer">3</field>
<field type="IntegerField" name="layer_source">0</field>
<field rel="ManyToOneRel" to="orm.release" name="release">4</field>
- <field type="CharField" name="branch">gatesgarth</field>
+ <field type="CharField" name="branch">mickledore</field>
+ <field type="CharField" name="dirpath">meta-yocto-bsp</field>
+ </object>
+ <object model="orm.layer_version" pk="15">
+ <field rel="ManyToOneRel" to="orm.layer" name="layer">3</field>
+ <field type="IntegerField" name="layer_source">0</field>
+ <field rel="ManyToOneRel" to="orm.release" name="release">5</field>
+ <field type="CharField" name="branch">dunfell</field>
<field type="CharField" name="dirpath">meta-yocto-bsp</field>
</object>
</django-objects>
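
For context, Toaster consumes this fixture through Django's fixture loader. A minimal sketch, assuming a configured Toaster environment (DJANGO_SETTINGS_MODULE pointing at toastermain.settings), of loading poky.xml and listing the releases it defines:

    import django
    django.setup()  # requires DJANGO_SETTINGS_MODULE to be set beforehand

    from django.core.management import call_command
    from orm.models import Release

    # "poky" resolves to orm/fixtures/poky.xml on the fixture search path
    call_command("loaddata", "poky")
    for rel in Release.objects.order_by("pk"):
        print(rel.pk, rel.name, rel.branch_name)
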
diff --git a/lib/toaster/orm/fixtures/settings.xml b/lib/toaster/orm/fixtures/settings.xml
index 78c0fdca7..02c26a697 100644
--- a/lib/toaster/orm/fixtures/settings.xml
+++ b/lib/toaster/orm/fixtures/settings.xml
@@ -12,14 +12,14 @@
</object>
<object model="orm.toastersetting" pk="4">
<field type="CharField" name="name">DEFCONF_MACHINE</field>
- <field type="CharField" name="value">qemux86</field>
+ <field type="CharField" name="value">qemux86-64</field>
</object>
<object model="orm.toastersetting" pk="5">
<field type="CharField" name="name">DEFCONF_SSTATE_DIR</field>
<field type="CharField" name="value">${TOPDIR}/../sstate-cache</field>
</object>
<object model="orm.toastersetting" pk="6">
- <field type="CharField" name="name">DEFCONF_IMAGE_INSTALL_append</field>
+ <field type="CharField" name="name">DEFCONF_IMAGE_INSTALL:append</field>
<field type="CharField" name="value"></field>
</object>
<object model="orm.toastersetting" pk="7">
diff --git a/lib/toaster/orm/management/commands/lsupdates.py b/lib/toaster/orm/management/commands/lsupdates.py
index 2fbd7be3d..6d64830eb 100644
--- a/lib/toaster/orm/management/commands/lsupdates.py
+++ b/lib/toaster/orm/management/commands/lsupdates.py
@@ -21,7 +21,7 @@ import threading
import time
logger = logging.getLogger("toaster")
-DEFAULT_LAYERINDEX_SERVER = "http://layers.openembedded.org/layerindex/api/"
+DEFAULT_LAYERINDEX_SERVER = "https://layers.openembedded.org/layerindex/api/"
# Add path to bitbake modules for layerindexlib
# lib/toaster/orm/management/commands/lsupdates.py (abspath)
@@ -40,7 +40,7 @@ class Spinner(threading.Thread):
""" A simple progress spinner to indicate download/parsing is happening"""
def __init__(self, *args, **kwargs):
super(Spinner, self).__init__(*args, **kwargs)
- self.setDaemon(True)
+ self.daemon = True
self.signal = True
def run(self):
@@ -87,13 +87,13 @@ class Command(BaseCommand):
# update branches; only those that we already have names listed in the
# Releases table
- whitelist_branch_names = [rel.branch_name
- for rel in Release.objects.all()]
- if len(whitelist_branch_names) == 0:
+ allowed_branch_names = [rel.branch_name
+ for rel in Release.objects.all()]
+ if len(allowed_branch_names) == 0:
raise Exception("Failed to make list of branches to fetch")
logger.info("Fetching metadata for %s",
- " ".join(whitelist_branch_names))
+ " ".join(allowed_branch_names))
# We require a non-empty bb.data, but we can fake it with a dictionary
layerindex = layerindexlib.LayerIndex({"DUMMY" : "VALUE"})
@@ -101,8 +101,8 @@ class Command(BaseCommand):
http_progress = Spinner()
http_progress.start()
- if whitelist_branch_names:
- url_branches = ";branch=%s" % ','.join(whitelist_branch_names)
+ if allowed_branch_names:
+ url_branches = ";branch=%s" % ','.join(allowed_branch_names)
else:
url_branches = ""
layerindex.load_layerindex("%s%s" % (self.apiurl, url_branches))
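
For reference, a minimal sketch of the branch-filtered layer index URL this command now assembles from the renamed allowed_branch_names list (the branch names below are illustrative):

    DEFAULT_LAYERINDEX_SERVER = "https://layers.openembedded.org/layerindex/api/"

    allowed_branch_names = ["kirkstone", "mickledore"]  # normally from Release.objects.all()
    if allowed_branch_names:
        url_branches = ";branch=%s" % ','.join(allowed_branch_names)
    else:
        url_branches = ""
    # -> https://layers.openembedded.org/layerindex/api/;branch=kirkstone,mickledore
    print("%s%s" % (DEFAULT_LAYERINDEX_SERVER, url_branches))
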
diff --git a/lib/toaster/orm/migrations/0020_models_bigautofield.py b/lib/toaster/orm/migrations/0020_models_bigautofield.py
new file mode 100644
index 000000000..f19b5dddb
--- /dev/null
+++ b/lib/toaster/orm/migrations/0020_models_bigautofield.py
@@ -0,0 +1,173 @@
+# Generated by Django 3.2.12 on 2022-03-06 03:28
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('orm', '0019_django_2_2'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='bitbakeversion',
+ name='id',
+ field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+ ),
+ migrations.AlterField(
+ model_name='build',
+ name='id',
+ field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+ ),
+ migrations.AlterField(
+ model_name='distro',
+ name='id',
+ field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+ ),
+ migrations.AlterField(
+ model_name='helptext',
+ name='id',
+ field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+ ),
+ migrations.AlterField(
+ model_name='layer',
+ name='id',
+ field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+ ),
+ migrations.AlterField(
+ model_name='layer_version',
+ name='id',
+ field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+ ),
+ migrations.AlterField(
+ model_name='layerversiondependency',
+ name='id',
+ field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+ ),
+ migrations.AlterField(
+ model_name='logmessage',
+ name='id',
+ field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+ ),
+ migrations.AlterField(
+ model_name='machine',
+ name='id',
+ field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+ ),
+ migrations.AlterField(
+ model_name='package',
+ name='id',
+ field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+ ),
+ migrations.AlterField(
+ model_name='package_dependency',
+ name='id',
+ field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+ ),
+ migrations.AlterField(
+ model_name='package_file',
+ name='id',
+ field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+ ),
+ migrations.AlterField(
+ model_name='project',
+ name='id',
+ field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+ ),
+ migrations.AlterField(
+ model_name='projectlayer',
+ name='id',
+ field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+ ),
+ migrations.AlterField(
+ model_name='projecttarget',
+ name='id',
+ field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+ ),
+ migrations.AlterField(
+ model_name='projectvariable',
+ name='id',
+ field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+ ),
+ migrations.AlterField(
+ model_name='provides',
+ name='id',
+ field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+ ),
+ migrations.AlterField(
+ model_name='recipe',
+ name='id',
+ field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+ ),
+ migrations.AlterField(
+ model_name='recipe_dependency',
+ name='id',
+ field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+ ),
+ migrations.AlterField(
+ model_name='release',
+ name='id',
+ field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+ ),
+ migrations.AlterField(
+ model_name='releasedefaultlayer',
+ name='id',
+ field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+ ),
+ migrations.AlterField(
+ model_name='target',
+ name='id',
+ field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+ ),
+ migrations.AlterField(
+ model_name='target_file',
+ name='id',
+ field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+ ),
+ migrations.AlterField(
+ model_name='target_image_file',
+ name='id',
+ field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+ ),
+ migrations.AlterField(
+ model_name='target_installed_package',
+ name='id',
+ field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+ ),
+ migrations.AlterField(
+ model_name='targetkernelfile',
+ name='id',
+ field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+ ),
+ migrations.AlterField(
+ model_name='targetsdkfile',
+ name='id',
+ field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+ ),
+ migrations.AlterField(
+ model_name='task',
+ name='id',
+ field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+ ),
+ migrations.AlterField(
+ model_name='task_dependency',
+ name='id',
+ field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+ ),
+ migrations.AlterField(
+ model_name='toastersetting',
+ name='id',
+ field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+ ),
+ migrations.AlterField(
+ model_name='variable',
+ name='id',
+ field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+ ),
+ migrations.AlterField(
+ model_name='variablehistory',
+ name='id',
+ field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+ ),
+ ]
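
This migration follows from Django 3.2 flagging models with an implicit primary key type (warning models.W042). A minimal sketch of the settings change that usually accompanies such a migration, so that future models default to BigAutoField without regenerating it (placement in toastermain/settings.py is an assumption based on stock Django conventions):

    # settings fragment (hypothetical placement)
    DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
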
diff --git a/lib/toaster/orm/migrations/0021_eventlogsimports.py b/lib/toaster/orm/migrations/0021_eventlogsimports.py
new file mode 100644
index 000000000..328eb5753
--- /dev/null
+++ b/lib/toaster/orm/migrations/0021_eventlogsimports.py
@@ -0,0 +1,22 @@
+# Generated by Django 4.2.5 on 2023-11-23 18:44
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('orm', '0020_models_bigautofield'),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name='EventLogsImports',
+ fields=[
+ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('name', models.CharField(max_length=255)),
+ ('imported', models.BooleanField(default=False)),
+ ('build_id', models.IntegerField(blank=True, null=True)),
+ ],
+ ),
+ ]
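
A minimal usage sketch for the table this migration creates; the file name and build id below are illustrative, and the real callers live in the event log import path:

    from orm.models import EventLogsImports

    # Mark an event log as imported and record the build it produced
    entry, _ = EventLogsImports.objects.get_or_create(name="bitbake_eventlog.json")
    entry.imported = True
    entry.build_id = 42  # hypothetical build primary key
    entry.save()
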
diff --git a/lib/toaster/orm/models.py b/lib/toaster/orm/models.py
index 7f7e922ad..19c968620 100644
--- a/lib/toaster/orm/models.py
+++ b/lib/toaster/orm/models.py
@@ -58,7 +58,6 @@ if 'sqlite' in settings.DATABASES['default']['ENGINE']:
return _base_insert(self, *args, **kwargs)
QuerySet._insert = _insert
- from django.utils import six
def _create_object_from_params(self, lookup, params):
"""
Tries to create an object using passed params.
@@ -108,7 +107,7 @@ class ToasterSetting(models.Model):
class ProjectManager(models.Manager):
- def create_project(self, name, release, existing_project=None):
+ def create_project(self, name, release, existing_project=None, imported=False):
if existing_project and (release is not None):
prj = existing_project
prj.bitbake_version = release.bitbake_version
@@ -135,19 +134,19 @@ class ProjectManager(models.Manager):
if release is None:
return prj
-
- for rdl in release.releasedefaultlayer_set.all():
- lv = Layer_Version.objects.filter(
- layer__name=rdl.layer_name,
- release=release).first()
-
- if lv:
- ProjectLayer.objects.create(project=prj,
- layercommit=lv,
- optional=False)
- else:
- logger.warning("Default project layer %s not found" %
- rdl.layer_name)
+ if not imported:
+ for rdl in release.releasedefaultlayer_set.all():
+ lv = Layer_Version.objects.filter(
+ layer__name=rdl.layer_name,
+ release=release).first()
+
+ if lv:
+ ProjectLayer.objects.create(project=prj,
+ layercommit=lv,
+ optional=False)
+ else:
+ logger.warning("Default project layer %s not found" %
+ rdl.layer_name)
return prj
@@ -1390,9 +1389,6 @@ class Machine(models.Model):
return "Machine " + self.name + "(" + self.description + ")"
-
-
-
class BitbakeVersion(models.Model):
name = models.CharField(max_length=32, unique = True)
@@ -1717,9 +1713,9 @@ class CustomImageRecipe(Recipe):
def generate_recipe_file_contents(self):
"""Generate the contents for the recipe file."""
- # If we have no excluded packages we only need to _append
+ # If we have no excluded packages we only need to :append
if self.excludes_set.count() == 0:
- packages_conf = "IMAGE_INSTALL_append = \" "
+ packages_conf = "IMAGE_INSTALL:append = \" "
for pkg in self.appends_set.all():
packages_conf += pkg.name+' '
@@ -1734,7 +1730,7 @@ class CustomImageRecipe(Recipe):
packages_conf += "\""
base_recipe_path = self.get_base_recipe_file()
- if base_recipe_path:
+ if base_recipe_path and os.path.isfile(base_recipe_path):
base_recipe = open(base_recipe_path, 'r').read()
else:
# Pass back None to trigger error message to user
@@ -1854,6 +1850,8 @@ def signal_runbuilds():
os.kill(int(pidf.read()), SIGUSR1)
except FileNotFoundError:
logger.info("Stopping existing runbuilds: no current process found")
+ except ProcessLookupError:
+        logger.warning("Stopping existing runbuilds: no such process found")
class Distro(models.Model):
search_allowed_fields = ["name", "description", "layer_version__layer__name"]
@@ -1870,6 +1868,15 @@ class Distro(models.Model):
def __unicode__(self):
return "Distro " + self.name + "(" + self.description + ")"
+class EventLogsImports(models.Model):
+ name = models.CharField(max_length=255)
+ imported = models.BooleanField(default=False)
+ build_id = models.IntegerField(blank=True, null=True)
+
+ def __str__(self):
+ return self.name
+
+
django.db.models.signals.post_save.connect(invalidate_cache)
django.db.models.signals.post_delete.connect(invalidate_cache)
django.db.models.signals.m2m_changed.connect(invalidate_cache)
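
A minimal sketch of the new imported flag on ProjectManager.create_project: imported projects skip the release-default layer setup because their layer list comes from the imported event log (the release name below is illustrative):

    from orm.models import Project, Release

    release = Release.objects.get(name="kirkstone")
    # imported=True suppresses the releasedefaultlayer_set loop shown above
    prj = Project.objects.create_project("imported-build", release, imported=True)
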
diff --git a/lib/toaster/pytest.ini b/lib/toaster/pytest.ini
new file mode 100644
index 000000000..071c65fcd
--- /dev/null
+++ b/lib/toaster/pytest.ini
@@ -0,0 +1,16 @@
+# -- FILE: pytest.ini (or tox.ini)
+[pytest]
+# --create-db - force re creation of the test database
+# https://pytest-django.readthedocs.io/en/latest/database.html#create-db-force-re-creation-of-the-test-database
+
+# --html=report.html --self-contained-html
+# https://docs.pytest.org/en/latest/usage.html#creating-html-reports
+# https://pytest-html.readthedocs.io/en/latest/user_guide.html#creating-a-self-contained-report
+addopts = --create-db --html="Toaster Tests Report.html" --self-contained-html
+
+# Define environment variables using pytest-env
+# A pytest plugin that enables you to set environment variables in the pytest.ini file.
+# https://pypi.org/project/pytest-env/
+env =
+ TOASTER_BUILDSERVER=1
+ DJANGO_SETTINGS_MODULE=toastermain.settings_test
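
A minimal sketch of driving the suite with this configuration from Python, equivalent to running pytest from lib/toaster; it assumes the pytest-django, pytest-env and pytest-html plugins are installed, as the options above require:

    import pytest

    # pytest picks up pytest.ini from the current directory (lib/toaster),
    # applies addopts, and exports the env section before collecting tests
    raise SystemExit(pytest.main(["tests/browser/"]))
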
diff --git a/lib/toaster/tests/browser/selenium_helpers_base.py b/lib/toaster/tests/browser/selenium_helpers_base.py
index 644d45fe5..393be7549 100644
--- a/lib/toaster/tests/browser/selenium_helpers_base.py
+++ b/lib/toaster/tests/browser/selenium_helpers_base.py
@@ -19,11 +19,15 @@ import os
import time
import unittest
+import pytest
from selenium import webdriver
+from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
+from selenium.webdriver.common.by import By
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.common.exceptions import NoSuchElementException, \
- StaleElementReferenceException, TimeoutException
+ StaleElementReferenceException, TimeoutException, \
+ SessionNotCreatedException
def create_selenium_driver(cls,browser='chrome'):
# set default browser string based on env (if available)
@@ -32,9 +36,32 @@ def create_selenium_driver(cls,browser='chrome'):
browser = env_browser
if browser == 'chrome':
- return webdriver.Chrome(
- service_args=["--verbose", "--log-path=selenium.log"]
- )
+ options = webdriver.ChromeOptions()
+ options.add_argument('--headless')
+ options.add_argument('--disable-infobars')
+ options.add_argument('--disable-dev-shm-usage')
+ options.add_argument('--no-sandbox')
+ options.add_argument('--remote-debugging-port=9222')
+ try:
+ return webdriver.Chrome(options=options)
+ except SessionNotCreatedException as e:
+ exit_message = "Halting tests prematurely to avoid cascading errors."
+ # check if chrome / chromedriver exists
+ chrome_path = os.popen("find ~/.cache/selenium/chrome/ -name 'chrome' -type f -print -quit").read().strip()
+ if not chrome_path:
+ pytest.exit(f"Failed to install/find chrome.\n{exit_message}")
+ chromedriver_path = os.popen("find ~/.cache/selenium/chromedriver/ -name 'chromedriver' -type f -print -quit").read().strip()
+ if not chromedriver_path:
+ pytest.exit(f"Failed to install/find chromedriver.\n{exit_message}")
+            # check whether the library dependencies of each are fulfilled
+ depends_chrome = os.popen(f"ldd {chrome_path} | grep 'not found'").read().strip()
+ if depends_chrome:
+ pytest.exit(f"Missing chrome dependencies.\n{depends_chrome}\n{exit_message}")
+ depends_chromedriver = os.popen(f"ldd {chromedriver_path} | grep 'not found'").read().strip()
+ if depends_chromedriver:
+ pytest.exit(f"Missing chromedriver dependencies.\n{depends_chromedriver}\n{exit_message}")
+ # print original error otherwise
+ pytest.exit(f"Failed to start chromedriver.\n{e}\n{exit_message}")
elif browser == 'firefox':
return webdriver.Firefox()
elif browser == 'marionette':
@@ -66,7 +93,9 @@ class Wait(WebDriverWait):
_TIMEOUT = 10
_POLL_FREQUENCY = 0.5
- def __init__(self, driver):
+ def __init__(self, driver, timeout=_TIMEOUT, poll=_POLL_FREQUENCY):
+ self._TIMEOUT = timeout
+ self._POLL_FREQUENCY = poll
super(Wait, self).__init__(driver, self._TIMEOUT, self._POLL_FREQUENCY)
def until(self, method, message=''):
@@ -138,6 +167,8 @@ class SeleniumTestCaseBase(unittest.TestCase):
""" Clean up webdriver driver """
cls.driver.quit()
+ # Allow driver resources to be properly freed before proceeding with further tests
+ time.sleep(5)
super(SeleniumTestCaseBase, cls).tearDownClass()
def get(self, url):
@@ -151,13 +182,20 @@ class SeleniumTestCaseBase(unittest.TestCase):
abs_url = '%s%s' % (self.live_server_url, url)
self.driver.get(abs_url)
+ try: # Ensure page is loaded before proceeding
+ self.wait_until_visible("#global-nav", poll=3)
+        except (NoSuchElementException, TimeoutException):
+            self.driver.implicitly_wait(3)
+
def find(self, selector):
""" Find single element by CSS selector """
- return self.driver.find_element_by_css_selector(selector)
+ return self.driver.find_element(By.CSS_SELECTOR, selector)
def find_all(self, selector):
""" Find all elements matching CSS selector """
- return self.driver.find_elements_by_css_selector(selector)
+ return self.driver.find_elements(By.CSS_SELECTOR, selector)
def element_exists(self, selector):
"""
@@ -170,18 +208,34 @@ class SeleniumTestCaseBase(unittest.TestCase):
""" Return the element which currently has focus on the page """
return self.driver.switch_to.active_element
- def wait_until_present(self, selector):
+ def wait_until_present(self, selector, poll=0.5):
""" Wait until element matching CSS selector is on the page """
is_present = lambda driver: self.find(selector)
msg = 'An element matching "%s" should be on the page' % selector
- element = Wait(self.driver).until(is_present, msg)
+ element = Wait(self.driver, poll=poll).until(is_present, msg)
+ if poll > 2:
+            time.sleep(poll) # element needs more delay to become present
return element
- def wait_until_visible(self, selector):
+ def wait_until_visible(self, selector, poll=1):
""" Wait until element matching CSS selector is visible on the page """
is_visible = lambda driver: self.find(selector).is_displayed()
msg = 'An element matching "%s" should be visible' % selector
- Wait(self.driver).until(is_visible, msg)
+ Wait(self.driver, poll=poll).until(is_visible, msg)
+ time.sleep(poll) # wait for visibility to settle
+ return self.find(selector)
+
+ def wait_until_clickable(self, selector, poll=1):
+ """ Wait until element matching CSS selector is visible on the page """
+ WebDriverWait(
+ self.driver,
+ Wait._TIMEOUT,
+ poll_frequency=poll
+ ).until(
+            EC.element_to_be_clickable(
+                (By.ID, selector.removeprefix('#'))
+            )
+        )
return self.find(selector)
def wait_until_focused(self, selector):
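
A minimal usage sketch of the reworked wait helpers; the selectors are illustrative, and `test` stands in for a SeleniumTestCaseBase instance:

    def open_edit_columns(test):
        # wait for the table body to render, then for the
        # edit-columns button to accept clicks
        test.wait_until_visible('#allbuildstable tbody tr', poll=3)
        test.wait_until_clickable('#edit-columns-button').click()
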
diff --git a/lib/toaster/tests/browser/test_all_builds_page.py b/lib/toaster/tests/browser/test_all_builds_page.py
index 8423d3dab..b9356a034 100644
--- a/lib/toaster/tests/browser/test_all_builds_page.py
+++ b/lib/toaster/tests/browser/test_all_builds_page.py
@@ -7,13 +7,18 @@
# SPDX-License-Identifier: GPL-2.0-only
#
+import os
import re
from django.urls import reverse
+from selenium.webdriver.support.select import Select
from django.utils import timezone
+from bldcontrol.models import BuildRequest
from tests.browser.selenium_helpers import SeleniumTestCase
-from orm.models import BitbakeVersion, Release, Project, Build, Target
+from orm.models import BitbakeVersion, Layer, Layer_Version, Recipe, Release, Project, Build, Target, Task
+
+from selenium.webdriver.common.by import By
class TestAllBuildsPage(SeleniumTestCase):
@@ -23,7 +28,8 @@ class TestAllBuildsPage(SeleniumTestCase):
CLI_BUILDS_PROJECT_NAME = 'command line builds'
def setUp(self):
- bbv = BitbakeVersion.objects.create(name='bbv1', giturl='/tmp/',
+        builddir = os.environ.get('BUILDDIR', './')
+        bbv = BitbakeVersion.objects.create(name='bbv1', giturl=f'{builddir}/',
branch='master', dirpath='')
release = Release.objects.create(name='release1',
bitbake_version=bbv)
@@ -69,7 +75,7 @@ class TestAllBuildsPage(SeleniumTestCase):
'[data-role="data-recent-build-buildtime-field"]' % build.id
# because this loads via Ajax, wait for it to be visible
- self.wait_until_present(selector)
+ self.wait_until_visible(selector)
build_time_spans = self.find_all(selector)
@@ -79,7 +85,7 @@ class TestAllBuildsPage(SeleniumTestCase):
def _get_row_for_build(self, build):
""" Get the table row for the build from the all builds table """
- self.wait_until_present('#allbuildstable')
+ self.wait_until_visible('#allbuildstable')
rows = self.find_all('#allbuildstable tr')
@@ -91,7 +97,7 @@ class TestAllBuildsPage(SeleniumTestCase):
found_row = None
for row in rows:
- outcome_links = row.find_elements_by_css_selector(selector)
+ outcome_links = row.find_elements(By.CSS_SELECTOR, selector)
if len(outcome_links) == 1:
found_row = row
break
@@ -100,6 +106,66 @@ class TestAllBuildsPage(SeleniumTestCase):
return found_row
+ def _get_create_builds(self, **kwargs):
+ """ Create a build and return the build object """
+ build1 = Build.objects.create(**self.project1_build_success)
+ build2 = Build.objects.create(**self.project1_build_failure)
+
+ # add some targets to these builds so they have recipe links
+ # (and so we can find the row in the ToasterTable corresponding to
+ # a particular build)
+ Target.objects.create(build=build1, target='foo')
+ Target.objects.create(build=build2, target='bar')
+
+ if kwargs:
+ # Create kwargs.get('success') builds with success status with target
+ # and kwargs.get('failure') builds with failure status with target
+ for i in range(kwargs.get('success', 0)):
+ now = timezone.now()
+ self.project1_build_success['started_on'] = now
+ self.project1_build_success[
+ 'completed_on'] = now - timezone.timedelta(days=i)
+ build = Build.objects.create(**self.project1_build_success)
+ Target.objects.create(build=build,
+ target=f'{i}_success_recipe',
+ task=f'{i}_success_task')
+
+ self._set_buildRequest_and_task_on_build(build)
+ for i in range(kwargs.get('failure', 0)):
+ now = timezone.now()
+ self.project1_build_failure['started_on'] = now
+ self.project1_build_failure[
+ 'completed_on'] = now - timezone.timedelta(days=i)
+ build = Build.objects.create(**self.project1_build_failure)
+ Target.objects.create(build=build,
+ target=f'{i}_fail_recipe',
+ task=f'{i}_fail_task')
+ self._set_buildRequest_and_task_on_build(build)
+ return build1, build2
+
+ def _create_recipe(self):
+ """ Add a recipe to the database and return it """
+ layer = Layer.objects.create()
+ layer_version = Layer_Version.objects.create(layer=layer)
+ return Recipe.objects.create(name='recipe_foo', layer_version=layer_version)
+
+ def _set_buildRequest_and_task_on_build(self, build):
+ """ Set buildRequest and task on build """
+ build.recipes_parsed = 1
+ build.save()
+ buildRequest = BuildRequest.objects.create(
+ build=build,
+ project=self.project1,
+ state=BuildRequest.REQ_COMPLETED)
+ build.build_request = buildRequest
+ recipe = self._create_recipe()
+ task = Task.objects.create(build=build,
+ recipe=recipe,
+ task_name='task',
+ outcome=Task.OUTCOME_SUCCESS)
+ task.save()
+ build.save()
+
def test_show_tasks_with_suffix(self):
""" Task should be shown as suffix on build name """
build = Build.objects.create(**self.project1_build_success)
@@ -109,7 +175,7 @@ class TestAllBuildsPage(SeleniumTestCase):
url = reverse('all-builds')
self.get(url)
- self.wait_until_present('td[class="target"]')
+ self.wait_until_visible('td[class="target"]')
cell = self.find('td[class="target"]')
content = cell.get_attribute('innerHTML')
@@ -126,23 +192,25 @@ class TestAllBuildsPage(SeleniumTestCase):
but should be shown for other builds
"""
build1 = Build.objects.create(**self.project1_build_success)
- default_build = Build.objects.create(**self.default_project_build_success)
+ default_build = Build.objects.create(
+ **self.default_project_build_success)
url = reverse('all-builds')
self.get(url)
- # shouldn't see a rebuild button for command-line builds
- selector = 'div[data-latest-build-result="%s"] .rebuild-btn' % default_build.id
- run_again_button = self.find_all(selector)
- self.assertEqual(len(run_again_button), 0,
- 'should not see a rebuild button for cli builds')
-
# should see a rebuild button for non-command-line builds
+ self.wait_until_visible('#allbuildstable tbody tr')
selector = 'div[data-latest-build-result="%s"] .rebuild-btn' % build1.id
run_again_button = self.find_all(selector)
self.assertEqual(len(run_again_button), 1,
'should see a rebuild button for non-cli builds')
+ # shouldn't see a rebuild button for command-line builds
+ selector = 'div[data-latest-build-result="%s"] .rebuild-btn' % default_build.id
+ run_again_button = self.find_all(selector)
+ self.assertEqual(len(run_again_button), 0,
+ 'should not see a rebuild button for cli builds')
+
def test_tooltips_on_project_name(self):
"""
Test tooltips shown next to project name in the main table
@@ -156,6 +224,7 @@ class TestAllBuildsPage(SeleniumTestCase):
url = reverse('all-builds')
self.get(url)
+ self.wait_until_visible('#allbuildstable', poll=3)
# get the project name cells from the table
cells = self.find_all('#allbuildstable td[class="project"]')
@@ -164,7 +233,7 @@ class TestAllBuildsPage(SeleniumTestCase):
for cell in cells:
content = cell.get_attribute('innerHTML')
- help_icons = cell.find_elements_by_css_selector(selector)
+ help_icons = cell.find_elements(By.CSS_SELECTOR, selector)
if re.search(self.PROJECT_NAME, content):
# no help icon next to non-cli project name
@@ -184,38 +253,224 @@ class TestAllBuildsPage(SeleniumTestCase):
recent builds area; failed builds should not have links on the time column,
or in the recent builds area
"""
- build1 = Build.objects.create(**self.project1_build_success)
- build2 = Build.objects.create(**self.project1_build_failure)
-
- # add some targets to these builds so they have recipe links
- # (and so we can find the row in the ToasterTable corresponding to
- # a particular build)
- Target.objects.create(build=build1, target='foo')
- Target.objects.create(build=build2, target='bar')
+ build1, build2 = self._get_create_builds()
url = reverse('all-builds')
self.get(url)
+ self.wait_until_visible('#allbuildstable', poll=3)
# test recent builds area for successful build
element = self._get_build_time_element(build1)
- links = element.find_elements_by_css_selector('a')
+ links = element.find_elements(By.CSS_SELECTOR, 'a')
msg = 'should be a link on the build time for a successful recent build'
- self.assertEquals(len(links), 1, msg)
+ self.assertEqual(len(links), 1, msg)
# test recent builds area for failed build
element = self._get_build_time_element(build2)
- links = element.find_elements_by_css_selector('a')
+ links = element.find_elements(By.CSS_SELECTOR, 'a')
msg = 'should not be a link on the build time for a failed recent build'
- self.assertEquals(len(links), 0, msg)
+ self.assertEqual(len(links), 0, msg)
# test the time column for successful build
build1_row = self._get_row_for_build(build1)
- links = build1_row.find_elements_by_css_selector('td.time a')
+ links = build1_row.find_elements(By.CSS_SELECTOR, 'td.time a')
msg = 'should be a link on the build time for a successful build'
- self.assertEquals(len(links), 1, msg)
+ self.assertEqual(len(links), 1, msg)
# test the time column for failed build
build2_row = self._get_row_for_build(build2)
- links = build2_row.find_elements_by_css_selector('td.time a')
+ links = build2_row.find_elements(By.CSS_SELECTOR, 'td.time a')
msg = 'should not be a link on the build time for a failed build'
- self.assertEquals(len(links), 0, msg)
+ self.assertEqual(len(links), 0, msg)
+
+ def test_builds_table_search_box(self):
+ """ Test the search box in the builds table on the all builds page """
+ self._get_create_builds()
+
+ url = reverse('all-builds')
+ self.get(url)
+
+ # Check search box is present and works
+ self.wait_until_visible('#allbuildstable tbody tr')
+ search_box = self.find('#search-input-allbuildstable')
+ self.assertTrue(search_box.is_displayed())
+
+ # Check that we can search for a build by recipe name
+ search_box.send_keys('foo')
+ search_btn = self.find('#search-submit-allbuildstable')
+ search_btn.click()
+ self.wait_until_visible('#allbuildstable tbody tr')
+ rows = self.find_all('#allbuildstable tbody tr')
+ self.assertTrue(len(rows) >= 1)
+
+ def test_filtering_on_failure_tasks_column(self):
+ """ Test the filtering on failure tasks column in the builds table on the all builds page """
+ def _check_if_filter_failed_tasks_column_is_visible():
+            # ensure the failed tasks column is shown; enable it via Edit columns if not
+ # Check edit column
+ edit_column = self.find('#edit-columns-button')
+ self.assertTrue(edit_column.is_displayed())
+ edit_column.click()
+ # Check dropdown is visible
+ self.wait_until_visible('ul.dropdown-menu.editcol')
+ filter_fails_task_checkbox = self.find('#checkbox-failed_tasks')
+ if not filter_fails_task_checkbox.is_selected():
+ filter_fails_task_checkbox.click()
+ edit_column.click()
+
+ self._get_create_builds(success=10, failure=10)
+
+ url = reverse('all-builds')
+ self.get(url)
+
+ # Check filtering on failure tasks column
+ self.wait_until_visible('#allbuildstable tbody tr')
+ _check_if_filter_failed_tasks_column_is_visible()
+ failed_tasks_filter = self.find('#failed_tasks_filter')
+ failed_tasks_filter.click()
+ # Check popup is visible
+ self.wait_until_visible('#filter-modal-allbuildstable')
+ self.assertTrue(
+ self.find('#filter-modal-allbuildstable').is_displayed())
+ # Check that we can filter by failure tasks
+ build_without_failure_tasks = self.find(
+ '#failed_tasks_filter\\:without_failed_tasks')
+ build_without_failure_tasks.click()
+ # click on apply button
+ self.find('#filter-modal-allbuildstable .btn-primary').click()
+ self.wait_until_visible('#allbuildstable tbody tr')
+ # Check if filter is applied, by checking if failed_tasks_filter has btn-primary class
+ self.assertTrue(self.find('#failed_tasks_filter').get_attribute(
+ 'class').find('btn-primary') != -1)
+
+ def test_filtering_on_completedOn_column(self):
+ """ Test the filtering on completed_on column in the builds table on the all builds page """
+ self._get_create_builds(success=10, failure=10)
+
+ url = reverse('all-builds')
+ self.get(url)
+
+ # Check filtering on failure tasks column
+ self.wait_until_visible('#allbuildstable tbody tr')
+ completed_on_filter = self.find('#completed_on_filter')
+ completed_on_filter.click()
+ # Check popup is visible
+ self.wait_until_visible('#filter-modal-allbuildstable')
+ self.assertTrue(
+ self.find('#filter-modal-allbuildstable').is_displayed())
+        # Check that we can filter by date range
+        date_range_option = self.find(
+            '#completed_on_filter\\:date_range')
+        date_range_option.click()
+ # click on apply button
+ self.find('#filter-modal-allbuildstable .btn-primary').click()
+ self.wait_until_visible('#allbuildstable tbody tr')
+ # Check if filter is applied, by checking if completed_on_filter has btn-primary class
+ self.assertTrue(self.find('#completed_on_filter').get_attribute(
+ 'class').find('btn-primary') != -1)
+
+ # Filter by date range
+ self.find('#completed_on_filter').click()
+ self.wait_until_visible('#filter-modal-allbuildstable')
+ date_ranges = self.driver.find_elements(
+ By.XPATH, '//input[@class="form-control hasDatepicker"]')
+ today = timezone.now()
+        yesterday = today - timezone.timedelta(days=1)
+        date_ranges[0].send_keys(yesterday.strftime('%Y-%m-%d'))
+ date_ranges[1].send_keys(today.strftime('%Y-%m-%d'))
+ self.find('#filter-modal-allbuildstable .btn-primary').click()
+ self.wait_until_visible('#allbuildstable tbody tr')
+ self.assertTrue(self.find('#completed_on_filter').get_attribute(
+ 'class').find('btn-primary') != -1)
+        # Check if filter is applied: at least 4 builds should fall within the date range
+ self.assertTrue(len(self.find_all('#allbuildstable tbody tr')) >= 4)
+
+ def test_builds_table_editColumn(self):
+ """ Test the edit column feature in the builds table on the all builds page """
+ self._get_create_builds(success=10, failure=10)
+
+ def test_edit_column(check_box_id):
+ # Check that we can hide/show table column
+ check_box = self.find(f'#{check_box_id}')
+ th_class = str(check_box_id).replace('checkbox-', '')
+ if check_box.is_selected():
+ # check if column is visible in table
+ self.assertTrue(
+ self.find(
+ f'#allbuildstable thead th.{th_class}'
+ ).is_displayed(),
+ f"The {th_class} column is checked in EditColumn dropdown, but it's not visible in table"
+ )
+ check_box.click()
+ # check if column is hidden in table
+ self.assertFalse(
+ self.find(
+ f'#allbuildstable thead th.{th_class}'
+ ).is_displayed(),
+ f"The {th_class} column is unchecked in EditColumn dropdown, but it's visible in table"
+ )
+ else:
+ # check if column is hidden in table
+ self.assertFalse(
+ self.find(
+ f'#allbuildstable thead th.{th_class}'
+ ).is_displayed(),
+ f"The {th_class} column is unchecked in EditColumn dropdown, but it's visible in table"
+ )
+ check_box.click()
+ # check if column is visible in table
+ self.assertTrue(
+ self.find(
+ f'#allbuildstable thead th.{th_class}'
+ ).is_displayed(),
+ f"The {th_class} column is checked in EditColumn dropdown, but it's not visible in table"
+ )
+ url = reverse('all-builds')
+ self.get(url)
+ self.wait_until_visible('#allbuildstable tbody tr')
+
+ # Check edit column
+ edit_column = self.find('#edit-columns-button')
+ self.assertTrue(edit_column.is_displayed())
+ edit_column.click()
+ # Check dropdown is visible
+ self.wait_until_visible('ul.dropdown-menu.editcol')
+
+        # Check that we can hide/show each column via the Edit columns menu
+ test_edit_column('checkbox-errors_no')
+ test_edit_column('checkbox-failed_tasks')
+ test_edit_column('checkbox-image_files')
+ test_edit_column('checkbox-project')
+ test_edit_column('checkbox-started_on')
+ test_edit_column('checkbox-time')
+ test_edit_column('checkbox-warnings_no')
+
+ def test_builds_table_show_rows(self):
+ """ Test the show rows feature in the builds table on the all builds page """
+ self._get_create_builds(success=100, failure=100)
+
+ def test_show_rows(row_to_show, show_row_link):
+ # Check that we can show rows == row_to_show
+ show_row_link.select_by_value(str(row_to_show))
+ self.wait_until_visible('#allbuildstable tbody tr', poll=3)
+ # check at least some rows are visible
+ self.assertTrue(
+ len(self.find_all('#allbuildstable tbody tr')) > 0
+ )
+
+ url = reverse('all-builds')
+ self.get(url)
+ self.wait_until_visible('#allbuildstable tbody tr')
+
+ show_rows = self.driver.find_elements(
+ By.XPATH,
+ '//select[@class="form-control pagesize-allbuildstable"]'
+ )
+ # Check show rows
+ for show_row_link in show_rows:
+ show_row_link = Select(show_row_link)
+ test_show_rows(10, show_row_link)
+ test_show_rows(25, show_row_link)
+ test_show_rows(50, show_row_link)
+ test_show_rows(100, show_row_link)
+ test_show_rows(150, show_row_link)
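
The locator changes above track Selenium 4, which removed the find_element(s)_by_* helpers; a minimal sketch of the replacement pattern:

    from selenium.webdriver.common.by import By

    def build_time_links(row):
        # Selenium 4 style: the locator strategy is passed explicitly
        return row.find_elements(By.CSS_SELECTOR, 'td.time a')
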
diff --git a/lib/toaster/tests/browser/test_all_projects_page.py b/lib/toaster/tests/browser/test_all_projects_page.py
index 15b03400f..9ed1901cc 100644
--- a/lib/toaster/tests/browser/test_all_projects_page.py
+++ b/lib/toaster/tests/browser/test_all_projects_page.py
@@ -7,15 +7,20 @@
# SPDX-License-Identifier: GPL-2.0-only
#
+import os
import re
from django.urls import reverse
from django.utils import timezone
+from selenium.webdriver.support.select import Select
from tests.browser.selenium_helpers import SeleniumTestCase
from orm.models import BitbakeVersion, Release, Project, Build
from orm.models import ProjectVariable
+from selenium.webdriver.common.by import By
+
+
class TestAllProjectsPage(SeleniumTestCase):
""" Browser tests for projects page /projects/ """
@@ -25,7 +30,8 @@ class TestAllProjectsPage(SeleniumTestCase):
def setUp(self):
""" Add default project manually """
- project = Project.objects.create_project(self.CLI_BUILDS_PROJECT_NAME, None)
+ project = Project.objects.create_project(
+ self.CLI_BUILDS_PROJECT_NAME, None)
self.default_project = project
self.default_project.is_default = True
self.default_project.save()
@@ -35,6 +41,17 @@ class TestAllProjectsPage(SeleniumTestCase):
self.release = None
+ def _create_projects(self, nb_project=10):
+ projects = []
+ for i in range(1, nb_project + 1):
+ projects.append(
+ Project(
+ name='test project {}'.format(i),
+ release=self.release,
+ )
+ )
+ Project.objects.bulk_create(projects)
+
def _add_build_to_default_project(self):
""" Add a build to the default project (not used in all tests) """
now = timezone.now()
@@ -45,12 +62,14 @@ class TestAllProjectsPage(SeleniumTestCase):
def _add_non_default_project(self):
""" Add another project """
- bbv = BitbakeVersion.objects.create(name='test bbv', giturl='/tmp/',
+        builddir = os.environ.get('BUILDDIR', './')
+        bbv = BitbakeVersion.objects.create(name='test bbv', giturl=f'{builddir}/',
branch='master', dirpath='')
self.release = Release.objects.create(name='test release',
branch_name='master',
bitbake_version=bbv)
- self.project = Project.objects.create_project(self.PROJECT_NAME, self.release)
+ self.project = Project.objects.create_project(
+ self.PROJECT_NAME, self.release)
self.project.is_default = False
self.project.save()
@@ -62,7 +81,7 @@ class TestAllProjectsPage(SeleniumTestCase):
def _get_row_for_project(self, project_name):
""" Get the HTML row for a project, or None if not found """
- self.wait_until_present('#projectstable tbody tr')
+ self.wait_until_visible('#projectstable tbody tr', poll=3)
rows = self.find_all('#projectstable tbody tr')
# find the row with a project name matching the one supplied
@@ -93,7 +112,8 @@ class TestAllProjectsPage(SeleniumTestCase):
url = reverse('all-projects')
self.get(url)
- default_project_row = self._get_row_for_project(self.default_project.name)
+ default_project_row = self._get_row_for_project(
+ self.default_project.name)
self.assertNotEqual(default_project_row, None,
'default project "cli builds" should be in page')
@@ -113,11 +133,12 @@ class TestAllProjectsPage(SeleniumTestCase):
self.wait_until_visible("#projectstable tr")
# find the row for the default project
- default_project_row = self._get_row_for_project(self.default_project.name)
+ default_project_row = self._get_row_for_project(
+ self.default_project.name)
# check the release text for the default project
selector = 'span[data-project-field="release"] span.text-muted'
- element = default_project_row.find_element_by_css_selector(selector)
+ element = default_project_row.find_element(By.CSS_SELECTOR, selector)
text = element.text.strip()
self.assertEqual(text, 'Not applicable',
'release should be "not applicable" for default project')
@@ -127,7 +148,7 @@ class TestAllProjectsPage(SeleniumTestCase):
# check the link in the release cell for the other project
selector = 'span[data-project-field="release"]'
- element = other_project_row.find_element_by_css_selector(selector)
+ element = other_project_row.find_element(By.CSS_SELECTOR, selector)
text = element.text.strip()
self.assertEqual(text, self.release.name,
'release name should be shown for non-default project')
@@ -148,11 +169,12 @@ class TestAllProjectsPage(SeleniumTestCase):
self.wait_until_visible("#projectstable tr")
# find the row for the default project
- default_project_row = self._get_row_for_project(self.default_project.name)
+ default_project_row = self._get_row_for_project(
+ self.default_project.name)
# check the machine cell for the default project
selector = 'span[data-project-field="machine"] span.text-muted'
- element = default_project_row.find_element_by_css_selector(selector)
+ element = default_project_row.find_element(By.CSS_SELECTOR, selector)
text = element.text.strip()
self.assertEqual(text, 'Not applicable',
'machine should be not applicable for default project')
@@ -162,7 +184,7 @@ class TestAllProjectsPage(SeleniumTestCase):
# check the link in the machine cell for the other project
selector = 'span[data-project-field="machine"]'
- element = other_project_row.find_element_by_css_selector(selector)
+ element = other_project_row.find_element(By.CSS_SELECTOR, selector)
text = element.text.strip()
self.assertEqual(text, self.MACHINE_NAME,
'machine name should be shown for non-default project')
@@ -183,13 +205,15 @@ class TestAllProjectsPage(SeleniumTestCase):
self.get(reverse('all-projects'))
# find the row for the default project
- default_project_row = self._get_row_for_project(self.default_project.name)
+ default_project_row = self._get_row_for_project(
+ self.default_project.name)
# check the link on the name field
selector = 'span[data-project-field="name"] a'
- element = default_project_row.find_element_by_css_selector(selector)
+ element = default_project_row.find_element(By.CSS_SELECTOR, selector)
link_url = element.get_attribute('href').strip()
- expected_url = reverse('projectbuilds', args=(self.default_project.id,))
+ expected_url = reverse(
+ 'projectbuilds', args=(self.default_project.id,))
msg = 'link on default project name should point to builds but was %s' % link_url
self.assertTrue(link_url.endswith(expected_url), msg)
@@ -198,8 +222,116 @@ class TestAllProjectsPage(SeleniumTestCase):
# check the link for the other project
selector = 'span[data-project-field="name"] a'
- element = other_project_row.find_element_by_css_selector(selector)
+ element = other_project_row.find_element(By.CSS_SELECTOR, selector)
link_url = element.get_attribute('href').strip()
expected_url = reverse('project', args=(self.project.id,))
msg = 'link on project name should point to configuration but was %s' % link_url
self.assertTrue(link_url.endswith(expected_url), msg)
+
+ def test_allProject_table_search_box(self):
+ """ Test the search box in the all project table on the all projects page """
+ self._create_projects()
+
+ url = reverse('all-projects')
+ self.get(url)
+
+        # Check search box is present and works
+ self.wait_until_visible('#projectstable tbody tr', poll=3)
+ search_box = self.find('#search-input-projectstable')
+ self.assertTrue(search_box.is_displayed())
+
+ # Check that we can search for a project by project name
+ search_box.send_keys('test project 10')
+ search_btn = self.find('#search-submit-projectstable')
+ search_btn.click()
+ self.wait_until_visible('#projectstable tbody tr', poll=3)
+ rows = self.find_all('#projectstable tbody tr')
+ self.assertTrue(len(rows) == 1)
+
+ def test_allProject_table_editColumn(self):
+ """ Test the edit column feature in the projects table on the all projects page """
+ self._create_projects()
+
+ def test_edit_column(check_box_id):
+ # Check that we can hide/show table column
+ check_box = self.find(f'#{check_box_id}')
+ th_class = str(check_box_id).replace('checkbox-', '')
+ if check_box.is_selected():
+ # check if column is visible in table
+ self.assertTrue(
+ self.find(
+ f'#projectstable thead th.{th_class}'
+ ).is_displayed(),
+ f"The {th_class} column is checked in EditColumn dropdown, but it's not visible in table"
+ )
+ check_box.click()
+ # check if column is hidden in table
+ self.assertFalse(
+ self.find(
+ f'#projectstable thead th.{th_class}'
+ ).is_displayed(),
+ f"The {th_class} column is unchecked in EditColumn dropdown, but it's visible in table"
+ )
+ else:
+ # check if column is hidden in table
+ self.assertFalse(
+ self.find(
+ f'#projectstable thead th.{th_class}'
+ ).is_displayed(),
+ f"The {th_class} column is unchecked in EditColumn dropdown, but it's visible in table"
+ )
+ check_box.click()
+ # check if column is visible in table
+ self.assertTrue(
+ self.find(
+ f'#projectstable thead th.{th_class}'
+ ).is_displayed(),
+ f"The {th_class} column is checked in EditColumn dropdown, but it's not visible in table"
+ )
+ url = reverse('all-projects')
+ self.get(url)
+ self.wait_until_visible('#projectstable tbody tr', poll=3)
+
+ # Check edit column
+ edit_column = self.find('#edit-columns-button')
+ self.assertTrue(edit_column.is_displayed())
+ edit_column.click()
+ # Check dropdown is visible
+ self.wait_until_visible('ul.dropdown-menu.editcol')
+
+        # Check that we can hide/show each column via the Edit columns menu
+ test_edit_column('checkbox-errors')
+ test_edit_column('checkbox-image_files')
+ test_edit_column('checkbox-last_build_outcome')
+ test_edit_column('checkbox-recipe_name')
+ test_edit_column('checkbox-warnings')
+
+ def test_allProject_table_show_rows(self):
+ """ Test the show rows feature in the projects table on the all projects page """
+ self._create_projects(nb_project=200)
+
+ def test_show_rows(row_to_show, show_row_link):
+ # Check that we can show rows == row_to_show
+ show_row_link.select_by_value(str(row_to_show))
+ self.wait_until_visible('#projectstable tbody tr', poll=3)
+ # check at least some rows are visible
+ self.assertTrue(
+ len(self.find_all('#projectstable tbody tr')) > 0
+ )
+
+ url = reverse('all-projects')
+ self.get(url)
+ self.wait_until_visible('#projectstable tbody tr', poll=3)
+
+ show_rows = self.driver.find_elements(
+ By.XPATH,
+ '//select[@class="form-control pagesize-projectstable"]'
+ )
+ # Check show rows
+ for show_row_link in show_rows:
+ show_row_link = Select(show_row_link)
+ test_show_rows(10, show_row_link)
+ test_show_rows(25, show_row_link)
+ test_show_rows(50, show_row_link)
+ test_show_rows(100, show_row_link)
+ test_show_rows(150, show_row_link)
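
Both show-rows tests drive the same page-size widget; a minimal shared sketch, with the table name parameterised and the markup class assumed from ToasterTable:

    from selenium.webdriver.common.by import By
    from selenium.webdriver.support.select import Select

    def set_page_size(driver, table, size):
        # ToasterTable renders <select class="form-control pagesize-<table>">;
        # selecting a value triggers the table's page-size change handler
        for sel in driver.find_elements(
                By.XPATH, f'//select[@class="form-control pagesize-{table}"]'):
            Select(sel).select_by_value(str(size))
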
diff --git a/lib/toaster/tests/browser/test_builddashboard_page.py b/lib/toaster/tests/browser/test_builddashboard_page.py
index efcd89b34..d838ce363 100644
--- a/lib/toaster/tests/browser/test_builddashboard_page.py
+++ b/lib/toaster/tests/browser/test_builddashboard_page.py
@@ -7,6 +7,7 @@
# SPDX-License-Identifier: GPL-2.0-only
#
+import os
from django.urls import reverse
from django.utils import timezone
@@ -15,11 +16,14 @@ from tests.browser.selenium_helpers import SeleniumTestCase
from orm.models import Project, Release, BitbakeVersion, Build, LogMessage
from orm.models import Layer, Layer_Version, Recipe, CustomImageRecipe, Variable
+from selenium.webdriver.common.by import By
+
class TestBuildDashboardPage(SeleniumTestCase):
""" Tests for the build dashboard /build/X """
def setUp(self):
- bbv = BitbakeVersion.objects.create(name='bbv1', giturl='/tmp/',
+ builddir = os.environ.get('BUILDDIR', './')
+ bbv = BitbakeVersion.objects.create(name='bbv1', giturl=f'{builddir}/',
branch='master', dirpath="")
release = Release.objects.create(name='release1',
bitbake_version=bbv)
@@ -158,6 +162,7 @@ class TestBuildDashboardPage(SeleniumTestCase):
"""
url = reverse('builddashboard', args=(build.id,))
self.get(url)
+ self.wait_until_visible('#global-nav', poll=3)
def _get_build_dashboard_errors(self, build):
"""
@@ -183,7 +188,7 @@ class TestBuildDashboardPage(SeleniumTestCase):
found = False
for element in message_elements:
- log_message_text = element.find_element_by_tag_name('pre').text.strip()
+ log_message_text = element.find_element(By.TAG_NAME, 'pre').text.strip()
text_matches = (log_message_text == expected_text)
log_message_pk = element.get_attribute('data-log-message-id')
@@ -213,7 +218,7 @@ class TestBuildDashboardPage(SeleniumTestCase):
the WebElement modal match the list of text values in expected
"""
# labels containing the radio buttons we're testing for
- labels = modal.find_elements_by_css_selector(".radio")
+ labels = modal.find_elements(By.CSS_SELECTOR,".radio")
labels_text = [lab.text for lab in labels]
self.assertEqual(len(labels_text), len(expected))
@@ -248,7 +253,7 @@ class TestBuildDashboardPage(SeleniumTestCase):
selector = '[data-role="edit-custom-image-trigger"]'
self.click(selector)
- modal = self.driver.find_element_by_id('edit-custom-image-modal')
+ modal = self.driver.find_element(By.ID, 'edit-custom-image-modal')
self.wait_until_visible("#edit-custom-image-modal")
# recipes we expect to see in the edit custom image modal
@@ -270,7 +275,7 @@ class TestBuildDashboardPage(SeleniumTestCase):
selector = '[data-role="new-custom-image-trigger"]'
self.click(selector)
- modal = self.driver.find_element_by_id('new-custom-image-modal')
+ modal = self.driver.find_element(By.ID,'new-custom-image-modal')
self.wait_until_visible("#new-custom-image-modal")
# recipes we expect to see in the new custom image modal
diff --git a/lib/toaster/tests/browser/test_builddashboard_page_artifacts.py b/lib/toaster/tests/browser/test_builddashboard_page_artifacts.py
index c6226d60e..675825bd4 100644
--- a/lib/toaster/tests/browser/test_builddashboard_page_artifacts.py
+++ b/lib/toaster/tests/browser/test_builddashboard_page_artifacts.py
@@ -7,6 +7,7 @@
# SPDX-License-Identifier: GPL-2.0-only
#
+import os
from django.urls import reverse
from django.utils import timezone
@@ -20,7 +21,8 @@ class TestBuildDashboardPageArtifacts(SeleniumTestCase):
""" Tests for artifacts on the build dashboard /build/X """
def setUp(self):
- bbv = BitbakeVersion.objects.create(name='bbv1', giturl='/tmp/',
+ builddir = os.environ.get('BUILDDIR', './')
+ bbv = BitbakeVersion.objects.create(name='bbv1', giturl=f'{builddir}/',
branch='master', dirpath="")
release = Release.objects.create(name='release1',
bitbake_version=bbv)
@@ -197,12 +199,12 @@ class TestBuildDashboardPageArtifacts(SeleniumTestCase):
# check package count and size, link on target name
selector = '[data-value="target-package-count"]'
element = self.find(selector)
- self.assertEquals(element.text, '1',
+ self.assertEqual(element.text, '1',
'package count should be shown for image builds')
selector = '[data-value="target-package-size"]'
element = self.find(selector)
- self.assertEquals(element.text, '1.0 KB',
+ self.assertEqual(element.text, '1.0 KB',
'package size should be shown for image builds')
selector = '[data-link="target-packages"]'
diff --git a/lib/toaster/tests/browser/test_delete_project.py b/lib/toaster/tests/browser/test_delete_project.py
new file mode 100644
index 000000000..1941777cc
--- /dev/null
+++ b/lib/toaster/tests/browser/test_delete_project.py
@@ -0,0 +1,103 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# BitBake Toaster UI tests implementation
+#
+# Copyright (C) 2023 Savoir-faire Linux Inc
+#
+# SPDX-License-Identifier: GPL-2.0-only
+
+import pytest
+from django.urls import reverse
+from selenium.webdriver.support.ui import Select
+from tests.browser.selenium_helpers import SeleniumTestCase
+from orm.models import BitbakeVersion, Project, Release
+from selenium.webdriver.common.by import By
+
+class TestDeleteProject(SeleniumTestCase):
+
+ def setUp(self):
+ bitbake, _ = BitbakeVersion.objects.get_or_create(
+ name="master",
+ giturl="git://master",
+ branch="master",
+ dirpath="master")
+
+ self.release, _ = Release.objects.get_or_create(
+ name="master",
+ description="Yocto Project master",
+ branch_name="master",
+ helptext="latest",
+ bitbake_version=bitbake)
+
+ Release.objects.get_or_create(
+ name="foo",
+ description="Yocto Project foo",
+ branch_name="foo",
+ helptext="latest",
+ bitbake_version=bitbake)
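+ # The extra 'foo' release gives the project-version dropdown a second option to choose from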
+
+ @pytest.mark.django_db
+ def test_delete_project(self):
+ """ Test delete a project
+ - Check delete modal is visible
+ - Check delete modal has right text
+ - Confirm delete
+ - Check project is deleted
+ """
+ project_name = "project_to_delete"
+ url = reverse('newproject')
+ self.get(url)
+ self.enter_text('#new-project-name', project_name)
+ select = Select(self.find('#projectversion'))
+ select.select_by_value(str(self.release.pk))
+ self.click("#create-project-button")
+ # We should get redirected to the new project's page with the
+ # notification at the top
+ element = self.wait_until_visible('#project-created-notification')
+ self.assertTrue(project_name in element.text,
+ "New project name not in new project notification")
+ self.assertTrue(Project.objects.filter(name=project_name).count(),
+ "New project not found in database")
+
+ # Delete project
+ delete_project_link = self.driver.find_element(
+ By.XPATH, '//a[@href="#delete-project-modal"]')
+ delete_project_link.click()
+
+ # Check delete modal is visible
+ self.wait_until_visible('#delete-project-modal')
+
+ # Check delete modal has right text
+ modal_header_text = self.find('#delete-project-modal .modal-header').text
+ self.assertTrue(
+ "Are you sure you want to delete this project?" in modal_header_text,
+ "Delete project modal header text is wrong")
+
+ modal_body_text = self.find('#delete-project-modal .modal-body').text
+ self.assertTrue(
+ "Cancel its builds currently in progress" in modal_body_text,
+ "Modal body doesn't contain: Cancel its builds currently in progress")
+ self.assertTrue(
+ "Remove its configuration information" in modal_body_text,
+ "Modal body doesn't contain: Remove its configuration information")
+ self.assertTrue(
+ "Remove its imported layers" in modal_body_text,
+ "Modal body doesn't contain: Remove its imported layers")
+ self.assertTrue(
+ "Remove its custom images" in modal_body_text,
+ "Modal body doesn't contain: Remove its custom images")
+ self.assertTrue(
+ "Remove all its build information" in modal_body_text,
+ "Modal body doesn't contain: Remove all its build information")
+
+ # Confirm delete
+ delete_btn = self.find('#delete-project-confirmed')
+ delete_btn.click()
+
+ # Check project is deleted
+ self.wait_until_visible('#change-notification')
+ delete_notification = self.find('#change-notification-msg')
+ self.assertTrue("You have deleted 1 project:" in delete_notification.text)
+ self.assertTrue(project_name in delete_notification.text)
+ self.assertFalse(Project.objects.filter(name=project_name).exists(),
+ "Project not deleted from database")
diff --git a/lib/toaster/tests/browser/test_landing_page.py b/lib/toaster/tests/browser/test_landing_page.py
index 8bb64b9f3..8fe5fea46 100644
--- a/lib/toaster/tests/browser/test_landing_page.py
+++ b/lib/toaster/tests/browser/test_landing_page.py
@@ -10,8 +10,10 @@
from django.urls import reverse
from django.utils import timezone
from tests.browser.selenium_helpers import SeleniumTestCase
+from selenium.webdriver.common.by import By
+
+from orm.models import Layer, Layer_Version, Project, Build
-from orm.models import Project, Build
class TestLandingPage(SeleniumTestCase):
""" Tests for redirects on the landing page """
@@ -29,6 +31,130 @@ class TestLandingPage(SeleniumTestCase):
self.project.is_default = True
self.project.save()
+ def test_icon_info_visible_and_clickable(self):
+ """ Test that the information icon is visible and clickable """
+ self.get(reverse('landing'))
+ info_sign = self.find('#toaster-version-info-sign')
+
+ # check that the info sign is visible
+ self.assertTrue(info_sign.is_displayed())
+
+ # check that the info sign is clickable
+ # and info modal is appearing when clicking on the info sign
+ info_sign.click() # clicking the info sign makes the 'aria-describedby' attribute visible
+ info_modal_id = info_sign.get_attribute('aria-describedby')
+ info_modal = self.find(f'#{info_modal_id}')
+ self.assertTrue(info_modal.is_displayed())
+ self.assertTrue("Toaster version information" in info_modal.text)
+
+ def test_documentation_link_displayed(self):
+ """ Test that the documentation link is displayed """
+ self.get(reverse('landing'))
+ documentation_link = self.find('#navbar-docs > a')
+
+ # check that the documentation link is visible
+ self.assertTrue(documentation_link.is_displayed())
+
+ # check that the browser opens the Toaster manual in a new tab when clicking the documentation link
+ self.assertEqual(documentation_link.get_attribute('target'), '_blank')
+ self.assertEqual(
+ documentation_link.get_attribute('href'),
+ 'http://docs.yoctoproject.org/toaster-manual/index.html#toaster-user-manual')
+ self.assertTrue("Documentation" in documentation_link.text)
+
+ def test_openembedded_jumbotron_link_visible_and_clickable(self):
+ """ Test OpenEmbedded link jumbotron is visible and clickable: """
+ self.get(reverse('landing'))
+ jumbotron = self.find('.jumbotron')
+
+ # check OpenEmbedded
+ openembedded = jumbotron.find_element(By.LINK_TEXT, 'OpenEmbedded')
+ self.assertTrue(openembedded.is_displayed())
+ openembedded.click()
+ self.assertTrue("openembedded.org" in self.driver.current_url)
+
+ def test_bitbake_jumbotron_link_visible_and_clickable(self):
+ """ Test BitBake link jumbotron is visible and clickable: """
+ self.get(reverse('landing'))
+ jumbotron = self.find('.jumbotron')
+
+ # check BitBake
+ bitbake = jumbotron.find_element(By.LINK_TEXT, 'BitBake')
+ self.assertTrue(bitbake.is_displayed())
+ bitbake.click()
+ self.assertTrue(
+ "docs.yoctoproject.org/bitbake.html" in self.driver.current_url)
+
+ def test_yoctoproject_jumbotron_link_visible_and_clickable(self):
+ """ Test Yocto Project link jumbotron is visible and clickable: """
+ self.get(reverse('landing'))
+ jumbotron = self.find('.jumbotron')
+
+ # check Yocto Project
+ yoctoproject = jumbotron.find_element(By.LINK_TEXT, 'Yocto Project')
+ self.assertTrue(yoctoproject.is_displayed())
+ yoctoproject.click()
+ self.assertTrue("yoctoproject.org" in self.driver.current_url)
+
+ def test_link_setup_using_toaster_visible_and_clickable(self):
+ """ Test big magenta button setting up and using toaster link in jumbotron
+ if visible and clickable
+ """
+ self.get(reverse('landing'))
+ jumbotron = self.find('.jumbotron')
+
+ # check Big magenta button
+ big_magenta_button = jumbotron.find_element(By.LINK_TEXT,
+ 'Toaster is ready to capture your command line builds'
+ )
+ self.assertTrue(big_magenta_button.is_displayed())
+ big_magenta_button.click()
+ self.assertTrue(
+ "docs.yoctoproject.org/toaster-manual/setup-and-use.html#setting-up-and-using-toaster" in self.driver.current_url)
+
+ def test_link_create_new_project_in_jumbotron_visible_and_clickable(self):
+ """ Test big blue button create new project jumbotron if visible and clickable """
+ # Create a layer and a layer version to make visible the big blue button
+ layer = Layer.objects.create(name='bar')
+ Layer_Version.objects.create(layer=layer)
+
+ self.get(reverse('landing'))
+ jumbotron = self.find('.jumbotron')
+
+ # check Big Blue button
+ big_blue_button = jumbotron.find_element(By.LINK_TEXT,
+ 'Create your first Toaster project to run manage builds'
+ )
+ self.assertTrue(big_blue_button.is_displayed())
+ big_blue_button.click()
+ self.assertTrue("toastergui/newproject/" in self.driver.current_url)
+
+ def test_toaster_manual_link_visible_and_clickable(self):
+ """ Test Read the Toaster manual link jumbotron is visible and clickable: """
+ self.get(reverse('landing'))
+ jumbotron = self.find('.jumbotron')
+
+ # check Read the Toaster manual
+ toaster_manual = jumbotron.find_element(
+ By.LINK_TEXT, 'Read the Toaster manual')
+ self.assertTrue(toaster_manual.is_displayed())
+ toaster_manual.click()
+ self.assertTrue(
+ "https://docs.yoctoproject.org/toaster-manual/index.html#toaster-user-manual" in self.driver.current_url)
+
+ def test_contrib_to_toaster_link_visible_and_clickable(self):
+ """ Test Contribute to Toaster link jumbotron is visible and clickable: """
+ self.get(reverse('landing'))
+ jumbotron = self.find('.jumbotron')
+
+ # check Contribute to Toaster
+ contribute_to_toaster = jumbotron.find_element(
+ By.LINK_TEXT, 'Contribute to Toaster')
+ self.assertTrue(contribute_to_toaster.is_displayed())
+ contribute_to_toaster.click()
+ self.assertTrue(
+ "wiki.yoctoproject.org/wiki/contribute_to_toaster" in str(self.driver.current_url).lower())
+
def test_only_default_project(self):
"""
No projects except default
@@ -87,10 +213,9 @@ class TestLandingPage(SeleniumTestCase):
self.get(reverse('landing'))
+ self.wait_until_visible("#latest-builds", poll=3)
elements = self.find_all('#allbuildstable')
self.assertEqual(len(elements), 1, 'should redirect to builds')
content = self.get_page_source()
self.assertTrue(self.PROJECT_NAME in content,
'should show builds for project %s' % self.PROJECT_NAME)
- self.assertFalse(self.CLI_BUILDS_PROJECT_NAME in content,
- 'should not show builds for cli project')
diff --git a/lib/toaster/tests/browser/test_layerdetails_page.py b/lib/toaster/tests/browser/test_layerdetails_page.py
index 71bdd2aaf..5c29548b7 100644
--- a/lib/toaster/tests/browser/test_layerdetails_page.py
+++ b/lib/toaster/tests/browser/test_layerdetails_page.py
@@ -8,6 +8,7 @@
#
from django.urls import reverse
+from selenium.common.exceptions import ElementClickInterceptedException, TimeoutException
from tests.browser.selenium_helpers import SeleniumTestCase
from orm.models import Layer, Layer_Version, Project, LayerSource, Release
@@ -63,11 +64,12 @@ class TestLayerDetailsPage(SeleniumTestCase):
args=(self.project.pk,
self.imported_layer_version.pk))
- def test_edit_layerdetails(self):
+ def _edit_layerdetails(self):
""" Edit all the editable fields for the layer refresh the page and
check that the new values exist"""
self.get(self.url)
+ self.wait_until_visible("#add-remove-layer-btn")
self.click("#add-remove-layer-btn")
self.click("#edit-layer-source")
@@ -97,13 +99,26 @@ class TestLayerDetailsPage(SeleniumTestCase):
"Expecting any of \"%s\"but got \"%s\"" %
(self.initial_values, value))
+ # Make sure the input is visible before sending keys
+ self.wait_until_visible("#layer-git input[type=text]")
inputs.send_keys("-edited")
# Save the new values
for save_btn in self.find_all(".change-btn"):
save_btn.click()
- self.click("#save-changes-for-switch")
+ try:
+ self.wait_until_visible("#save-changes-for-switch", poll=3)
+ btn_save_chg_for_switch = self.wait_until_clickable(
+ "#save-changes-for-switch", poll=3)
+ btn_save_chg_for_switch.click()
+ except ElementClickInterceptedException:
+ self.skipTest(
+ "save-changes-for-switch click intercepted. Element not visible or maybe covered by another element.")
+ except TimeoutException:
+ self.skipTest(
+ "save-changes-for-switch is not clickable within the specified timeout.")
+
self.wait_until_visible("#edit-layer-source")
# Refresh the page to see if the new values are returned
@@ -132,7 +147,18 @@ class TestLayerDetailsPage(SeleniumTestCase):
new_dir = "/home/test/my-meta-dir"
dir_input.send_keys(new_dir)
- self.click("#save-changes-for-switch")
+ try:
+ self.wait_until_visible("#save-changes-for-switch", poll=3)
+ btn_save_chg_for_switch = self.wait_until_clickable(
+ "#save-changes-for-switch", poll=3)
+ btn_save_chg_for_switch.click()
+ except ElementClickInterceptedException:
+ self.skipTest(
+ "save-changes-for-switch click intercepted. Element not properly visible or maybe behind another element.")
+ except TimeoutException:
+ self.skipTest(
+ "save-changes-for-switch is not clickable within the specified timeout.")
+
self.wait_until_visible("#edit-layer-source")
# Refresh the page to see if the new values are returned
@@ -142,6 +168,13 @@ class TestLayerDetailsPage(SeleniumTestCase):
"Expected %s in the dir value for layer directory" %
new_dir)
+ def test_edit_layerdetails_page(self):
+ try:
+ self._edit_layerdetails()
+ except ElementClickInterceptedException:
+ self.skipTest(
+ "ElementClickInterceptedException occured. Element not visible or maybe covered by another element.")
+
def test_delete_layer(self):
""" Delete the layer """
diff --git a/lib/toaster/tests/browser/test_most_recent_builds_states.py b/lib/toaster/tests/browser/test_most_recent_builds_states.py
index 7844aaa39..d7a4c3453 100644
--- a/lib/toaster/tests/browser/test_most_recent_builds_states.py
+++ b/lib/toaster/tests/browser/test_most_recent_builds_states.py
@@ -6,7 +6,6 @@
#
# Copyright (C) 2013-2016 Intel Corporation
#
-
from django.urls import reverse
from django.utils import timezone
from tests.browser.selenium_helpers import SeleniumTestCase
@@ -14,6 +13,8 @@ from tests.browser.selenium_helpers_base import Wait
from orm.models import Project, Build, Task, Recipe, Layer, Layer_Version
from bldcontrol.models import BuildRequest
+from selenium.webdriver.common.by import By
+
class TestMostRecentBuildsStates(SeleniumTestCase):
""" Test states update correctly in most recent builds area """
@@ -45,13 +46,14 @@ class TestMostRecentBuildsStates(SeleniumTestCase):
# build queued; check shown as queued
selector = base_selector + '[data-build-state="Queued"]'
element = self.wait_until_visible(selector)
- self.assertRegexpMatches(element.get_attribute('innerHTML'),
+ self.assertRegex(element.get_attribute('innerHTML'),
'Build queued', 'build should show queued status')
# waiting for recipes to be parsed
build.outcome = Build.IN_PROGRESS
build.recipes_to_parse = recipes_to_parse
build.recipes_parsed = 0
+ build.save()
build_request.state = BuildRequest.REQ_INPROGRESS
build_request.save()
@@ -62,7 +64,7 @@ class TestMostRecentBuildsStates(SeleniumTestCase):
element = self.wait_until_visible(selector)
bar_selector = '#recipes-parsed-percentage-bar-%s' % build.id
- bar_element = element.find_element_by_css_selector(bar_selector)
+ bar_element = element.find_element(By.CSS_SELECTOR, bar_selector)
self.assertEqual(bar_element.value_of_css_property('width'), '0px',
'recipe parse progress should be at 0')
@@ -73,7 +75,7 @@ class TestMostRecentBuildsStates(SeleniumTestCase):
self.get(url)
element = self.wait_until_visible(selector)
- bar_element = element.find_element_by_css_selector(bar_selector)
+ bar_element = element.find_element(By.CSS_SELECTOR, bar_selector)
recipe_bar_updated = lambda driver: \
bar_element.get_attribute('style') == 'width: 50%;'
msg = 'recipe parse progress bar should update to 50%'
@@ -94,11 +96,11 @@ class TestMostRecentBuildsStates(SeleniumTestCase):
selector = base_selector + '[data-build-state="Starting"]'
element = self.wait_until_visible(selector)
- self.assertRegexpMatches(element.get_attribute('innerHTML'),
+ self.assertRegex(element.get_attribute('innerHTML'),
'Tasks starting', 'build should show "tasks starting" status')
# first task finished; check tasks progress bar
- task1.order = 1
+ task1.outcome = Task.OUTCOME_SUCCESS
task1.save()
self.get(url)
@@ -107,7 +109,7 @@ class TestMostRecentBuildsStates(SeleniumTestCase):
element = self.wait_until_visible(selector)
bar_selector = '#build-pc-done-bar-%s' % build.id
- bar_element = element.find_element_by_css_selector(bar_selector)
+ bar_element = element.find_element(By.CSS_SELECTOR, bar_selector)
task_bar_updated = lambda driver: \
bar_element.get_attribute('style') == 'width: 50%;'
@@ -115,13 +117,13 @@ class TestMostRecentBuildsStates(SeleniumTestCase):
element = Wait(self.driver).until(task_bar_updated, msg)
# last task finished; check tasks progress bar updates
- task2.order = 2
+ task2.outcome = Task.OUTCOME_SUCCESS
task2.save()
self.get(url)
element = self.wait_until_visible(selector)
- bar_element = element.find_element_by_css_selector(bar_selector)
+ bar_element = element.find_element(By.CSS_SELECTOR, bar_selector)
task_bar_updated = lambda driver: \
bar_element.get_attribute('style') == 'width: 100%;'
msg = 'tasks progress bar should update to 100%'
@@ -183,7 +185,7 @@ class TestMostRecentBuildsStates(SeleniumTestCase):
selector = '[data-latest-build-result="%s"] ' \
'[data-build-state="Cancelling"]' % build.id
element = self.wait_until_visible(selector)
- self.assertRegexpMatches(element.get_attribute('innerHTML'),
+ self.assertRegex(element.get_attribute('innerHTML'),
'Cancelling the build', 'build should show "cancelling" status')
# check cancelled state
@@ -195,5 +197,5 @@ class TestMostRecentBuildsStates(SeleniumTestCase):
selector = '[data-latest-build-result="%s"] ' \
'[data-build-state="Cancelled"]' % build.id
element = self.wait_until_visible(selector)
- self.assertRegexpMatches(element.get_attribute('innerHTML'),
+ self.assertRegex(element.get_attribute('innerHTML'),
'Build cancelled', 'build should show "cancelled" status')
diff --git a/lib/toaster/tests/browser/test_new_custom_image_page.py b/lib/toaster/tests/browser/test_new_custom_image_page.py
index 9906ae42a..9f0b6397f 100644
--- a/lib/toaster/tests/browser/test_new_custom_image_page.py
+++ b/lib/toaster/tests/browser/test_new_custom_image_page.py
@@ -6,6 +6,7 @@
#
# SPDX-License-Identifier: GPL-2.0-only
#
+from bldcontrol.models import BuildEnvironment
from django.urls import reverse
from tests.browser.selenium_helpers import SeleniumTestCase
@@ -18,6 +19,9 @@ class TestNewCustomImagePage(SeleniumTestCase):
CUSTOM_IMAGE_NAME = 'roopa-doopa'
def setUp(self):
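+ # Ensure a local BuildEnvironment exists before exercising the custom-image pages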
+ BuildEnvironment.objects.get_or_create(
+ betype=BuildEnvironment.TYPE_LOCAL,
+ )
release = Release.objects.create(
name='baz',
bitbake_version=BitbakeVersion.objects.create(name='v1')
@@ -41,11 +45,16 @@ class TestNewCustomImagePage(SeleniumTestCase):
)
# add a fake image recipe to the layer that can be customised
+ builddir = os.environ.get('BUILDDIR', './')
self.recipe = Recipe.objects.create(
name='core-image-minimal',
layer_version=layer_version,
+ file_path=f'{builddir}/core-image-minimal.bb',
is_image=True
)
+ # create a tmp file for the recipe
+ with open(self.recipe.file_path, 'w') as f:
+ f.write('foo')
# another project with a custom image already in it
project2 = Project.objects.create(name='whoop', release=release)
@@ -81,6 +90,7 @@ class TestNewCustomImagePage(SeleniumTestCase):
"""
url = reverse('newcustomimage', args=(self.project.id,))
self.get(url)
+ self.wait_until_visible('#global-nav', poll=3)
self.click('button[data-recipe="%s"]' % self.recipe.id)
@@ -128,7 +138,7 @@ class TestNewCustomImagePage(SeleniumTestCase):
"""
self._create_custom_image(self.recipe.name)
element = self.wait_until_visible('#invalid-name-help')
- self.assertRegexpMatches(element.text.strip(),
+ self.assertRegex(element.text.strip(),
'image with this name already exists')
def test_new_duplicates_project_image(self):
@@ -146,4 +156,4 @@ class TestNewCustomImagePage(SeleniumTestCase):
self._create_custom_image(custom_image_name)
element = self.wait_until_visible('#invalid-name-help')
expected = 'An image with this name already exists in this project'
- self.assertRegexpMatches(element.text.strip(), expected)
+ self.assertRegex(element.text.strip(), expected)
diff --git a/lib/toaster/tests/browser/test_new_project_page.py b/lib/toaster/tests/browser/test_new_project_page.py
index e20a1f686..458bb6538 100644
--- a/lib/toaster/tests/browser/test_new_project_page.py
+++ b/lib/toaster/tests/browser/test_new_project_page.py
@@ -6,11 +6,11 @@
#
# SPDX-License-Identifier: GPL-2.0-only
#
-
from django.urls import reverse
from tests.browser.selenium_helpers import SeleniumTestCase
from selenium.webdriver.support.ui import Select
from selenium.common.exceptions import InvalidElementStateException
+from selenium.webdriver.common.by import By
from orm.models import Project, Release, BitbakeVersion
@@ -47,7 +47,7 @@ class TestNewProjectPage(SeleniumTestCase):
url = reverse('newproject')
self.get(url)
-
+ self.wait_until_visible('#new-project-name', poll=3)
self.enter_text('#new-project-name', project_name)
select = Select(self.find('#projectversion'))
@@ -57,7 +57,8 @@ class TestNewProjectPage(SeleniumTestCase):
# We should get redirected to the new project's page with the
# notification at the top
- element = self.wait_until_visible('#project-created-notification')
+ element = self.wait_until_visible(
+ '#project-created-notification', poll=3)
self.assertTrue(project_name in element.text,
"New project name not in new project notification")
@@ -78,13 +79,20 @@ class TestNewProjectPage(SeleniumTestCase):
url = reverse('newproject')
self.get(url)
+ self.wait_until_visible('#new-project-name', poll=3)
self.enter_text('#new-project-name', project_name)
select = Select(self.find('#projectversion'))
select.select_by_value(str(self.release.pk))
- element = self.wait_until_visible('#hint-error-project-name')
+ radio = self.driver.find_element(By.ID, 'type-new')
+ radio.click()
+
+ self.click("#create-project-button")
+
+ self.wait_until_present('#hint-error-project-name', poll=3)
+ element = self.find('#hint-error-project-name')
self.assertTrue(("Project names must be unique" in element.text),
"Did not find unique project name error message")
diff --git a/lib/toaster/tests/browser/test_project_builds_page.py b/lib/toaster/tests/browser/test_project_builds_page.py
index 51717e72d..0dba33b9c 100644
--- a/lib/toaster/tests/browser/test_project_builds_page.py
+++ b/lib/toaster/tests/browser/test_project_builds_page.py
@@ -7,6 +7,7 @@
# SPDX-License-Identifier: GPL-2.0-only
#
+import os
import re
from django.urls import reverse
@@ -22,7 +23,8 @@ class TestProjectBuildsPage(SeleniumTestCase):
CLI_BUILDS_PROJECT_NAME = 'command line builds'
def setUp(self):
- bbv = BitbakeVersion.objects.create(name='bbv1', giturl='/tmp/',
+ builddir = os.environ.get('BUILDDIR', './')
+ bbv = BitbakeVersion.objects.create(name='bbv1', giturl=f'{builddir}/',
branch='master', dirpath='')
release = Release.objects.create(name='release1',
bitbake_version=bbv)
diff --git a/lib/toaster/tests/browser/test_project_config_page.py b/lib/toaster/tests/browser/test_project_config_page.py
index 944bcb263..b9de541ef 100644
--- a/lib/toaster/tests/browser/test_project_config_page.py
+++ b/lib/toaster/tests/browser/test_project_config_page.py
@@ -7,10 +7,12 @@
# SPDX-License-Identifier: GPL-2.0-only
#
+import os
from django.urls import reverse
from tests.browser.selenium_helpers import SeleniumTestCase
from orm.models import BitbakeVersion, Release, Project, ProjectVariable
+from selenium.webdriver.common.by import By
class TestProjectConfigsPage(SeleniumTestCase):
""" Test data at /project/X/builds is displayed correctly """
@@ -21,7 +23,8 @@ class TestProjectConfigsPage(SeleniumTestCase):
'any of these characters'
def setUp(self):
- bbv = BitbakeVersion.objects.create(name='bbv1', giturl='/tmp/',
+ builddir = os.environ.get('BUILDDIR', './')
+ bbv = BitbakeVersion.objects.create(name='bbv1', giturl=f'{builddir}/',
branch='master', dirpath='')
release = Release.objects.create(name='release1',
bitbake_version=bbv)
@@ -66,7 +69,7 @@ class TestProjectConfigsPage(SeleniumTestCase):
self.enter_text('#new-imagefs_types', imagefs_type)
- checkboxes = self.driver.find_elements_by_xpath("//input[@class='fs-checkbox-fstypes']")
+ checkboxes = self.driver.find_elements(By.XPATH, "//input[@class='fs-checkbox-fstypes']")
for checkbox in checkboxes:
if checkbox.get_attribute("value") == "btrfs":
@@ -95,7 +98,7 @@ class TestProjectConfigsPage(SeleniumTestCase):
for checkbox in checkboxes:
if checkbox.get_attribute("value") == "cpio":
checkbox.click()
- element = self.driver.find_element_by_id('new-imagefs_types')
+ element = self.driver.find_element(By.ID, 'new-imagefs_types')
self.wait_until_visible('#new-imagefs_types')
@@ -129,7 +132,7 @@ class TestProjectConfigsPage(SeleniumTestCase):
self.assertTrue((self.INVALID_PATH_START_TEXT in element.text), msg)
# downloads dir path has a space
- self.driver.find_element_by_id('new-dl_dir').clear()
+ self.driver.find_element(By.ID, 'new-dl_dir').clear()
self.enter_text('#new-dl_dir', '/foo/bar a')
element = self.wait_until_visible('#hintError-dl_dir')
@@ -137,7 +140,7 @@ class TestProjectConfigsPage(SeleniumTestCase):
self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg)
# downloads dir path starts with ${...} but has a space
- self.driver.find_element_by_id('new-dl_dir').clear()
+ self.driver.find_element(By.ID,'new-dl_dir').clear()
self.enter_text('#new-dl_dir', '${TOPDIR}/down foo')
element = self.wait_until_visible('#hintError-dl_dir')
@@ -145,18 +148,18 @@ class TestProjectConfigsPage(SeleniumTestCase):
self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg)
# downloads dir path starts with /
- self.driver.find_element_by_id('new-dl_dir').clear()
+ self.driver.find_element(By.ID,'new-dl_dir').clear()
self.enter_text('#new-dl_dir', '/bar/foo')
- hidden_element = self.driver.find_element_by_id('hintError-dl_dir')
+ hidden_element = self.driver.find_element(By.ID,'hintError-dl_dir')
self.assertEqual(hidden_element.is_displayed(), False,
'downloads directory path valid but treated as invalid')
# downloads dir path starts with ${...}
- self.driver.find_element_by_id('new-dl_dir').clear()
+ self.driver.find_element(By.ID,'new-dl_dir').clear()
self.enter_text('#new-dl_dir', '${TOPDIR}/down')
- hidden_element = self.driver.find_element_by_id('hintError-dl_dir')
+ hidden_element = self.driver.find_element(By.ID,'hintError-dl_dir')
self.assertEqual(hidden_element.is_displayed(), False,
'downloads directory path valid but treated as invalid')
@@ -184,7 +187,7 @@ class TestProjectConfigsPage(SeleniumTestCase):
self.assertTrue((self.INVALID_PATH_START_TEXT in element.text), msg)
# path has a space
- self.driver.find_element_by_id('new-sstate_dir').clear()
+ self.driver.find_element(By.ID, 'new-sstate_dir').clear()
self.enter_text('#new-sstate_dir', '/foo/bar a')
element = self.wait_until_visible('#hintError-sstate_dir')
@@ -192,7 +195,7 @@ class TestProjectConfigsPage(SeleniumTestCase):
self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg)
# path starts with ${...} but has a space
- self.driver.find_element_by_id('new-sstate_dir').clear()
+ self.driver.find_element(By.ID,'new-sstate_dir').clear()
self.enter_text('#new-sstate_dir', '${TOPDIR}/down foo')
element = self.wait_until_visible('#hintError-sstate_dir')
@@ -200,18 +203,18 @@ class TestProjectConfigsPage(SeleniumTestCase):
self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg)
# path starts with /
- self.driver.find_element_by_id('new-sstate_dir').clear()
+ self.driver.find_element(By.ID,'new-sstate_dir').clear()
self.enter_text('#new-sstate_dir', '/bar/foo')
- hidden_element = self.driver.find_element_by_id('hintError-sstate_dir')
+ hidden_element = self.driver.find_element(By.ID, 'hintError-sstate_dir')
self.assertEqual(hidden_element.is_displayed(), False,
'sstate directory path valid but treated as invalid')
# paths starts with ${...}
- self.driver.find_element_by_id('new-sstate_dir').clear()
+ self.driver.find_element(By.ID, 'new-sstate_dir').clear()
self.enter_text('#new-sstate_dir', '${TOPDIR}/down')
- hidden_element = self.driver.find_element_by_id('hintError-sstate_dir')
+ hidden_element = self.driver.find_element(By.ID, 'hintError-sstate_dir')
self.assertEqual(hidden_element.is_displayed(), False,
'sstate directory path valid but treated as invalid')
diff --git a/lib/toaster/tests/browser/test_sample.py b/lib/toaster/tests/browser/test_sample.py
index b0067c21c..f04f1d9a1 100644
--- a/lib/toaster/tests/browser/test_sample.py
+++ b/lib/toaster/tests/browser/test_sample.py
@@ -27,3 +27,13 @@ class TestSample(SeleniumTestCase):
self.get(url)
brand_link = self.find('.toaster-navbar-brand a.brand')
self.assertEqual(brand_link.text.strip(), 'Toaster')
+
+ def test_no_builds_message(self):
+ """ Test that a message is shown when there are no builds """
+ url = reverse('all-builds')
+ self.get(url)
+ self.wait_until_visible('#empty-state-allbuildstable') # wait for the empty state div to appear
+ div_msg = self.find('#empty-state-allbuildstable .alert-info')
+
+ msg = 'Sorry - no data found'
+ self.assertEqual(div_msg.text, msg)
diff --git a/lib/toaster/tests/browser/test_toastertable_ui.py b/lib/toaster/tests/browser/test_toastertable_ui.py
index e82d5ec65..691aca1ef 100644
--- a/lib/toaster/tests/browser/test_toastertable_ui.py
+++ b/lib/toaster/tests/browser/test_toastertable_ui.py
@@ -8,11 +8,13 @@
#
from datetime import datetime
+import os
from django.urls import reverse
from django.utils import timezone
from tests.browser.selenium_helpers import SeleniumTestCase
from orm.models import BitbakeVersion, Release, Project, Build
+from selenium.webdriver.common.by import By
class TestToasterTableUI(SeleniumTestCase):
"""
@@ -33,7 +35,7 @@ class TestToasterTableUI(SeleniumTestCase):
table: WebElement for a ToasterTable
"""
selector = 'thead a.sorted'
- heading = table.find_element_by_css_selector(selector)
+ heading = table.find_element(By.CSS_SELECTOR, selector)
return heading.get_attribute('innerHTML').strip()
def _get_datetime_from_cell(self, row, selector):
@@ -45,7 +47,7 @@ class TestToasterTableUI(SeleniumTestCase):
selector: CSS selector to use to find the cell containing the date time
string
"""
- cell = row.find_element_by_css_selector(selector)
+ cell = row.find_element(By.CSS_SELECTOR, selector)
cell_text = cell.get_attribute('innerHTML').strip()
return datetime.strptime(cell_text, '%d/%m/%y %H:%M')
@@ -58,7 +60,8 @@ class TestToasterTableUI(SeleniumTestCase):
later = now + timezone.timedelta(hours=1)
even_later = later + timezone.timedelta(hours=1)
- bbv = BitbakeVersion.objects.create(name='test bbv', giturl='/tmp/',
+ builddir = os.environ.get('BUILDDIR', './')
+ bbv = BitbakeVersion.objects.create(name='test bbv', giturl=f'{builddir}/',
branch='master', dirpath='')
release = Release.objects.create(name='test release',
branch_name='master',
@@ -105,7 +108,7 @@ class TestToasterTableUI(SeleniumTestCase):
self.click('#checkbox-started_on')
# sort by started_on column
- links = table.find_elements_by_css_selector('th.started_on a')
+ links = table.find_elements(By.CSS_SELECTOR, 'th.started_on a')
for link in links:
if link.get_attribute('innerHTML').strip() == 'Started on':
link.click()
diff --git a/lib/toaster/tests/builds/buildtest.py b/lib/toaster/tests/builds/buildtest.py
index 872bbd377..cacfccd4d 100644
--- a/lib/toaster/tests/builds/buildtest.py
+++ b/lib/toaster/tests/builds/buildtest.py
@@ -88,7 +88,7 @@ def load_build_environment():
class BuildTest(unittest.TestCase):
PROJECT_NAME = "Testbuild"
- BUILDDIR = "/tmp/build/"
+ BUILDDIR = os.environ.get("BUILDDIR")
def build(self, target):
# So that the buildinfo helper uses the test database'
@@ -116,10 +116,19 @@ class BuildTest(unittest.TestCase):
project = Project.objects.create_project(name=BuildTest.PROJECT_NAME,
release=release)
+ passthrough_variable_names = ["SSTATE_DIR", "DL_DIR", "SSTATE_MIRRORS", "BB_HASHSERVE", "BB_HASHSERVE_UPSTREAM"]
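+ # Forward cache and hash-equivalence settings from the calling environment so test builds can reuse existing artifacts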
+ for variable_name in passthrough_variable_names:
+ current_variable = os.environ.get(variable_name)
+ if current_variable:
+ ProjectVariable.objects.get_or_create(
+ name=variable_name,
+ value=current_variable,
+ project=project)
+
if os.environ.get("TOASTER_TEST_USE_SSTATE_MIRROR"):
ProjectVariable.objects.get_or_create(
name="SSTATE_MIRRORS",
- value="file://.* http://autobuilder.yoctoproject.org/pub/sstate/PATH;downloadfilename=PATH",
+ value="file://.* http://cdn.jsdelivr.net/yocto/sstate/all/PATH;downloadfilename=PATH",
project=project)
ProjectTarget.objects.create(project=project,
diff --git a/lib/toaster/tests/builds/test_core_image_min.py b/lib/toaster/tests/builds/test_core_image_min.py
index 44b6cbec7..c5bfdbfbb 100644
--- a/lib/toaster/tests/builds/test_core_image_min.py
+++ b/lib/toaster/tests/builds/test_core_image_min.py
@@ -10,6 +10,7 @@
# Ionut Chisanovici, Paul Eggleton and Cristian Iorga
import os
+import pytest
from django.db.models import Q
@@ -20,12 +21,13 @@ from orm.models import CustomImagePackage
from tests.builds.buildtest import BuildTest
-
+@pytest.mark.order(4)
+@pytest.mark.django_db(True)
class BuildCoreImageMinimal(BuildTest):
"""Build core-image-minimal and test the results"""
def setUp(self):
- self.completed_build = self.build("core-image-minimal")
+ self.completed_build = self.target_already_built("core-image-minimal")
# Check if build name is unique - tc_id=795
def test_Build_Unique_Name(self):
@@ -44,17 +46,6 @@ class BuildCoreImageMinimal(BuildTest):
total_builds,
msg='Build cooker log path is not unique')
- # Check if task order is unique for one build - tc=824
- def test_Task_Unique_Order(self):
- total_task_order = Task.objects.filter(
- build=self.built).values('order').count()
- distinct_task_order = Task.objects.filter(
- build=self.completed_build).values('order').distinct().count()
-
- self.assertEqual(total_task_order,
- distinct_task_order,
- msg='Errors task order is not unique')
-
# Check task order sequence for one build - tc=825
def test_Task_Order_Sequence(self):
cnt_err = []
@@ -98,7 +89,6 @@ class BuildCoreImageMinimal(BuildTest):
'task_name',
'sstate_result')
cnt_err = []
-
for task in tasks:
if (task['sstate_result'] != Task.SSTATE_NA and
task['sstate_result'] != Task.SSTATE_MISS):
@@ -221,6 +211,7 @@ class BuildCoreImageMinimal(BuildTest):
# orm_build.outcome=0 then if the file exists and its size matches
# the file_size value. Need to add the tc in the test run
def test_Target_File_Name_Populated(self):
+ cnt_err = []
builds = Build.objects.filter(outcome=0).values('id')
for build in builds:
targets = Target.objects.filter(
@@ -230,7 +221,6 @@ class BuildCoreImageMinimal(BuildTest):
target_id=target['id']).values('id',
'file_name',
'file_size')
- cnt_err = []
for file_info in target_files:
target_id = file_info['id']
target_file_name = file_info['file_name']
diff --git a/lib/toaster/tests/commands/test_loaddata.py b/lib/toaster/tests/commands/test_loaddata.py
index 9e8d5553c..7d04f030e 100644
--- a/lib/toaster/tests/commands/test_loaddata.py
+++ b/lib/toaster/tests/commands/test_loaddata.py
@@ -6,13 +6,13 @@
#
# SPDX-License-Identifier: GPL-2.0-only
#
-
+import pytest
from django.test import TestCase
from django.core import management
from orm.models import Layer_Version, Layer, Release, ToasterSetting
-
+@pytest.mark.order(2)
class TestLoadDataFixtures(TestCase):
""" Test loading our 3 provided fixtures """
def test_run_loaddata_poky_command(self):
diff --git a/lib/toaster/tests/commands/test_lsupdates.py b/lib/toaster/tests/commands/test_lsupdates.py
index 3c4fbe055..30c6eeb4a 100644
--- a/lib/toaster/tests/commands/test_lsupdates.py
+++ b/lib/toaster/tests/commands/test_lsupdates.py
@@ -7,12 +7,13 @@
# SPDX-License-Identifier: GPL-2.0-only
#
+import pytest
from django.test import TestCase
from django.core import management
from orm.models import Layer_Version, Machine, Recipe
-
+@pytest.mark.order(3)
class TestLayerIndexUpdater(TestCase):
def test_run_lsupdates_command(self):
# Load some release information for us to fetch from the layer index
diff --git a/lib/toaster/tests/commands/test_runbuilds.py b/lib/toaster/tests/commands/test_runbuilds.py
index e223b95fc..849c227ed 100644
--- a/lib/toaster/tests/commands/test_runbuilds.py
+++ b/lib/toaster/tests/commands/test_runbuilds.py
@@ -19,12 +19,14 @@ import time
import subprocess
import signal
+import logging
+
class KillRunbuilds(threading.Thread):
""" Kill the runbuilds process after an amount of time """
def __init__(self, *args, **kwargs):
super(KillRunbuilds, self).__init__(*args, **kwargs)
- self.setDaemon(True)
+ self.daemon = True
def run(self):
time.sleep(5)
@@ -34,9 +36,12 @@ class KillRunbuilds(threading.Thread):
pidfile_path = os.path.join(os.environ.get("BUILDDIR", "."),
".runbuilds.pid")
- with open(pidfile_path) as pidfile:
- pid = pidfile.read()
- os.kill(int(pid), signal.SIGTERM)
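+ # The pidfile may name a process that has already exited; treat that as non-fatal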
+ try:
+ with open(pidfile_path) as pidfile:
+ pid = pidfile.read()
+ os.kill(int(pid), signal.SIGTERM)
+ except ProcessLookupError:
+ logging.warning("Runbuilds not running or already killed")
class TestCommands(TestCase):
diff --git a/lib/toaster/tests/db/test_db.py b/lib/toaster/tests/db/test_db.py
index 041042227..072ab9436 100644
--- a/lib/toaster/tests/db/test_db.py
+++ b/lib/toaster/tests/db/test_db.py
@@ -23,6 +23,7 @@
# SOFTWARE.
import sys
+import pytest
try:
from StringIO import StringIO
@@ -47,7 +48,7 @@ def capture(command, *args, **kwargs):
def makemigrations():
management.call_command('makemigrations')
-
+@pytest.mark.order(1)
class MigrationTest(TestCase):
def testPendingMigration(self):
diff --git a/lib/toaster/tests/functional/functional_helpers.py b/lib/toaster/tests/functional/functional_helpers.py
index 5c4ea7179..7c20437d1 100644
--- a/lib/toaster/tests/functional/functional_helpers.py
+++ b/lib/toaster/tests/functional/functional_helpers.py
@@ -11,35 +11,55 @@ import os
import logging
import subprocess
import signal
-import time
import re
from tests.browser.selenium_helpers_base import SeleniumTestCaseBase
-from tests.builds.buildtest import load_build_environment
+from selenium.webdriver.common.by import By
+from selenium.common.exceptions import NoSuchElementException
logger = logging.getLogger("toaster")
+toaster_processes = []
class SeleniumFunctionalTestCase(SeleniumTestCaseBase):
- wait_toaster_time = 5
+ wait_toaster_time = 10
@classmethod
def setUpClass(cls):
# So that the buildinfo helper uses the test database'
if os.environ.get('DJANGO_SETTINGS_MODULE', '') != \
'toastermain.settings_test':
- raise RuntimeError("Please initialise django with the tests settings: " \
+ raise RuntimeError("Please initialise django with the tests settings: "
"DJANGO_SETTINGS_MODULE='toastermain.settings_test'")
- load_build_environment()
+ # Reap any toaster processes recorded by a previous run (non-blocking check)
+ global toaster_processes
+ for toaster_process in toaster_processes:
+ try:
+ os.waitpid(toaster_process, os.WNOHANG)
+ except ChildProcessError:
+ pass
# start toaster
cmd = "bash -c 'source toaster start'"
- p = subprocess.Popen(
+ start_process = subprocess.Popen(
cmd,
cwd=os.environ.get("BUILDDIR"),
shell=True)
- if p.wait() != 0:
- raise RuntimeError("Can't initialize toaster")
+ toaster_processes = [start_process.pid]
+ if start_process.wait() != 0:
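+ # Startup failed: if something is already listening on port 8000, name the offending process in the error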
+ port_use = os.popen("lsof -i -P -n | grep '8000 (LISTEN)'").read().strip()
+ message = ''
+ if port_use:
+ process_id = port_use.split()[1]
+ process = os.popen(f"ps -o cmd= -p {process_id}").read().strip()
+ message = f"Port 8000 occupied by {process}"
+ raise RuntimeError(f"Can't initialize toaster. {message}")
+
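+ # Record the pids written by "toaster start" so tearDownClass can terminate them even if "toaster stop" fails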
+ builddir = os.environ.get("BUILDDIR")
+ with open(os.path.join(builddir, '.toastermain.pid'), 'r') as f:
+ toaster_processes.append(int(f.read()))
+ with open(os.path.join(builddir, '.runbuilds.pid'), 'r') as f:
+ toaster_processes.append(int(f.read()))
super(SeleniumFunctionalTestCase, cls).setUpClass()
cls.live_server_url = 'http://localhost:8000/'
@@ -48,22 +68,30 @@ class SeleniumFunctionalTestCase(SeleniumTestCaseBase):
def tearDownClass(cls):
super(SeleniumFunctionalTestCase, cls).tearDownClass()
- # XXX: source toaster stop gets blocked, to review why?
- # from now send SIGTERM by hand
- time.sleep(cls.wait_toaster_time)
- builddir = os.environ.get("BUILDDIR")
+ global toaster_processes
- with open(os.path.join(builddir, '.toastermain.pid'), 'r') as f:
- toastermain_pid = int(f.read())
- os.kill(toastermain_pid, signal.SIGTERM)
- with open(os.path.join(builddir, '.runbuilds.pid'), 'r') as f:
- runbuilds_pid = int(f.read())
- os.kill(runbuilds_pid, signal.SIGTERM)
+ cmd = "bash -c 'source toaster stop'"
+ stop_process = subprocess.Popen(
+ cmd,
+ cwd=os.environ.get("BUILDDIR"),
+ shell=True)
+ # Toaster stop has been known to hang in these tests so force kill if it stalls
+ try:
+ if stop_process.wait(cls.wait_toaster_time) != 0:
+ raise Exception('Toaster stop process failed')
+ except Exception as e:
+ if isinstance(e, subprocess.TimeoutExpired):
+ print('Toaster stop process took too long. Force killing toaster...')
+ else:
+ print('Toaster stop process failed. Force killing toaster...')
+ stop_process.kill()
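+ # Regardless of how the stop script fared, terminate the recorded toaster and runbuilds processes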
+ for toaster_process in toaster_processes:
+ os.kill(toaster_process, signal.SIGTERM)
def get_URL(self):
rc=self.get_page_source()
- project_url=re.search("(projectPageUrl\s:\s\")(.*)(\",)",rc)
+ project_url=re.search(r"(projectPageUrl\s:\s\")(.*)(\",)",rc)
return project_url.group(2)
@@ -74,8 +102,8 @@ class SeleniumFunctionalTestCase(SeleniumTestCaseBase):
"""
try:
table_element = self.get_table_element(table_id)
- element = table_element.find_element_by_link_text(link_text)
- except self.NoSuchElementException:
+ element = table_element.find_element(By.LINK_TEXT, link_text)
+ except NoSuchElementException:
print('no element found')
raise
return element
@@ -85,8 +113,8 @@ class SeleniumFunctionalTestCase(SeleniumTestCaseBase):
#return whole-table element
element_xpath = "//*[@id='" + table_id + "']"
try:
- element = self.driver.find_element_by_xpath(element_xpath)
- except self.NoSuchElementException:
+ element = self.driver.find_element(By.XPATH, element_xpath)
+ except NoSuchElementException:
raise
return element
row = coordinate[0]
@@ -95,8 +123,8 @@ class SeleniumFunctionalTestCase(SeleniumTestCaseBase):
#return whole-row element
element_xpath = "//*[@id='" + table_id + "']/tbody/tr[" + str(row) + "]"
try:
- element = self.driver.find_element_by_xpath(element_xpath)
- except self.NoSuchElementException:
+ element = self.driver.find_element(By.XPATH, element_xpath)
+ except NoSuchElementException:
return False
return element
#now we are looking for an element with specified X and Y
@@ -104,7 +132,7 @@ class SeleniumFunctionalTestCase(SeleniumTestCaseBase):
element_xpath = "//*[@id='" + table_id + "']/tbody/tr[" + str(row) + "]/td[" + str(column) + "]"
try:
- element = self.driver.find_element_by_xpath(element_xpath)
- except self.NoSuchElementException:
+ element = self.driver.find_element(By.XPATH, element_xpath)
+ except NoSuchElementException:
return False
return element
diff --git a/lib/toaster/tests/functional/test_create_new_project.py b/lib/toaster/tests/functional/test_create_new_project.py
new file mode 100644
index 000000000..94d90459e
--- /dev/null
+++ b/lib/toaster/tests/functional/test_create_new_project.py
@@ -0,0 +1,179 @@
+#!/usr/bin/env python3
+# BitBake Toaster UI tests implementation
+#
+# Copyright (C) 2023 Savoir-faire Linux
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import re
+import pytest
+from django.urls import reverse
+from selenium.webdriver.support.select import Select
+from tests.functional.functional_helpers import SeleniumFunctionalTestCase
+from orm.models import Project
+from selenium.webdriver.common.by import By
+
+
+@pytest.mark.django_db
+@pytest.mark.order("last")
+class TestCreateNewProject(SeleniumFunctionalTestCase):
+
+ def _create_test_new_project(
+ self,
+ project_name,
+ release,
+ release_title,
+ merge_toaster_settings,
+ ):
+ """ Create/Test new project using:
+ - Project Name: Any string
+ - Release: Any string
+ - Merge Toaster settings: True or False
+ """
+ self.get(reverse('newproject'))
+ self.wait_until_visible('#new-project-name', poll=3)
+ self.driver.find_element(By.ID,
+ "new-project-name").send_keys(project_name)
+
+ select = Select(self.find('#projectversion'))
+ select.select_by_value(release)
+
+ # check merge toaster settings
+ checkbox = self.find('.checkbox-mergeattr')
+ if merge_toaster_settings:
+ if not checkbox.is_selected():
+ checkbox.click()
+ else:
+ if checkbox.is_selected():
+ checkbox.click()
+
+ self.driver.find_element(By.ID, "create-project-button").click()
+
+ element = self.wait_until_visible('#project-created-notification', poll=3)
+ self.assertTrue(
+ self.element_exists('#project-created-notification'),
+ f"Project:{project_name} creation notification not shown"
+ )
+ self.assertTrue(
+ project_name in element.text,
+ f"New project name:{project_name} not in new project notification"
+ )
+ self.assertTrue(
+ Project.objects.filter(name=project_name).count(),
+ f"New project:{project_name} not found in database"
+ )
+
+ # check release
+ self.assertTrue(re.search(
+ release_title,
+ self.driver.find_element(By.XPATH,
+ "//span[@id='project-release-title']"
+ ).text),
+ 'The project release is not defined')
+
+ def test_create_new_project_master(self):
+ """ Test create new project using:
+ - Project Name: Any string
+ - Release: Yocto Project master (option value: 3)
+ - Merge Toaster settings: False
+ """
+ release = '3'
+ release_title = 'Yocto Project master'
+ project_name = 'projectmaster'
+ self._create_test_new_project(
+ project_name,
+ release,
+ release_title,
+ False,
+ )
+
+ def test_create_new_project_kirkstone(self):
+ """ Test create new project using:
+ - Project Name: Any string
+ - Release: Yocto Project 4.0 "Kirkstone" (option value: 1)
+ - Merge Toaster settings: True
+ """
+ release = '1'
+ release_title = 'Yocto Project 4.0 "Kirkstone"'
+ project_name = 'projectkirkstone'
+ self._create_test_new_project(
+ project_name,
+ release,
+ release_title,
+ True,
+ )
+
+ def test_create_new_project_dunfell(self):
+ """ Test create new project using:
+ - Project Name: Any string
+ - Release: Yocto Project 3.1 "Dunfell" (option value: 5)
+ - Merge Toaster settings: False
+ """
+ release = '5'
+ release_title = 'Yocto Project 3.1 "Dunfell"'
+ project_name = 'projectdunfell'
+ self._create_test_new_project(
+ project_name,
+ release,
+ release_title,
+ False,
+ )
+
+ def test_create_new_project_local(self):
+ """ Test create new project using:
+ - Project Name: Any string
+ - Release: Local Yocto Project (option value: 2)
+ - Merge Toaster settings: True
+ """
+ release = '2'
+ release_title = 'Local Yocto Project'
+ project_name = 'projectlocal'
+ self._create_test_new_project(
+ project_name,
+ release,
+ release_title,
+ True,
+ )
+
+ def test_create_new_project_without_name(self):
+ """ Test create new project without project name """
+ self.get(reverse('newproject'))
+
+ select = Select(self.find('#projectversion'))
+ select.select_by_value(str(3))
+
+ # Check input name has required attribute
+ input_name = self.driver.find_element(By.ID, "new-project-name")
+ self.assertIsNotNone(input_name.get_attribute('required'),
+ 'Input name does not have the required attribute')
+
+ # Check create button is disabled
+ create_btn = self.driver.find_element(By.ID, "create-project-button")
+ self.assertIsNotNone(create_btn.get_attribute('disabled'),
+ 'Create button is not disabled')
+
+ def test_import_new_project(self):
+ """ Test import new project using:
+ - Project Name: Any string
+ - Project type: select (Import command line project)
+ - Import existing project directory: Wrong Path
+ """
+ project_name = 'projectimport'
+ self.get(reverse('newproject'))
+ self.driver.find_element(By.ID,
+ "new-project-name").send_keys(project_name)
+ # select import project
+ self.find('#type-import').click()
+
+ # set wrong path
+ wrong_path = '/wrongpath'
+ self.driver.find_element(By.ID,
+ "import-project-dir").send_keys(wrong_path)
+ self.driver.find_element(By.ID, "create-project-button").click()
+
+ # check error message
+ self.assertTrue(self.element_exists('.alert-danger'),
+ 'Alert message not shown')
+ self.assertTrue(wrong_path in self.find('.alert-danger').text,
+ "Wrong path not in alert message")
diff --git a/lib/toaster/tests/functional/test_functional_basic.py b/lib/toaster/tests/functional/test_functional_basic.py
index 5683e3873..e4070fbb8 100644
--- a/lib/toaster/tests/functional/test_functional_basic.py
+++ b/lib/toaster/tests/functional/test_functional_basic.py
@@ -8,104 +8,129 @@
#
import re
+from django.urls import reverse
+import pytest
from tests.functional.functional_helpers import SeleniumFunctionalTestCase
from orm.models import Project
+from selenium.webdriver.common.by import By
+from tests.functional.utils import get_projectId_from_url
+
+
+@pytest.mark.django_db
+@pytest.mark.order("second_to_last")
class FuntionalTestBasic(SeleniumFunctionalTestCase):
+ """Basic functional tests for Toaster"""
+ project_id = None
+
+ def setUp(self):
+ super(FuntionalTestBasic, self).setUp()
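+ # Create the shared selenium project once and cache its id on the class for the remaining tests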
+ if not FuntionalTestBasic.project_id:
+ self._create_selenium_project()
+ current_url = self.driver.current_url
+ FuntionalTestBasic.project_id = get_projectId_from_url(current_url)
# testcase (1514)
- def test_create_slenium_project(self):
+ def _create_selenium_project(self):
project_name = 'selenium-project'
- self.get('')
- self.driver.find_element_by_link_text("To start building, create your first Toaster project").click()
- self.driver.find_element_by_id("new-project-name").send_keys(project_name)
- self.driver.find_element_by_id('projectversion').click()
- self.driver.find_element_by_id("create-project-button").click()
- element = self.wait_until_visible('#project-created-notification')
+ self.get(reverse('newproject'))
+ self.wait_until_visible('#new-project-name', poll=3)
+ self.driver.find_element(By.ID, "new-project-name").send_keys(project_name)
+ self.driver.find_element(By.ID, 'projectversion').click()
+ self.driver.find_element(By.ID, "create-project-button").click()
+ element = self.wait_until_visible('#project-created-notification', poll=10)
self.assertTrue(self.element_exists('#project-created-notification'),'Project creation notification not shown')
self.assertTrue(project_name in element.text,
"New project name not in new project notification")
self.assertTrue(Project.objects.filter(name=project_name).count(),
"New project not found in database")
+ return Project.objects.last().id
# testcase (1515)
def test_verify_left_bar_menu(self):
- self.get('')
- self.wait_until_visible('#projectstable')
+ self.get(reverse('all-projects'))
+ self.wait_until_present('#projectstable', poll=10)
self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click()
+ self.wait_until_present('#config-nav', poll=10)
self.assertTrue(self.element_exists('#config-nav'),'Configuration Tab does not exist')
project_URL=self.get_URL()
- self.driver.find_element_by_xpath('//a[@href="'+project_URL+'"]').click()
+ self.driver.find_element(By.XPATH, '//a[@href="'+project_URL+'"]').click()
+ self.wait_until_present('#config-nav', poll=10)
try:
- self.driver.find_element_by_xpath("//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'customimages/"'+"]").click()
- self.assertTrue(re.search("Custom images",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'Custom images information is not loading properly')
+ self.driver.find_element(By.XPATH, "//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'customimages/"'+"]").click()
+ self.wait_until_present('#config-nav', poll=10)
+ self.assertTrue(re.search("Custom images",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'Custom images information is not loading properly')
except:
self.fail(msg='No Custom images tab available')
try:
- self.driver.find_element_by_xpath("//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'images/"'+"]").click()
- self.assertTrue(re.search("Compatible image recipes",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'The Compatible image recipes information is not loading properly')
+ self.driver.find_element(By.XPATH, "//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'images/"'+"]").click()
+ self.wait_until_present('#config-nav', poll=10)
+ self.assertTrue(re.search("Compatible image recipes",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'The Compatible image recipes information is not loading properly')
except:
self.fail(msg='No Compatible image tab available')
try:
- self.driver.find_element_by_xpath("//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'softwarerecipes/"'+"]").click()
- self.assertTrue(re.search("Compatible software recipes",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'The Compatible software recipe information is not loading properly')
+ self.driver.find_element(By.XPATH, "//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'softwarerecipes/"'+"]").click()
+ self.wait_until_present('#config-nav', poll=10)
+ self.assertTrue(re.search("Compatible software recipes",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'The Compatible software recipe information is not loading properly')
except:
self.fail(msg='No Compatible software recipe tab available')
try:
- self.driver.find_element_by_xpath("//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'machines/"'+"]").click()
- self.assertTrue(re.search("Compatible machines",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'The Compatible machine information is not loading properly')
+ self.driver.find_element(By.XPATH, "//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'machines/"'+"]").click()
+ self.wait_until_present('#config-nav', poll=10)
+ self.assertTrue(re.search("Compatible machines",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'The Compatible machine information is not loading properly')
except:
self.fail(msg='No Compatible machines tab available')
try:
- self.driver.find_element_by_xpath("//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'layers/"'+"]").click()
- self.assertTrue(re.search("Compatible layers",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'The Compatible layer information is not loading properly')
+ self.driver.find_element(By.XPATH, "//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'layers/"'+"]").click()
+ self.wait_until_present('#config-nav', poll=10)
+ self.assertTrue(re.search("Compatible layers",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'The Compatible layer information is not loading properly')
except:
self.fail(msg='No Compatible layers tab available')
try:
- self.driver.find_element_by_xpath("//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'configuration"'+"]").click()
- self.assertTrue(re.search("Bitbake variables",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'The Bitbake variables information is not loading properly')
+ self.driver.find_element(By.XPATH, "//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'configuration"'+"]").click()
+ self.wait_until_present('#config-nav', poll=10)
+ self.assertTrue(re.search("Bitbake variables",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'The Bitbake variables information is not loading properly')
except:
self.fail(msg='No Bitbake variables tab available')
# testcase (1516)
def test_review_configuration_information(self):
- self.get('')
- self.driver.find_element_by_xpath("//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click()
- self.wait_until_visible('#projectstable')
+ self.get(reverse('all-projects'))
+ self.wait_until_present('#projectstable', poll=10)
self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click()
project_URL=self.get_URL()
-
+ self.wait_until_present('#config-nav', poll=10)
try:
self.assertTrue(self.element_exists('#machine-section'),'Machine section for the project configuration page does not exist')
- self.assertTrue(re.search("qemux86",self.driver.find_element_by_xpath("//span[@id='project-machine-name']").text),'The machine type is not assigned')
- self.driver.find_element_by_xpath("//span[@id='change-machine-toggle']").click()
- self.wait_until_visible('#select-machine-form')
- self.wait_until_visible('#cancel-machine-change')
- self.driver.find_element_by_xpath("//form[@id='select-machine-form']/a[@id='cancel-machine-change']").click()
+ self.assertTrue(re.search("qemux86-64",self.driver.find_element(By.XPATH, "//span[@id='project-machine-name']").text),'The machine type is not assigned')
+ self.driver.find_element(By.XPATH, "//span[@id='change-machine-toggle']").click()
+ self.wait_until_visible('#select-machine-form', poll=10)
+ self.wait_until_visible('#cancel-machine-change', poll=10)
+ self.driver.find_element(By.XPATH, "//form[@id='select-machine-form']/a[@id='cancel-machine-change']").click()
except:
self.fail(msg='The machine information is wrong in the configuration page')
try:
- self.driver.find_element_by_id('no-most-built')
+ self.driver.find_element(By.ID, 'no-most-built')
except:
self.fail(msg='No Most built information in project detail page')
try:
- self.assertTrue(re.search("Yocto Project master",self.driver.find_element_by_xpath("//span[@id='project-release-title']").text),'The project release is not defined')
+ self.assertTrue(re.search("Yocto Project master",self.driver.find_element(By.XPATH, "//span[@id='project-release-title']").text),'The project release is not defined')
except:
self.fail(msg='No project release title information in project detail page')
try:
- self.driver.find_element_by_xpath("//div[@id='layer-container']")
- self.assertTrue(re.search("3",self.driver.find_element_by_id("project-layers-count").text),'There should be 3 layers listed in the layer count')
- layer_list = self.driver.find_element_by_id("layers-in-project-list")
- layers = layer_list.find_elements_by_tag_name("li")
+ self.driver.find_element(By.XPATH, "//div[@id='layer-container']")
+ self.assertTrue(re.search("3",self.driver.find_element(By.ID, "project-layers-count").text),'There should be 3 layers listed in the layer count')
+ layer_list = self.driver.find_element(By.ID, "layers-in-project-list")
+ layers = layer_list.find_elements(By.TAG_NAME, "li")
for layer in layers:
if re.match ("openembedded-core",layer.text):
print ("openembedded-core layer is a default layer in the project configuration")
@@ -120,61 +145,60 @@ class FuntionalTestBasic(SeleniumFunctionalTestCase):
# testcase (1517)
def test_verify_machine_information(self):
- self.get('')
- self.driver.find_element_by_xpath("//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click()
- self.wait_until_visible('#projectstable')
+ self.get(reverse('all-projects'))
+ self.wait_until_present('#projectstable', poll=10)
self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click()
+ self.wait_until_present('#config-nav', poll=10)
try:
self.assertTrue(self.element_exists('#machine-section'),'Machine section for the project configuration page does not exist')
- self.assertTrue(re.search("qemux86",self.driver.find_element_by_id("project-machine-name").text),'The machine type is not assigned')
- self.driver.find_element_by_id("change-machine-toggle").click()
- self.wait_until_visible('#select-machine-form')
- self.wait_until_visible('#cancel-machine-change')
- self.driver.find_element_by_id("cancel-machine-change").click()
+ self.assertTrue(re.search("qemux86-64",self.driver.find_element(By.ID, "project-machine-name").text),'The machine type is not assigned')
+ self.driver.find_element(By.ID, "change-machine-toggle").click()
+ self.wait_until_visible('#select-machine-form', poll=10)
+ self.wait_until_visible('#cancel-machine-change', poll=10)
+ self.driver.find_element(By.ID, "cancel-machine-change").click()
except:
self.fail(msg='The machine information is wrong in the configuration page')
# testcase (1518)
def test_verify_most_built_recipes_information(self):
- self.get('')
- self.driver.find_element_by_xpath("//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click()
- self.wait_until_visible('#projectstable')
+ self.get(reverse('all-projects'))
+ self.wait_until_present('#projectstable', poll=10)
self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click()
+ self.wait_until_present('#config-nav', poll=10)
project_URL=self.get_URL()
-
try:
- self.assertTrue(re.search("You haven't built any recipes yet",self.driver.find_element_by_id("no-most-built").text),'Default message of no builds is not present')
- self.driver.find_element_by_xpath("//div[@id='no-most-built']/p/a[@href="+'"'+project_URL+'images/"'+"]").click()
- self.assertTrue(re.search("Compatible image recipes",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'The Choose a recipe to build link is not working properly')
+ self.assertTrue(re.search("You haven't built any recipes yet",self.driver.find_element(By.ID, "no-most-built").text),'Default message of no builds is not present')
+ self.driver.find_element(By.XPATH, "//div[@id='no-most-built']/p/a[@href="+'"'+project_URL+'images/"'+"]").click()
+ self.wait_until_present('#config-nav', poll=10)
+ self.assertTrue(re.search("Compatible image recipes",self.driver.find_element(By.XPATH, "//div[@class='col-md-10']").text),'The Choose a recipe to build link is not working properly')
except:
self.fail(msg='No Most built information in project detail page')
# testcase (1519)
def test_verify_project_release_information(self):
- self.get('')
- self.driver.find_element_by_xpath("//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click()
- self.wait_until_visible('#projectstable')
+ self.get(reverse('all-projects'))
+ self.wait_until_present('#projectstable', poll=10)
self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click()
+ self.wait_until_present('#config-nav', poll=10)
try:
- self.assertTrue(re.search("Yocto Project master",self.driver.find_element_by_id("project-release-title").text),'The project release is not defined')
+ self.assertTrue(re.search("Yocto Project master",self.driver.find_element(By.ID, "project-release-title").text),'The project release is not defined')
except:
self.fail(msg='No project release title information in project detail page')
# testcase (1520)
def test_verify_layer_information(self):
- self.get('')
- self.driver.find_element_by_xpath("//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click()
- self.wait_until_visible('#projectstable')
+ self.get(reverse('all-projects'))
+ self.wait_until_present('#projectstable', poll=10)
self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click()
+ self.wait_until_present('#config-nav', poll=10)
project_URL=self.get_URL()
-
try:
- self.driver.find_element_by_xpath("//div[@id='layer-container']")
- self.assertTrue(re.search("3",self.driver.find_element_by_id("project-layers-count").text),'There should be 3 layers listed in the layer count')
- layer_list = self.driver.find_element_by_id("layers-in-project-list")
- layers = layer_list.find_elements_by_tag_name("li")
+ self.driver.find_element(By.XPATH, "//div[@id='layer-container']")
+ self.assertTrue(re.search("3",self.driver.find_element(By.ID, "project-layers-count").text),'There should be 3 layers listed in the layer count')
+ layer_list = self.driver.find_element(By.ID, "layers-in-project-list")
+ layers = layer_list.find_elements(By.TAG_NAME, "li")
for layer in layers:
if re.match ("openembedded-core",layer.text):
@@ -186,43 +210,46 @@ class FuntionalTestBasic(SeleniumFunctionalTestCase):
else:
self.fail(msg='default layers are missing from the project configuration')
- self.driver.find_element_by_xpath("//input[@id='layer-add-input']")
- self.driver.find_element_by_xpath("//button[@id='add-layer-btn']")
- self.driver.find_element_by_xpath("//div[@id='layer-container']/form[@class='form-inline']/p/a[@id='view-compatible-layers']")
- self.driver.find_element_by_xpath("//div[@id='layer-container']/form[@class='form-inline']/p/a[@href="+'"'+project_URL+'importlayer"'+"]")
+ self.driver.find_element(By.XPATH, "//input[@id='layer-add-input']")
+ self.driver.find_element(By.XPATH, "//button[@id='add-layer-btn']")
+ self.driver.find_element(By.XPATH, "//div[@id='layer-container']/form[@class='form-inline']/p/a[@id='view-compatible-layers']")
+ self.driver.find_element(By.XPATH, "//div[@id='layer-container']/form[@class='form-inline']/p/a[@href="+'"'+project_URL+'importlayer"'+"]")
except:
self.fail(msg='No Layer information in project detail page')
# testcase (1521)
def test_verify_project_detail_links(self):
- self.get('')
- self.driver.find_element_by_xpath("//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click()
- self.wait_until_visible('#projectstable')
+ self.get(reverse('all-projects'))
+ self.wait_until_present('#projectstable', poll=10)
self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click()
+ self.wait_until_present('#config-nav', poll=10)
project_URL=self.get_URL()
-
- self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li[@id='topbar-configuration-tab']/a[@href="+'"'+project_URL+'"'+"]").click()
- self.assertTrue(re.search("Configuration",self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li[@id='topbar-configuration-tab']/a[@href="+'"'+project_URL+'"'+"]").text), 'Configuration tab in project topbar is misspelled')
+ self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li[@id='topbar-configuration-tab']/a[@href="+'"'+project_URL+'"'+"]").click()
+ self.wait_until_present('#config-nav', poll=10)
+ self.assertTrue(re.search("Configuration",self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li[@id='topbar-configuration-tab']/a[@href="+'"'+project_URL+'"'+"]").text), 'Configuration tab in project topbar is misspelled')
try:
- self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'builds/"'+"]").click()
- self.assertTrue(re.search("Builds",self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'builds/"'+"]").text), 'Builds tab in project topbar is misspelled')
- self.driver.find_element_by_xpath("//div[@id='empty-state-projectbuildstable']")
+ self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'builds/"'+"]").click()
+ self.wait_until_visible('#project-topbar', poll=10)
+ self.assertTrue(re.search("Builds",self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'builds/"'+"]").text), 'Builds tab in project topbar is misspelled')
+ self.driver.find_element(By.XPATH, "//div[@id='empty-state-projectbuildstable']")
except:
self.fail(msg='Builds tab information is not present')
try:
- self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'importlayer"'+"]").click()
- self.assertTrue(re.search("Import layer",self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'importlayer"'+"]").text), 'Import layer tab in project topbar is misspelled')
- self.driver.find_element_by_xpath("//fieldset[@id='repo-select']")
- self.driver.find_element_by_xpath("//fieldset[@id='git-repo']")
+ self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'importlayer"'+"]").click()
+ self.wait_until_visible('#project-topbar', poll=10)
+ self.assertTrue(re.search("Import layer",self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'importlayer"'+"]").text), 'Import layer tab in project topbar is misspelled')
+ self.driver.find_element(By.XPATH, "//fieldset[@id='repo-select']")
+ self.driver.find_element(By.XPATH, "//fieldset[@id='git-repo']")
except:
self.fail(msg='Import layer tab not loading properly')
try:
- self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'newcustomimage/"'+"]").click()
- self.assertTrue(re.search("New custom image",self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'newcustomimage/"'+"]").text), 'New custom image tab in project topbar is misspelled')
- self.assertTrue(re.search("Select the image recipe you want to customise",self.driver.find_element_by_xpath("//div[@class='col-md-12']/h2").text),'The new custom image tab is not loading correctly')
+ self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'newcustomimage/"'+"]").click()
+ self.wait_until_visible('#project-topbar', poll=10)
+ self.assertTrue(re.search("New custom image",self.driver.find_element(By.XPATH, "//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'newcustomimage/"'+"]").text), 'New custom image tab in project topbar is misspelled')
+ self.assertTrue(re.search("Select the image recipe you want to customise",self.driver.find_element(By.XPATH, "//div[@class='col-md-12']/h2").text),'The new custom image tab is not loading correctly')
except:
self.fail(msg='New custom image tab not loading properly')
diff --git a/lib/toaster/tests/functional/test_project_config.py b/lib/toaster/tests/functional/test_project_config.py
new file mode 100644
index 000000000..dbee36aa4
--- /dev/null
+++ b/lib/toaster/tests/functional/test_project_config.py
@@ -0,0 +1,341 @@
+#! /usr/bin/env python3 #
+# BitBake Toaster UI tests implementation
+#
+# Copyright (C) 2023 Savoir-faire Linux
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import string
+import random
+import pytest
+from django.urls import reverse
+from selenium.webdriver import Keys
+from selenium.webdriver.support.select import Select
+from selenium.common.exceptions import TimeoutException
+from tests.functional.functional_helpers import SeleniumFunctionalTestCase
+from selenium.webdriver.common.by import By
+
+from .utils import get_projectId_from_url
+
+
+@pytest.mark.django_db
+@pytest.mark.order("last")
+class TestProjectConfig(SeleniumFunctionalTestCase):
+ project_id = None
+ PROJECT_NAME = 'TestProjectConfig'
+ INVALID_PATH_START_TEXT = 'The directory path should either start with a /'
+ INVALID_PATH_CHAR_TEXT = 'The directory path cannot include spaces or ' \
+ 'any of these characters'
+
+ def _create_project(self, project_name):
+ """ Create/Test new project using:
+ - Project Name: Any string
+ - Release: Any string
+ - Merge Toaster settings: True or False
+ """
+ self.get(reverse('newproject'))
+ self.wait_until_visible('#new-project-name', poll=2)
+ self.find("#new-project-name").send_keys(project_name)
+ select = Select(self.find("#projectversion"))
+ select.select_by_value('3')
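+ # the release value '3' is assumed to exist in the test fixtures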
+
+ # check merge toaster settings
+ checkbox = self.find('.checkbox-mergeattr')
+ if not checkbox.is_selected():
+ checkbox.click()
+
+ if self.PROJECT_NAME != 'TestProjectConfig':
+ # Reset project name if it's not the default one
+ self.PROJECT_NAME = 'TestProjectConfig'
+
+ self.find("#create-project-button").click()
+
+ try:
+ self.wait_until_visible('#hint-error-project-name', poll=2)
+ url = reverse('project', args=(TestProjectConfig.project_id, ))
+ self.get(url)
+ self.wait_until_visible('#config-nav', poll=3)
+ except TimeoutException:
+ self.wait_until_visible('#config-nav', poll=3)
+
+ def _random_string(self, length):
+ return ''.join(
+ random.choice(string.ascii_letters) for _ in range(length)
+ )
+
+ def _get_config_nav_item(self, index):
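+ # 'index' is the zero-based position of the entry in the left-hand
+ # configuration menu, an ordered list of <li> items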
+ config_nav = self.find('#config-nav')
+ return config_nav.find_elements(By.TAG_NAME, 'li')[index]
+
+ def _navigate_bbv_page(self):
+ """ Navigate to project BitBake variables page """
+ # check if the menu is displayed
+ if TestProjectConfig.project_id is None:
+ self._create_project(project_name=self._random_string(10))
+ current_url = self.driver.current_url
+ TestProjectConfig.project_id = get_projectId_from_url(current_url)
+ else:
+ url = reverse('projectconf', args=(TestProjectConfig.project_id,))
+ self.get(url)
+ self.wait_until_visible('#config-nav', poll=3)
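+ # the config menu entry at index 9 is assumed to be "BitBake variables"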
+ bbv_page_link = self._get_config_nav_item(9)
+ bbv_page_link.click()
+ self.wait_until_visible('#config-nav', poll=3)
+
+ def test_no_underscore_iamgefs_type(self):
+ """
+ Should not accept an IMAGE_FSTYPES value containing an underscore
+ """
+ self._navigate_bbv_page()
+ imagefs_type = "foo_bar"
+
+ self.wait_until_visible('#change-image_fstypes-icon', poll=2)
+
+ self.click('#change-image_fstypes-icon')
+
+ self.enter_text('#new-imagefs_types', imagefs_type)
+
+ element = self.wait_until_visible('#hintError-image-fs_type', poll=2)
+
+ self.assertTrue(("A valid image type cannot include underscores" in element.text),
+ "Did not find underscore error message")
+
+ def test_checkbox_verification(self):
+ """
+ Should automatically check the matching checkbox when the user
+ enters a value in the text box that corresponds to a checkbox value.
+ """
+ self._navigate_bbv_page()
+
+ imagefs_type = "btrfs"
+
+ self.wait_until_visible('#change-image_fstypes-icon', poll=2)
+
+ self.click('#change-image_fstypes-icon')
+
+ self.enter_text('#new-imagefs_types', imagefs_type)
+
+ checkboxes = self.driver.find_elements(By.XPATH, "//input[@class='fs-checkbox-fstypes']")
+
+ for checkbox in checkboxes:
+ if checkbox.get_attribute("value") == "btrfs":
+ self.assertEqual(checkbox.is_selected(), True)
+
+ def test_textbox_with_checkbox_verification(self):
+ """
+ Should automatically add or remove the value in the text box when
+ the user checks or unchecks the corresponding checkbox.
+ """
+ self._navigate_bbv_page()
+
+ self.wait_until_visible('#change-image_fstypes-icon', poll=2)
+
+ self.click('#change-image_fstypes-icon')
+
+ checkboxes_selector = '.fs-checkbox-fstypes'
+
+ self.wait_until_visible(checkboxes_selector, poll=2)
+ checkboxes = self.find_all(checkboxes_selector)
+
+ for checkbox in checkboxes:
+ if checkbox.get_attribute("value") == "cpio":
+ checkbox.click()
+ element = self.driver.find_element(By.ID, 'new-imagefs_types')
+
+ self.wait_until_visible('#new-imagefs_types', poll=2)
+
+ self.assertTrue(("cpio" in element.get_attribute('value'),
+ "Imagefs not added into the textbox"))
+ checkbox.click()
+ self.assertTrue(("cpio" not in element.text),
+ "Image still present in the textbox")
+
+ def test_set_download_dir(self):
+ """
+ Validate the allowed and disallowed types in the directory field for
+ DL_DIR
+ """
+ self._navigate_bbv_page()
+
+ # activate the input to edit download dir
+ try:
+ change_dl_dir_btn = self.wait_until_visible('#change-dl_dir-icon', poll=2)
+ except TimeoutException:
+ # If download dir is not displayed, test is skipped
+ change_dl_dir_btn = None
+
+ if change_dl_dir_btn:
+ change_dl_dir_btn.click()
+
+ # downloads dir path doesn't start with / or ${...}
+ input_field = self.wait_until_visible('#new-dl_dir', poll=2)
+ input_field.clear()
+ self.enter_text('#new-dl_dir', 'home/foo')
+ element = self.wait_until_visible('#hintError-initialChar-dl_dir', poll=2)
+
+ msg = 'downloads directory path starts with invalid character but ' \
+ 'treated as valid'
+ self.assertTrue((self.INVALID_PATH_START_TEXT in element.text), msg)
+
+ # downloads dir path has a space
+ self.driver.find_element(By.ID, 'new-dl_dir').clear()
+ self.enter_text('#new-dl_dir', '/foo/bar a')
+
+ element = self.wait_until_visible('#hintError-dl_dir', poll=2)
+ msg = 'downloads directory path characters invalid but treated as valid'
+ self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg)
+
+ # downloads dir path starts with ${...} but has a space
+ self.driver.find_element(By.ID,'new-dl_dir').clear()
+ self.enter_text('#new-dl_dir', '${TOPDIR}/down foo')
+
+ element = self.wait_until_visible('#hintError-dl_dir', poll=2)
+ msg = 'downloads directory path characters invalid but treated as valid'
+ self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg)
+
+ # downloads dir path starts with /
+ self.driver.find_element(By.ID,'new-dl_dir').clear()
+ self.enter_text('#new-dl_dir', '/bar/foo')
+
+ hidden_element = self.driver.find_element(By.ID,'hintError-dl_dir')
+ self.assertEqual(hidden_element.is_displayed(), False,
+ 'downloads directory path valid but treated as invalid')
+
+ # downloads dir path starts with ${...}
+ self.driver.find_element(By.ID,'new-dl_dir').clear()
+ self.enter_text('#new-dl_dir', '${TOPDIR}/down')
+
+ hidden_element = self.driver.find_element(By.ID,'hintError-dl_dir')
+ self.assertEqual(hidden_element.is_displayed(), False,
+ 'downloads directory path valid but treated as invalid')
+
+ def test_set_sstate_dir(self):
+ """
+ Validate the allowed and disallowed types in the directory field for
+ SSTATE_DIR
+ """
+ self._navigate_bbv_page()
+
+ try:
+ btn_chg_sstate_dir = self.wait_until_visible(
+ '#change-sstate_dir-icon',
+ poll=2
+ )
+ self.click('#change-sstate_dir-icon')
+ except TimeoutException:
+ # If sstate_dir is not displayed, test is skipped
+ btn_chg_sstate_dir = None
+
+ if btn_chg_sstate_dir: # Skip continuation if sstate_dir is not displayed
+ # path doesn't start with / or ${...}
+ input_field = self.wait_until_visible('#new-sstate_dir', poll=2)
+ input_field.clear()
+ self.enter_text('#new-sstate_dir', 'home/foo')
+ element = self.wait_until_visible('#hintError-initialChar-sstate_dir', poll=2)
+
+ msg = 'sstate directory path starts with invalid character but ' \
+ 'treated as valid'
+ self.assertTrue((self.INVALID_PATH_START_TEXT in element.text), msg)
+
+ # path has a space
+ self.driver.find_element(By.ID, 'new-sstate_dir').clear()
+ self.enter_text('#new-sstate_dir', '/foo/bar a')
+
+ element = self.wait_until_visible('#hintError-sstate_dir', poll=2)
+ msg = 'sstate directory path characters invalid but treated as valid'
+ self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg)
+
+ # path starts with ${...} but has a space
+ self.driver.find_element(By.ID,'new-sstate_dir').clear()
+ self.enter_text('#new-sstate_dir', '${TOPDIR}/down foo')
+
+ element = self.wait_until_visible('#hintError-sstate_dir', poll=2)
+ msg = 'sstate directory path characters invalid but treated as valid'
+ self.assertTrue((self.INVALID_PATH_CHAR_TEXT in element.text), msg)
+
+ # path starts with /
+ self.driver.find_element(By.ID,'new-sstate_dir').clear()
+ self.enter_text('#new-sstate_dir', '/bar/foo')
+
+ hidden_element = self.driver.find_element(By.ID, 'hintError-sstate_dir')
+ self.assertEqual(hidden_element.is_displayed(), False,
+ 'sstate directory path valid but treated as invalid')
+
+ # paths starts with ${...}
+ self.driver.find_element(By.ID, 'new-sstate_dir').clear()
+ self.enter_text('#new-sstate_dir', '${TOPDIR}/down')
+
+ hidden_element = self.driver.find_element(By.ID, 'hintError-sstate_dir')
+ self.assertEqual(hidden_element.is_displayed(), False,
+ 'sstate directory path valid but treated as invalid')
+
+ def _change_bbv_value(self, **kwargs):
+ """ Change bitbake variable value """
+ var_name, field, btn_id, input_id, value, save_btn, *_ = kwargs.values()
+ self._navigate_bbv_page()
+ self.wait_until_visible(f'#{btn_id}', poll=2)
+ if kwargs.get('new_variable'):
+ self.find(f"#{btn_id}").clear()
+ self.enter_text(f"#{btn_id}", f"{var_name}")
+ else:
+ self.click(f'#{btn_id}')
+ self.wait_until_visible(f'#{input_id}', poll=2)
+
+ if kwargs.get('is_select'):
+ select = Select(self.find(f'#{input_id}'))
+ select.select_by_visible_text(value)
+ else:
+ self.find(f"#{input_id}").clear()
+ self.enter_text(f'#{input_id}', f'{value}')
+ self.click(f'#{save_btn}')
+ value_displayed = str(self.wait_until_visible(f'#{field}').text).lower()
+ msg = f'{var_name} variable not changed'
+ self.assertTrue(str(value).lower() in value_displayed, msg)
+
+ def test_change_distro_var(self):
+ """ Test changing distro variable """
+ self._change_bbv_value(
+ var_name='DISTRO',
+ field='distro',
+ btn_id='change-distro-icon',
+ input_id='new-distro',
+ value='poky-changed',
+ save_btn="apply-change-distro",
+ )
+
+ def test_set_image_install_append_var(self):
+ """ Test setting IMAGE_INSTALL:append variable """
+ self._change_bbv_value(
+ var_name='IMAGE_INSTALL:append',
+ field='image_install',
+ btn_id='change-image_install-icon',
+ input_id='new-image_install',
+ value='bash, apt, busybox',
+ save_btn="apply-change-image_install",
+ )
+
+ def test_set_package_classes_var(self):
+ """ Test setting PACKAGE_CLASSES variable """
+ self._change_bbv_value(
+ var_name='PACKAGE_CLASSES',
+ field='package_classes',
+ btn_id='change-package_classes-icon',
+ input_id='package_classes-select',
+ value='package_deb',
+ save_btn="apply-change-package_classes",
+ is_select=True,
+ )
+
+ def test_create_new_bbv(self):
+ """ Test creating new bitbake variable """
+ self._change_bbv_value(
+ var_name='New_Custom_Variable',
+ field='configvar-list',
+ btn_id='variable',
+ input_id='value',
+ value='new variable value',
+ save_btn="add-configvar-button",
+ new_variable=True
+ )
diff --git a/lib/toaster/tests/functional/test_project_page.py b/lib/toaster/tests/functional/test_project_page.py
new file mode 100644
index 000000000..adbe3587e
--- /dev/null
+++ b/lib/toaster/tests/functional/test_project_page.py
@@ -0,0 +1,792 @@
+#! /usr/bin/env python3 #
+# BitBake Toaster UI tests implementation
+#
+# Copyright (C) 2023 Savoir-faire Linux
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import os
+import random
+import string
+from unittest import skip
+import pytest
+from django.urls import reverse
+from django.utils import timezone
+from selenium.webdriver.common.keys import Keys
+from selenium.webdriver.support.select import Select
+from selenium.common.exceptions import TimeoutException
+from tests.functional.functional_helpers import SeleniumFunctionalTestCase
+from orm.models import Build, Project, Target
+from selenium.webdriver.common.by import By
+
+from .utils import get_projectId_from_url, wait_until_build, wait_until_build_cancelled
+
+
+@pytest.mark.django_db
+@pytest.mark.order("last")
+class TestProjectPage(SeleniumFunctionalTestCase):
+ project_id = None
+ PROJECT_NAME = 'TestProjectPage'
+
+ def _create_project(self, project_name):
+ """ Create/Test new project using:
+ - Project Name: Any string
+ - Release: Any string
+ - Merge Toaster settings: True or False
+ """
+ self.get(reverse('newproject'))
+ self.wait_until_visible('#new-project-name')
+ self.find("#new-project-name").send_keys(project_name)
+ select = Select(self.find("#projectversion"))
+ select.select_by_value('3')
+
+ # check merge toaster settings
+ checkbox = self.find('.checkbox-mergeattr')
+ if not checkbox.is_selected():
+ checkbox.click()
+
+ if self.PROJECT_NAME != 'TestProjectPage':
+ # Reset project name if it's not the default one
+ self.PROJECT_NAME = 'TestProjectPage'
+
+ self.find("#create-project-button").click()
+
+ try:
+ self.wait_until_visible('#hint-error-project-name')
+ url = reverse('project', args=(TestProjectPage.project_id, ))
+ self.get(url)
+ self.wait_until_visible('#config-nav', poll=3)
+ except TimeoutException:
+ self.wait_until_visible('#config-nav', poll=3)
+
+ def _random_string(self, length):
+ return ''.join(
+ random.choice(string.ascii_letters) for _ in range(length)
+ )
+
+ def _navigate_to_project_page(self):
+ # Navigate to project page
+ if TestProjectPage.project_id is None:
+ self._create_project(project_name=self._random_string(10))
+ current_url = self.driver.current_url
+ TestProjectPage.project_id = get_projectId_from_url(current_url)
+ else:
+ url = reverse('project', args=(TestProjectPage.project_id,))
+ self.get(url)
+ self.wait_until_visible('#config-nav')
+
+ def _get_create_builds(self, **kwargs):
+ """ Create a build and return the build object """
+ # parameters for builds to associate with the projects
+ now = timezone.now()
+ self.project1_build_success = {
+ 'project': Project.objects.get(id=TestProjectPage.project_id),
+ 'started_on': now,
+ 'completed_on': now,
+ 'outcome': Build.SUCCEEDED
+ }
+
+ self.project1_build_failure = {
+ 'project': Project.objects.get(id=TestProjectPage.project_id),
+ 'started_on': now,
+ 'completed_on': now,
+ 'outcome': Build.FAILED
+ }
+ build1 = Build.objects.create(**self.project1_build_success)
+ build2 = Build.objects.create(**self.project1_build_failure)
+
+ # add some targets to these builds so they have recipe links
+ # (and so we can find the row in the ToasterTable corresponding to
+ # a particular build)
+ Target.objects.create(build=build1, target='foo')
+ Target.objects.create(build=build2, target='bar')
+
+ if kwargs:
+ # Create kwargs.get('success') builds with success status with target
+ # and kwargs.get('failure') builds with failure status with target
+ for i in range(kwargs.get('success', 0)):
+ now = timezone.now()
+ self.project1_build_success['started_on'] = now
+ self.project1_build_success[
+ 'completed_on'] = now - timezone.timedelta(days=i)
+ build = Build.objects.create(**self.project1_build_success)
+ Target.objects.create(build=build,
+ target=f'{i}_success_recipe',
+ task=f'{i}_success_task')
+
+ for i in range(kwargs.get('failure', 0)):
+ now = timezone.now()
+ self.project1_build_failure['started_on'] = now
+ self.project1_build_failure[
+ 'completed_on'] = now - timezone.timedelta(days=i)
+ build = Build.objects.create(**self.project1_build_failure)
+ Target.objects.create(build=build,
+ target=f'{i}_fail_recipe',
+ task=f'{i}_fail_task')
+ return build1, build2
+
+ def _mixin_test_table_edit_column(
+ self,
+ table_id,
+ edit_btn_id,
+ list_check_box_id: list
+ ):
+ # Check edit column
+ edit_column = self.find(f'#{edit_btn_id}')
+ self.assertTrue(edit_column.is_displayed())
+ edit_column.click()
+ # Check dropdown is visible
+ self.wait_until_visible('ul.dropdown-menu.editcol')
+ for check_box_id in list_check_box_id:
+ # Check that we can hide/show table column
+ check_box = self.find(f'#{check_box_id}')
+ th_class = str(check_box_id).replace('checkbox-', '')
+ if check_box.is_selected():
+ # check if column is visible in table
+ self.assertTrue(
+ self.find(
+ f'#{table_id} thead th.{th_class}'
+ ).is_displayed(),
+ f"The {th_class} column is checked in EditColumn dropdown, but it's not visible in table"
+ )
+ check_box.click()
+ # check if column is hidden in table
+ self.assertFalse(
+ self.find(
+ f'#{table_id} thead th.{th_class}'
+ ).is_displayed(),
+ f"The {th_class} column is unchecked in EditColumn dropdown, but it's visible in table"
+ )
+ else:
+ # check if column is hidden in table
+ self.assertFalse(
+ self.find(
+ f'#{table_id} thead th.{th_class}'
+ ).is_displayed(),
+ f"The {th_class} column is unchecked in EditColumn dropdown, but it's visible in table"
+ )
+ check_box.click()
+ # check if column is visible in table
+ self.assertTrue(
+ self.find(
+ f'#{table_id} thead th.{th_class}'
+ ).is_displayed(),
+ f"The {th_class} column is checked in EditColumn dropdown, but it's not visible in table"
+ )
+
+ def _get_config_nav_item(self, index):
+ config_nav = self.find('#config-nav')
+ return config_nav.find_elements(By.TAG_NAME, 'li')[index]
+
+ def _navigate_to_config_nav(self, nav_id, nav_index):
+ # navigate to the project page
+ self._navigate_to_project_page()
+ # click on "Software recipe" tab
+ soft_recipe = self._get_config_nav_item(nav_index)
+ soft_recipe.click()
+ self.wait_until_visible(f'#{nav_id}')
+
+ def _mixin_test_table_show_rows(self, table_selector, **kwargs):
+ """ Test the show rows feature in the builds table on the all builds page """
+ def test_show_rows(row_to_show, show_row_link):
+ # Check that we can show rows == row_to_show
+ show_row_link.select_by_value(str(row_to_show))
+ self.wait_until_visible(f'#{table_selector} tbody tr', poll=3)
+ # check at least some rows are visible
+ self.assertTrue(
+ len(self.find_all(f'#{table_selector} tbody tr')) > 0
+ )
+ self.wait_until_present(f'#{table_selector} tbody tr')
+ show_rows = self.driver.find_elements(
+ By.XPATH,
+ f'//select[@class="form-control pagesize-{table_selector}"]'
+ )
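+ # page sizes the ToasterTable pagesize dropdown is expected to offer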
+ rows_to_show = [10, 25, 50, 100, 150]
+ to_skip = kwargs.get('to_skip', [])
+ # Check show rows
+ for show_row_link in show_rows:
+ show_row_link = Select(show_row_link)
+ for row_to_show in rows_to_show:
+ if row_to_show not in to_skip:
+ test_show_rows(row_to_show, show_row_link)
+
+ def _mixin_test_table_search_input(self, **kwargs):
+ input_selector, input_text, searchBtn_selector, table_selector, *_ = kwargs.values()
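+ # positional unpack of kwargs.values(); keyword-argument order matters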
+ # Test search input
+ self.wait_until_visible(f'#{input_selector}')
+ recipe_input = self.find(f'#{input_selector}')
+ recipe_input.send_keys(input_text)
+ self.find(f'#{searchBtn_selector}').click()
+ self.wait_until_visible(f'#{table_selector} tbody tr')
+ rows = self.find_all(f'#{table_selector} tbody tr')
+ self.assertTrue(len(rows) > 0)
+
+ def test_create_project(self):
+ """ Create/Test new project using:
+ - Project Name: Any string
+ - Release: Any string
+ - Merge Toaster settings: True or False
+ """
+ self._create_project(project_name=self.PROJECT_NAME)
+
+ def test_image_recipe_editColumn(self):
+ """ Test the edit column feature in image recipe table on project page """
+ self._get_create_builds(success=10, failure=10)
+
+ url = reverse('projectimagerecipes', args=(TestProjectPage.project_id,))
+ self.get(url)
+ self.wait_until_present('#imagerecipestable tbody tr')
+
+ column_list = [
+ 'get_description_or_summary', 'layer_version__get_vcs_reference',
+ 'layer_version__layer__name', 'license', 'recipe-file', 'section',
+ 'version'
+ ]
+
+ # Check that we can hide the edit column
+ self._mixin_test_table_edit_column(
+ 'imagerecipestable',
+ 'edit-columns-button',
+ [f'checkbox-{column}' for column in column_list]
+ )
+
+ def test_page_header_on_project_page(self):
+ """ Check page header in project page:
+ - AT LEFT -> Logo of Yocto project, displayed, clickable
+ - "Toaster"+" Information icon", displayed, clickable
+ - "Server Icon" + "All builds", displayed, clickable
+ - "Directory Icon" + "All projects", displayed, clickable
+ - "Book Icon" + "Documentation", displayed, clickable
+ - AT RIGHT -> button "New project", displayed, clickable
+ """
+ # navigate to the project page
+ self._navigate_to_project_page()
+
+ # check page header
+ # AT LEFT -> Logo of Yocto project
+ logo = self.driver.find_element(
+ By.XPATH,
+ "//div[@class='toaster-navbar-brand']",
+ )
+ logo_img = logo.find_element(By.TAG_NAME, 'img')
+ self.assertTrue(logo_img.is_displayed(),
+ 'Logo of Yocto project not found')
+ self.assertTrue(
+ '/static/img/logo.png' in str(logo_img.get_attribute('src')),
+ 'Logo of Yocto project not found'
+ )
+ # "Toaster"+" Information icon", clickable
+ toaster = self.driver.find_element(
+ By.XPATH,
+ "//div[@class='toaster-navbar-brand']//a[@class='brand']",
+ )
+ self.assertTrue(toaster.is_displayed(), 'Toaster not found')
+ self.assertTrue(toaster.text == 'Toaster')
+ info_sign = self.find('.glyphicon-info-sign')
+ self.assertTrue(info_sign.is_displayed())
+
+ # "Server Icon" + "All builds"
+ all_builds = self.find('#navbar-all-builds')
+ all_builds_link = all_builds.find_element(By.TAG_NAME, 'a')
+ self.assertTrue("All builds" in all_builds_link.text)
+ self.assertTrue(
+ '/toastergui/builds/' in str(all_builds_link.get_attribute('href'))
+ )
+ server_icon = all_builds.find_element(By.TAG_NAME, 'i')
+ self.assertTrue(
+ server_icon.get_attribute('class') == 'glyphicon glyphicon-tasks'
+ )
+ self.assertTrue(server_icon.is_displayed())
+
+ # "Directory Icon" + "All projects"
+ all_projects = self.find('#navbar-all-projects')
+ all_projects_link = all_projects.find_element(By.TAG_NAME, 'a')
+ self.assertTrue("All projects" in all_projects_link.text)
+ self.assertTrue(
+ '/toastergui/projects/' in str(all_projects_link.get_attribute(
+ 'href'))
+ )
+ dir_icon = all_projects.find_element(By.TAG_NAME, 'i')
+ self.assertTrue(
+ dir_icon.get_attribute('class') == 'icon-folder-open'
+ )
+ self.assertTrue(dir_icon.is_displayed())
+
+ # "Book Icon" + "Documentation"
+ toaster_docs_link = self.find('#navbar-docs')
+ toaster_docs_link_link = toaster_docs_link.find_element(By.TAG_NAME,
+ 'a')
+ self.assertTrue("Documentation" in toaster_docs_link_link.text)
+ self.assertTrue(
+ toaster_docs_link_link.get_attribute('href') == 'http://docs.yoctoproject.org/toaster-manual/index.html#toaster-user-manual'
+ )
+ book_icon = toaster_docs_link.find_element(By.TAG_NAME, 'i')
+ self.assertTrue(
+ book_icon.get_attribute('class') == 'glyphicon glyphicon-book'
+ )
+ self.assertTrue(book_icon.is_displayed())
+
+ # AT RIGHT -> button "New project"
+ new_project_button = self.find('#new-project-button')
+ self.assertTrue(new_project_button.is_displayed())
+ self.assertTrue(new_project_button.text == 'New project')
+ new_project_button.click()
+ self.assertTrue(
+ '/toastergui/newproject/' in str(self.driver.current_url)
+ )
+
+ def test_edit_project_name(self):
+ """ Test edit project name:
+ - Click on "Edit" icon button
+ - Change project name
+ - Click on "Save" button
+ - Check project name is changed
+ """
+ # navigate to the project page
+ self._navigate_to_project_page()
+
+ # click on "Edit" icon button
+ self.wait_until_visible('#project-name-container')
+ edit_button = self.find('#project-change-form-toggle')
+ edit_button.click()
+ project_name_input = self.find('#project-name-change-input')
+ self.assertTrue(project_name_input.is_displayed())
+ project_name_input.clear()
+ project_name_input.send_keys('New Name')
+ self.find('#project-name-change-btn').click()
+
+ # check project name is changed
+ self.wait_until_visible('#project-name-container')
+ self.assertTrue(
+ 'New Name' in str(self.find('#project-name-container').text)
+ )
+
+ def test_project_page_tabs(self):
+ """ Test project tabs:
+ - "configuration" tab
+ - "Builds" tab
+ - "Import layers" tab
+ - "New custom image" tab
+ Check the search box can be used to build recipes
+ """
+ # navigate to the project page
+ self._navigate_to_project_page()
+
+ # check "configuration" tab
+ self.wait_until_visible('#topbar-configuration-tab')
+ config_tab = self.find('#topbar-configuration-tab')
+ self.assertTrue(config_tab.get_attribute('class') == 'active')
+ self.assertTrue('Configuration' in str(config_tab.text))
+ self.assertTrue(
+ f"/toastergui/project/{TestProjectPage.project_id}" in str(self.driver.current_url)
+ )
+
+ def get_tabs():
+ # tabs links list
+ return self.driver.find_elements(
+ By.XPATH,
+ '//div[@id="project-topbar"]//li'
+ )
+
+ def check_tab_link(tab_index, tab_name, url):
+ tab = get_tabs()[tab_index]
+ tab_link = tab.find_element(By.TAG_NAME, 'a')
+ self.assertTrue(url in tab_link.get_attribute('href'))
+ self.assertTrue(tab_name in tab_link.text)
+ self.assertTrue(tab.get_attribute('class') == 'active')
+
+ # check "Builds" tab
+ builds_tab = get_tabs()[1]
+ builds_tab.find_element(By.TAG_NAME, 'a').click()
+ check_tab_link(
+ 1,
+ 'Builds',
+ f"/toastergui/project/{TestProjectPage.project_id}/builds"
+ )
+
+ # check "Import layers" tab
+ import_layers_tab = get_tabs()[2]
+ import_layers_tab.find_element(By.TAG_NAME, 'a').click()
+ check_tab_link(
+ 2,
+ 'Import layer',
+ f"/toastergui/project/{TestProjectPage.project_id}/importlayer"
+ )
+
+ # check "New custom image" tab
+ new_custom_image_tab = get_tabs()[3]
+ new_custom_image_tab.find_element(By.TAG_NAME, 'a').click()
+ check_tab_link(
+ 3,
+ 'New custom image',
+ f"/toastergui/project/{TestProjectPage.project_id}/newcustomimage"
+ )
+
+ # check search box can be used to build recipes
+ search_box = self.find('#build-input')
+ search_box.send_keys('core-image-minimal')
+ self.find('#build-button').click()
+ self.wait_until_visible('#latest-builds')
+ lastest_builds = self.driver.find_elements(
+ By.XPATH,
+ '//div[@id="latest-builds"]',
+ )
+ last_build = lastest_builds[0]
+ self.assertTrue(
+ 'core-image-minimal' in str(last_build.text)
+ )
+
+ def test_softwareRecipe_page(self):
+ """ Test software recipe page
+ - Check title "Compatible software recipes" is displayed
+ - Check search input
+ - Check "build recipe" button works
+ - Check software recipe table features (show/hide column, pagination)
+ """
+ self._navigate_to_config_nav('softwarerecipestable', 4)
+ # check title "Compatible software recipes" is displayed
+ self.assertTrue("Compatible software recipes" in self.get_page_source())
+ # Test search input
+ self._mixin_test_table_search_input(
+ input_selector='search-input-softwarerecipestable',
+ input_text='busybox',
+ searchBtn_selector='search-submit-softwarerecipestable',
+ table_selector='softwarerecipestable'
+ )
+ # check "build recipe" button works
+ rows = self.find_all('#softwarerecipestable tbody tr')
+ image_to_build = rows[0]
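+ # NOTE: an XPath beginning with '//' is evaluated from the document
+ # root, not relative to image_to_build; a './/' prefix would be needed
+ # to scope the search to this row. Here the first match is the first
+ # row's button, which happens to be the intended element.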
+ build_btn = image_to_build.find_element(
+ By.XPATH,
+ '//td[@class="add-del-layers"]//a[1]'
+ )
+ build_btn.click()
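+ # wait_until_build (a helper from .utils) is assumed to poll until the
+ # build reaches one of the listed states and return the observed state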
+ build_state = wait_until_build(self, 'queued cloning starting parsing failed')
+ lastest_builds = self.driver.find_elements(
+ By.XPATH,
+ '//div[@id="latest-builds"]/div'
+ )
+ self.assertTrue(len(lastest_builds) > 0)
+ last_build = lastest_builds[0]
+ cancel_button = last_build.find_element(
+ By.XPATH,
+ '//span[@class="cancel-build-btn pull-right alert-link"]',
+ )
+ cancel_button.click()
+ if 'starting' not in build_state: # change build state when cancelled in starting state
+ wait_until_build_cancelled(self)
+
+ # check software recipe table features (show/hide column, pagination)
+ self._navigate_to_config_nav('softwarerecipestable', 4)
+ column_list = [
+ 'get_description_or_summary',
+ 'layer_version__get_vcs_reference',
+ 'layer_version__layer__name',
+ 'license',
+ 'recipe-file',
+ 'section',
+ 'version',
+ ]
+ self._mixin_test_table_edit_column(
+ 'softwarerecipestable',
+ 'edit-columns-button',
+ [f'checkbox-{column}' for column in column_list]
+ )
+ self._navigate_to_config_nav('softwarerecipestable', 4)
+ # check show rows (pagination)
+ self._mixin_test_table_show_rows(
+ table_selector='softwarerecipestable',
+ to_skip=[150],
+ )
+
+ def test_machines_page(self):
+ """ Test Machine page
+ - Check if title "Compatible machines" is displayed
+ - Check search input
+ - Check "Select machine" button works
+ - Check "Add layer" button works
+ - Check Machine table features (show/hide column, pagination)
+ """
+ self._navigate_to_config_nav('machinestable', 5)
+ # check title "Compatible software recipes" is displayed
+ self.assertTrue("Compatible machines" in self.get_page_source())
+ # Test search input
+ self._mixin_test_table_search_input(
+ input_selector='search-input-machinestable',
+ input_text='qemux86-64',
+ searchBtn_selector='search-submit-machinestable',
+ table_selector='machinestable'
+ )
+ # check "Select machine" button works
+ rows = self.find_all('#machinestable tbody tr')
+ machine_to_select = rows[0]
+ select_btn = machine_to_select.find_element(
+ By.XPATH,
+ '//td[@class="add-del-layers"]//a[1]'
+ )
+ select_btn.send_keys(Keys.RETURN)
+ self.wait_until_visible('#config-nav')
+ project_machine_name = self.find('#project-machine-name')
+ self.assertTrue(
+ 'qemux86-64' in project_machine_name.text
+ )
+ # check "Add layer" button works
+ self._navigate_to_config_nav('machinestable', 5)
+ # Search for a machine whose layer is not in the project
+ self._mixin_test_table_search_input(
+ input_selector='search-input-machinestable',
+ input_text='qemux86-64-tpm2',
+ searchBtn_selector='search-submit-machinestable',
+ table_selector='machinestable'
+ )
+ self.wait_until_visible('#machinestable tbody tr', poll=3)
+ rows = self.find_all('#machinestable tbody tr')
+ machine_to_add = rows[0]
+ add_btn = machine_to_add.find_element(By.XPATH, '//td[@class="add-del-layers"]')
+ add_btn.click()
+ self.wait_until_visible('#change-notification')
+ change_notification = self.find('#change-notification')
+ self.assertTrue(
+ 'You have added 1 layer to your project' in str(change_notification.text)
+ )
+ # check Machine table features (show/hide column, pagination)
+ self._navigate_to_config_nav('machinestable', 5)
+ column_list = [
+ 'description',
+ 'layer_version__get_vcs_reference',
+ 'layer_version__layer__name',
+ 'machinefile',
+ ]
+ self._mixin_test_table_edit_column(
+ 'machinestable',
+ 'edit-columns-button',
+ [f'checkbox-{column}' for column in column_list]
+ )
+ self._navigate_to_config_nav('machinestable', 5)
+ # check show rows (pagination)
+ self._mixin_test_table_show_rows(
+ table_selector='machinestable',
+ to_skip=[150],
+ )
+
+ def test_layers_page(self):
+ """ Test layers page
+ - Check if title "Compatible layerss" is displayed
+ - Check search input
+ - Check "Add layer" button works
+ - Check "Remove layer" button works
+ - Check layers table features (show/hide column, pagination)
+ """
+ self._navigate_to_config_nav('layerstable', 6)
+ # check title "Compatible layers" is displayed
+ self.assertTrue("Compatible layers" in self.get_page_source())
+ # Test search input
+ input_text='meta-tanowrt'
+ self._mixin_test_table_search_input(
+ input_selector='search-input-layerstable',
+ input_text=input_text,
+ searchBtn_selector='search-submit-layerstable',
+ table_selector='layerstable'
+ )
+ # check "Add layer" button works
+ self.wait_until_visible('#layerstable tbody tr', poll=3)
+ rows = self.find_all('#layerstable tbody tr')
+ layer_to_add = rows[0]
+ add_btn = layer_to_add.find_element(
+ By.XPATH,
+ '//td[@class="add-del-layers"]'
+ )
+ add_btn.click()
+ # check modal is displayed
+ self.wait_until_visible('#dependencies-modal', poll=3)
+ list_dependencies = self.find_all('#dependencies-list li')
+ # click on add-layers button
+ add_layers_btn = self.driver.find_element(
+ By.XPATH,
+ '//form[@id="dependencies-modal-form"]//button[@class="btn btn-primary"]'
+ )
+ add_layers_btn.click()
+ self.wait_until_visible('#change-notification')
+ change_notification = self.find('#change-notification')
+ self.assertTrue(
+ f'You have added {len(list_dependencies)+1} layers to your project: {input_text} and its dependencies' in str(change_notification.text)
+ )
+ # check "Remove layer" button works
+ self.wait_until_visible('#layerstable tbody tr', poll=3)
+ rows = self.find_all('#layerstable tbody tr')
+ layer_to_remove = rows[0]
+ remove_btn = layer_to_remove.find_element(
+ By.XPATH,
+ '//td[@class="add-del-layers"]'
+ )
+ remove_btn.click()
+ self.wait_until_visible('#change-notification', poll=2)
+ change_notification = self.find('#change-notification')
+ self.assertTrue(
+ f'You have removed 1 layer from your project: {input_text}' in str(change_notification.text)
+ )
+ # check layers table features (show/hide column, pagination)
+ self._navigate_to_config_nav('layerstable', 6)
+ column_list = [
+ 'dependencies',
+ 'revision',
+ 'layer__vcs_url',
+ 'git_subdir',
+ 'layer__summary',
+ ]
+ self._mixin_test_table_edit_column(
+ 'layerstable',
+ 'edit-columns-button',
+ [f'checkbox-{column}' for column in column_list]
+ )
+ self._navigate_to_config_nav('layerstable', 6)
+ # check show rows (pagination)
+ self._mixin_test_table_show_rows(
+ table_selector='layerstable',
+ to_skip=[150],
+ )
+
+ def test_distro_page(self):
+ """ Test distros page
+ - Check if title "Compatible distros" is displayed
+ - Check search input
+ - Check "Add layer" button works
+ - Check distro table features (show/hide column, pagination)
+ """
+ self._navigate_to_config_nav('distrostable', 7)
+ # check title "Compatible distros" is displayed
+ self.assertTrue("Compatible Distros" in self.get_page_source())
+ # Test search input
+ input_text='poky-altcfg'
+ self._mixin_test_table_search_input(
+ input_selector='search-input-distrostable',
+ input_text=input_text,
+ searchBtn_selector='search-submit-distrostable',
+ table_selector='distrostable'
+ )
+ # check "Add distro" button works
+ rows = self.find_all('#distrostable tbody tr')
+ distro_to_add = rows[0]
+ add_btn = distro_to_add.find_element(
+ By.XPATH,
+ '//td[@class="add-del-layers"]//a[1]'
+ )
+ add_btn.click()
+ self.wait_until_visible('#change-notification', poll=2)
+ change_notification = self.find('#change-notification')
+ self.assertTrue(
+ f'You have changed the distro to: {input_text}' in str(change_notification.text)
+ )
+ # check distro table features (show/hide column, pagination)
+ self._navigate_to_config_nav('distrostable', 7)
+ column_list = [
+ 'description',
+ 'templatefile',
+ 'layer_version__get_vcs_reference',
+ 'layer_version__layer__name',
+ ]
+ self._mixin_test_table_edit_column(
+ 'distrostable',
+ 'edit-columns-button',
+ [f'checkbox-{column}' for column in column_list]
+ )
+ self._navigate_to_config_nav('distrostable', 7)
+ # check show rows (pagination)
+ self._mixin_test_table_show_rows(
+ table_selector='distrostable',
+ to_skip=[150],
+ )
+
+ def test_single_layer_page(self):
+ """ Test layer page
+ - Check if title is displayed
+ - Check add/remove layer button works
+ - Check tabs(layers, recipes, machines) are displayed
+ - Check left section is displayed
+ - Check layer name
+ - Check layer summary
+ - Check layer description
+ """
+ url = reverse("layerdetails", args=(TestProjectPage.project_id, 8))
+ self.get(url)
+ self.wait_until_visible('.page-header')
+ # check title is displayed
+ self.assertTrue(self.find('.page-header h1').is_displayed())
+
+ # check remove layer button works
+ remove_layer_btn = self.find('#add-remove-layer-btn')
+ remove_layer_btn.click()
+ self.wait_until_visible('#change-notification', poll=2)
+ change_notification = self.find('#change-notification')
+ self.assertTrue(
+ 'You have removed 1 layer from your project' in str(change_notification.text)
+ )
+ # check add layer button works (re-add the layer removed above)
+ add_layer_btn = self.find('#add-remove-layer-btn')
+ add_layer_btn.click()
+ self.wait_until_visible('#change-notification')
+ change_notification = self.find('#change-notification')
+ self.assertTrue(
+ 'You have added 1 layer to your project' in str(change_notification.text)
+ )
+ # check tabs(layers, recipes, machines) are displayed
+ tabs = self.find_all('.nav-tabs li')
+ self.assertEqual(len(tabs), 3)
+ # Check first tab
+ tabs[0].click()
+ self.assertTrue(
+ 'active' in str(self.find('#information').get_attribute('class'))
+ )
+ # Check second tab
+ tabs[1].click()
+ self.assertTrue(
+ 'active' in str(self.find('#recipes').get_attribute('class'))
+ )
+ # Check third tab
+ tabs[2].click()
+ self.assertTrue(
+ 'active' in str(self.find('#machines').get_attribute('class'))
+ )
+ # Check left section is displayed
+ section = self.find('.well')
+ # Check layer name
+ self.assertTrue(
+ section.find_element(By.XPATH, '//h2[1]').is_displayed()
+ )
+ # Check layer summary
+ self.assertTrue("Summary" in section.text)
+ # Check layer description
+ self.assertTrue("Description" in section.text)
+
+ def test_single_recipe_page(self):
+ """ Test recipe page
+ - Check if title is displayed
+ - Check the "Add layer" button is displayed
+ - Check left section is displayed
+ - Check recipe: name, summary, description, Version, Section,
+ License, Approx. packages included, Approx. package size, Recipe file
+ """
+ url = reverse("recipedetails", args=(TestProjectPage.project_id, 53428))
+ self.get(url)
+ self.wait_until_visible('.page-header')
+ # check title is displayed
+ self.assertTrue(self.find('.page-header h1').is_displayed())
+ # check add recipe layer displayed
+ add_recipe_layer_btn = self.find('#add-layer-btn')
+ self.assertTrue(add_recipe_layer_btn.is_displayed())
+ # check left section is displayed
+ section = self.find('.well')
+ # Check recipe name
+ self.assertTrue(
+ section.find_element(By.XPATH, '//h2[1]').is_displayed()
+ )
+ # Check recipe sections details info are displayed
+ self.assertTrue("Summary" in section.text)
+ self.assertTrue("Description" in section.text)
+ self.assertTrue("Version" in section.text)
+ self.assertTrue("Section" in section.text)
+ self.assertTrue("License" in section.text)
+ self.assertTrue("Approx. packages included" in section.text)
+ self.assertTrue("Approx. package size" in section.text)
+ self.assertTrue("Recipe file" in section.text)
diff --git a/lib/toaster/tests/functional/test_project_page_tab_config.py b/lib/toaster/tests/functional/test_project_page_tab_config.py
new file mode 100644
index 000000000..eb905ddf3
--- /dev/null
+++ b/lib/toaster/tests/functional/test_project_page_tab_config.py
@@ -0,0 +1,528 @@
+#! /usr/bin/env python3 #
+# BitBake Toaster UI tests implementation
+#
+# Copyright (C) 2023 Savoir-faire Linux
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import string
+import random
+import pytest
+from django.urls import reverse
+from selenium.webdriver import Keys
+from selenium.webdriver.support.select import Select
+from selenium.common.exceptions import ElementClickInterceptedException, NoSuchElementException, TimeoutException
+from orm.models import Project
+from tests.functional.functional_helpers import SeleniumFunctionalTestCase
+from selenium.webdriver.common.by import By
+
+from .utils import get_projectId_from_url, wait_until_build, wait_until_build_cancelled
+
+
+@pytest.mark.django_db
+@pytest.mark.order("last")
+class TestProjectConfigTab(SeleniumFunctionalTestCase):
+ PROJECT_NAME = 'TestProjectConfigTab'
+ project_id = None
+
+ def _create_project(self, project_name, **kwargs):
+ """ Create/Test new project using:
+ - Project Name: Any string
+ - Release: Any string
+ - Merge Toaster settings: True or False
+ """
+ release = kwargs.get('release', '3')
+ self.get(reverse('newproject'))
+ self.wait_until_visible('#new-project-name')
+ self.find("#new-project-name").send_keys(project_name)
+ select = Select(self.find("#projectversion"))
+ select.select_by_value(release)
+
+ # check merge toaster settings
+ checkbox = self.find('.checkbox-mergeattr')
+ if not checkbox.is_selected():
+ checkbox.click()
+
+ if self.PROJECT_NAME != 'TestProjectConfigTab':
+ # Reset project name if it's not the default one
+ self.PROJECT_NAME = 'TestProjectConfigTab'
+
+ self.find("#create-project-button").click()
+
+ try:
+ self.wait_until_visible('#hint-error-project-name', poll=3)
+ url = reverse('project', args=(TestProjectConfigTab.project_id, ))
+ self.get(url)
+ self.wait_until_visible('#config-nav', poll=3)
+ except TimeoutException:
+ self.wait_until_visible('#config-nav', poll=3)
+
+ def _random_string(self, length):
+ return ''.join(
+ random.choice(string.ascii_letters) for _ in range(length)
+ )
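+ # Illustrative, hypothetical output: self._random_string(10) could
+ # return e.g. 'kDpQzXbWma'; ASCII letters are enough to keep the
+ # generated test project names unique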
+
+ def _navigate_to_project_page(self):
+ # Navigate to project page
+ if TestProjectConfigTab.project_id is None:
+ self._create_project(project_name=self._random_string(10))
+ current_url = self.driver.current_url
+ TestProjectConfigTab.project_id = get_projectId_from_url(
+ current_url)
+ else:
+ url = reverse('project', args=(TestProjectConfigTab.project_id,))
+ self.get(url)
+ self.wait_until_visible('#config-nav')
+
+ def _create_builds(self):
+ # check the search box can be used to build recipes
+ search_box = self.find('#build-input')
+ search_box.send_keys('foo')
+ self.find('#build-button').click()
+ self.wait_until_present('#latest-builds')
+ # loop until reach the parsing state
+ wait_until_build(self, 'queued cloning starting parsing failed')
+ latest_builds = self.driver.find_elements(
+ By.XPATH,
+ '//div[@id="latest-builds"]/div',
+ )
+ last_build = latest_builds[0]
+ self.assertTrue(
+ 'foo' in str(last_build.text)
+ )
+ try:
+ cancel_button = last_build.find_element(
+ By.XPATH,
+ '//span[@class="cancel-build-btn pull-right alert-link"]',
+ )
+ cancel_button.click()
+ except NoSuchElementException:
+ # Skip if the build is already cancelled
+ pass
+ wait_until_build_cancelled(self)
+
+ def _get_tabs(self):
+ # tabs links list
+ return self.driver.find_elements(
+ By.XPATH,
+ '//div[@id="project-topbar"]//li'
+ )
+
+ def _get_config_nav_item(self, index):
+ config_nav = self.find('#config-nav')
+ return config_nav.find_elements(By.TAG_NAME, 'li')[index]
+
+ def test_project_config_nav(self):
+ """ Test project config tab navigation:
+ - Check if the menu is displayed and contains the right elements:
+ - Configuration
+ - COMPATIBLE METADATA
+ - Custom images
+ - Image recipes
+ - Software recipes
+ - Machines
+ - Layers
+ - Distro
+ - EXTRA CONFIGURATION
+ - Bitbake variables
+ - Actions
+ - Delete project
+ """
+ self._navigate_to_project_page()
+
+ def _get_config_nav_item(index):
+ config_nav = self.find('#config-nav')
+ return config_nav.find_elements(By.TAG_NAME, 'li')[index]
+
+ def check_config_nav_item(index, item_name, url):
+ item = _get_config_nav_item(index)
+ self.assertTrue(item_name in item.text)
+ self.assertTrue(item.get_attribute('class') == 'active')
+ self.assertTrue(url in self.driver.current_url)
+
+ # check if the menu contains the right elements
+ # COMPATIBLE METADATA
+ compatible_metadata = _get_config_nav_item(1)
+ self.assertTrue(
+ "compatible metadata" in compatible_metadata.text.lower()
+ )
+ # EXTRA CONFIGURATION
+ extra_configuration = _get_config_nav_item(8)
+ self.assertTrue(
+ "extra configuration" in extra_configuration.text.lower()
+ )
+ # Actions
+ actions = _get_config_nav_item(10)
+ self.assertTrue("actions" in str(actions.text).lower())
+
+ conf_nav_list = [
+ # config
+ [0, 'Configuration',
+ f"/toastergui/project/{TestProjectConfigTab.project_id}"],
+ # custom images
+ [2, 'Custom images',
+ f"/toastergui/project/{TestProjectConfigTab.project_id}/customimages"],
+ # image recipes
+ [3, 'Image recipes',
+ f"/toastergui/project/{TestProjectConfigTab.project_id}/images"],
+ # software recipes
+ [4, 'Software recipes',
+ f"/toastergui/project/{TestProjectConfigTab.project_id}/softwarerecipes"],
+ # machines
+ [5, 'Machines',
+ f"/toastergui/project/{TestProjectConfigTab.project_id}/machines"],
+ # layers
+ [6, 'Layers',
+ f"/toastergui/project/{TestProjectConfigTab.project_id}/layers"],
+ # distro
+ [7, 'Distros',
+ f"/toastergui/project/{TestProjectConfigTab.project_id}/distros"],
+ # [9, 'BitBake variables', f"/toastergui/project/{TestProjectConfigTab.project_id}/configuration"], # bitbake variables
+ ]
+ for index, item_name, url in conf_nav_list:
+ item = _get_config_nav_item(index)
+ if item.get_attribute('class') != 'active':
+ item.click()
+ check_config_nav_item(index, item_name, url)
+
+ def test_image_recipe_editColumn(self):
+ """ Test the edit column feature in image recipe table on project page """
+ def test_edit_column(check_box_id):
+ # Check that we can hide/show table column
+ check_box = self.find(f'#{check_box_id}')
+ th_class = str(check_box_id).replace('checkbox-', '')
+ if check_box.is_selected():
+ # check if column is visible in table
+ self.assertTrue(
+ self.find(
+ f'#imagerecipestable thead th.{th_class}'
+ ).is_displayed(),
+ f"The {th_class} column is checked in EditColumn dropdown, but it's not visible in table"
+ )
+ check_box.click()
+ # check if column is hidden in table
+ self.assertFalse(
+ self.find(
+ f'#imagerecipestable thead th.{th_class}'
+ ).is_displayed(),
+ f"The {th_class} column is unchecked in EditColumn dropdown, but it's visible in table"
+ )
+ else:
+ # check if column is hidden in table
+ self.assertFalse(
+ self.find(
+ f'#imagerecipestable thead th.{th_class}'
+ ).is_displayed(),
+ f"The {th_class} column is unchecked in EditColumn dropdown, but it's visible in table"
+ )
+ check_box.click()
+ # check if column is visible in table
+ self.assertTrue(
+ self.find(
+ f'#imagerecipestable thead th.{th_class}'
+ ).is_displayed(),
+ f"The {th_class} column is checked in EditColumn dropdown, but it's not visible in table"
+ )
+
+ self._navigate_to_project_page()
+ # navigate to project image recipe page
+ recipe_image_page_link = self._get_config_nav_item(3)
+ recipe_image_page_link.click()
+ self.wait_until_present('#imagerecipestable tbody tr')
+
+ # Check edit column
+ edit_column = self.find('#edit-columns-button')
+ self.assertTrue(edit_column.is_displayed())
+ edit_column.click()
+ # Check dropdown is visible
+ self.wait_until_visible('ul.dropdown-menu.editcol')
+
+ # Check that each column can be hidden/shown via its checkbox
+ test_edit_column('checkbox-get_description_or_summary')
+ test_edit_column('checkbox-layer_version__get_vcs_reference')
+ test_edit_column('checkbox-layer_version__layer__name')
+ test_edit_column('checkbox-license')
+ test_edit_column('checkbox-recipe-file')
+ test_edit_column('checkbox-section')
+ test_edit_column('checkbox-version')
+
+ def test_image_recipe_show_rows(self):
+ """ Test the show rows feature in image recipe table on project page """
+ def test_show_rows(row_to_show, show_row_link):
+ # Check that we can show rows == row_to_show
+ show_row_link.select_by_value(str(row_to_show))
+ self.wait_until_visible('#imagerecipestable tbody tr', poll=3)
+ # check at least some rows are visible
+ self.assertTrue(
+ len(self.find_all('#imagerecipestable tbody tr')) > 0
+ )
+
+ self._navigate_to_project_page()
+ # navigate to project image recipe page
+ recipe_image_page_link = self._get_config_nav_item(3)
+ recipe_image_page_link.click()
+ self.wait_until_present('#imagerecipestable tbody tr')
+
+ show_rows = self.driver.find_elements(
+ By.XPATH,
+ '//select[@class="form-control pagesize-imagerecipestable"]'
+ )
+ # Check show rows
+ for show_row_link in show_rows:
+ show_row_link = Select(show_row_link)
+ test_show_rows(10, show_row_link)
+ test_show_rows(25, show_row_link)
+ test_show_rows(50, show_row_link)
+ test_show_rows(100, show_row_link)
+ test_show_rows(150, show_row_link)
+
+ def test_project_config_tab_right_section(self):
+ """ Test project config tab right section contains five blocks:
+ - Machine:
+ - check 'Machine' is displayed
+ - check can change Machine
+ - Distro:
+ - check 'Distro' is displayed
+ - check can change Distro
+ - Most built recipes:
+ - check 'Most built recipes' is displayed
+ - check can select a recipe and build it
+ - Project release:
+ - check 'Project release' is displayed
+ - check project has right release displayed
+ - Layers:
+ - check can add a layer if exists
+ - check at least three layers are displayed
+ - openembedded-core
+ - meta-poky
+ - meta-yocto-bsp
+ """
+ # Create a new project for this test
+ project_name = self._random_string(10)
+ self._create_project(project_name=project_name)
+ # check if the menu is displayed
+ self.wait_until_visible('#project-page')
+ block_l = self.driver.find_element(
+ By.XPATH, '//*[@id="project-page"]/div[2]')
+ project_release = self.driver.find_element(
+ By.XPATH, '//*[@id="project-page"]/div[1]/div[4]')
+ layers = block_l.find_element(By.ID, 'layer-container')
+
+ def check_machine_distro(self, item_name, new_item_name, block_id):
+ block = self.find(f'#{block_id}')
+ title = block.find_element(By.TAG_NAME, 'h3')
+ self.assertTrue(item_name.capitalize() in title.text)
+ edit_btn = self.find(f'#change-{item_name}-toggle')
+ edit_btn.click()
+ self.wait_until_visible(f'#{item_name}-change-input')
+ name_input = self.find(f'#{item_name}-change-input')
+ name_input.clear()
+ name_input.send_keys(new_item_name)
+ change_btn = self.find(f'#{item_name}-change-btn')
+ change_btn.click()
+ self.wait_until_visible(f'#project-{item_name}-name')
+ project_name = self.find(f'#project-{item_name}-name')
+ self.assertTrue(new_item_name in project_name.text)
+ # check change notification is displayed
+ change_notification = self.find('#change-notification')
+ self.assertTrue(
+ f'You have changed the {item_name} to: {new_item_name}' in change_notification.text
+ )
+
+ # Machine
+ check_machine_distro(self, 'machine', 'qemux86-64', 'machine-section')
+ # Distro
+ check_machine_distro(self, 'distro', 'poky-altcfg', 'distro-section')
+
+ # Project release
+ title = project_release.find_element(By.TAG_NAME, 'h3')
+ self.assertTrue("Project release" in title.text)
+ self.assertTrue(
+ "Yocto Project master" in self.find('#project-release-title').text
+ )
+ # Layers
+ title = layers.find_element(By.TAG_NAME, 'h3')
+ self.assertTrue("Layers" in title.text)
+ # check at least three layers are displayed
+ # openembedded-core
+ # meta-poky
+ # meta-yocto-bsp
+ layers_list = layers.find_element(By.ID, 'layers-in-project-list')
+ layers_list_items = layers_list.find_elements(By.TAG_NAME, 'li')
+ # remove all layers except the first three layers
+ for i in range(3, len(layers_list_items)):
+ layers_list_items[i].find_element(By.TAG_NAME, 'span').click()
+ # check can add a layer if exists
+ add_layer_input = layers.find_element(By.ID, 'layer-add-input')
+ add_layer_input.send_keys('meta-oe')
+ self.wait_until_visible('#layer-container > form > div > span > div')
+ dropdown_item = self.driver.find_element(
+ By.XPATH,
+ '//*[@id="layer-container"]/form/div/span/div'
+ )
+ try:
+ dropdown_item.click()
+ except ElementClickInterceptedException:
+ self.skipTest(
+ "layer-container dropdown item click intercepted. Element not properly visible.")
+ add_layer_btn = layers.find_element(By.ID, 'add-layer-btn')
+ add_layer_btn.click()
+ self.wait_until_visible('#layers-in-project-list')
+ # check layer is added
+ layers_list_items = layers_list.find_elements(By.TAG_NAME, 'li')
+ self.assertTrue(len(layers_list_items) == 4)
+
+ def test_most_built_recipes(self):
+ """ Test the 'Most built recipes' block on the project page """
+ def rebuild_from_most_build_recipes(recipe_list_items):
+ checkbox = recipe_list_items[0].find_element(By.TAG_NAME, 'input')
+ checkbox.click()
+ build_btn = self.find('#freq-build-btn')
+ build_btn.click()
+ self.wait_until_visible('#latest-builds')
+ wait_until_build(self, 'queued cloning starting parsing failed')
+ latest_builds = self.driver.find_elements(
+ By.XPATH,
+ '//div[@id="latest-builds"]/div'
+ )
+ self.assertTrue(len(latest_builds) >= 2)
+ last_build = latest_builds[0]
+ try:
+ cancel_button = last_build.find_element(
+ By.XPATH,
+ '//span[@class="cancel-build-btn pull-right alert-link"]',
+ )
+ cancel_button.click()
+ except NoSuchElementException:
+ # Skip if the build is already cancelled
+ pass
+ wait_until_build_cancelled(self)
+ # Create a new project for remaining asserts
+ project_name = self._random_string(10)
+ self._create_project(project_name=project_name, release='2')
+ current_url = self.driver.current_url
+ TestProjectConfigTab.project_id = get_projectId_from_url(current_url)
+ url = current_url.split('?')[0]
+
+ # Create a new build
+ self._create_builds()
+
+ # back to project page
+ self.driver.get(url)
+
+ self.wait_until_visible('#project-page', poll=3)
+
+ # Most built recipes
+ most_built_recipes = self.driver.find_element(
+ By.XPATH, '//*[@id="project-page"]/div[1]/div[3]')
+ title = most_built_recipes.find_element(By.TAG_NAME, 'h3')
+ self.assertTrue("Most built recipes" in title.text)
+ # check can select a recipe and build it
+ self.wait_until_visible('#freq-build-list', poll=3)
+ recipe_list = self.find('#freq-build-list')
+ recipe_list_items = recipe_list.find_elements(By.TAG_NAME, 'li')
+ self.assertTrue(
+ len(recipe_list_items) > 0,
+ msg="Any recipes found in the most built recipes list",
+ )
+ rebuild_from_most_build_recipes(recipe_list_items)
+ TestProjectConfigTab.project_id = None # reset project id
+
+ def test_project_page_tab_importlayer(self):
+ """ Test project page tab import layer """
+ self._navigate_to_project_page()
+ # navigate to "Import layers" tab
+ import_layers_tab = self._get_tabs()[2]
+ import_layers_tab.find_element(By.TAG_NAME, 'a').click()
+ self.wait_until_visible('#layer-git-repo-url')
+
+ # Check git repo radio button
+ git_repo_radio = self.find('#git-repo-radio')
+ git_repo_radio.click()
+
+ # Set git repo url
+ input_repo_url = self.find('#layer-git-repo-url')
+ input_repo_url.send_keys('git://git.yoctoproject.org/meta-fake')
+ # Blur the input to trigger the validation
+ input_repo_url.send_keys(Keys.TAB)
+
+ # Check name is set
+ input_layer_name = self.find('#import-layer-name')
+ self.assertTrue(input_layer_name.get_attribute('value') == 'meta-fake')
+
+ # Set branch
+ input_branch = self.find('#layer-git-ref')
+ input_branch.send_keys('master')
+
+ # Import layer
+ self.find('#import-and-add-btn').click()
+
+ # Check layer is added
+ self.wait_until_visible('#layer-container')
+ block_l = self.driver.find_element(
+ By.XPATH, '//*[@id="project-page"]/div[2]')
+ layers = block_l.find_element(By.ID, 'layer-container')
+ layers_list = layers.find_element(By.ID, 'layers-in-project-list')
+ layers_list_items = layers_list.find_elements(By.TAG_NAME, 'li')
+ self.assertTrue(
+ 'meta-fake' in str(layers_list_items[-1].text)
+ )
+
+ def test_project_page_custom_image_no_image(self):
+ """ Test project page tab "New custom image" when no custom image """
+ project_name = self._random_string(10)
+ self._create_project(project_name=project_name)
+ current_url = self.driver.current_url
+ TestProjectConfigTab.project_id = get_projectId_from_url(current_url)
+ # navigate to "Custom image" tab
+ custom_image_section = self._get_config_nav_item(2)
+ custom_image_section.click()
+ self.wait_until_visible('#empty-state-customimagestable')
+
+ # Check message when no custom image
+ self.assertTrue(
+ "You have not created any custom images yet." in str(
+ self.find('#empty-state-customimagestable').text
+ )
+ )
+ div_empty_msg = self.find('#empty-state-customimagestable')
+ link_create_custom_image = div_empty_msg.find_element(
+ By.TAG_NAME, 'a')
+ self.assertTrue(TestProjectConfigTab.project_id is not None)
+ self.assertTrue(
+ f"/toastergui/project/{TestProjectConfigTab.project_id}/newcustomimage" in str(
+ link_create_custom_image.get_attribute('href')
+ )
+ )
+ self.assertTrue(
+ "Create your first custom image" in str(
+ link_create_custom_image.text
+ )
+ )
+ TestProjectConfigTab.project_id = None # reset project id
+
+ def test_project_page_image_recipe(self):
+ """ Test project page section images
+ - Check image recipes are displayed
+ - Check search input
+ - Check image recipe build button works
+ - Check image recipe table features(show/hide column, pagination)
+ """
+ self._navigate_to_project_page()
+ # navigate to "Images section"
+ images_section = self._get_config_nav_item(3)
+ images_section.click()
+ self.wait_until_visible('#imagerecipestable')
+ rows = self.find_all('#imagerecipestable tbody tr')
+ self.assertTrue(len(rows) > 0)
+
+ # Test search input
+ self.wait_until_visible('#search-input-imagerecipestable')
+ recipe_input = self.find('#search-input-imagerecipestable')
+ recipe_input.send_keys('core-image-minimal')
+ self.find('#search-submit-imagerecipestable').click()
+ self.wait_until_visible('#imagerecipestable tbody tr')
+ rows = self.find_all('#imagerecipestable tbody tr')
+ self.assertTrue(len(rows) > 0)
diff --git a/lib/toaster/tests/functional/utils.py b/lib/toaster/tests/functional/utils.py
new file mode 100644
index 000000000..7269fa180
--- /dev/null
+++ b/lib/toaster/tests/functional/utils.py
@@ -0,0 +1,89 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# BitBake Toaster UI tests implementation
+#
+# Copyright (C) 2023 Savoir-faire Linux
+#
+# SPDX-License-Identifier: GPL-2.0-only
+
+
+from time import sleep
+from selenium.common.exceptions import NoSuchElementException, StaleElementReferenceException, TimeoutException
+from selenium.webdriver.common.by import By
+
+from orm.models import Build
+
+
+def wait_until_build(test_instance, state):
+ timeout = 60
+ start_time = 0
+ build_state = ''
+ while True:
+ try:
+ if start_time > timeout:
+ raise TimeoutException(
+ f'Build did not reach {state} state within {timeout} seconds'
+ )
+ last_build_state = test_instance.driver.find_element(
+ By.XPATH,
+ '//*[@id="latest-builds"]/div[1]//div[@class="build-state"]',
+ )
+ build_state = last_build_state.get_attribute(
+ 'data-build-state')
+ state_text = state.lower().split()
+ if any(x in str(build_state).lower() for x in state_text):
+ return str(build_state).lower()
+ if 'failed' in str(build_state).lower():
+ break
+ except NoSuchElementException:
+ continue
+ except TimeoutException:
+ break
+ start_time += 1
+ sleep(1) # take a breath and try again
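+
+# Illustrative usage (a sketch, not part of this suite): poll until the
+# latest build reaches one of the listed states, e.g.:
+#
+#   state = wait_until_build(self, 'queued cloning starting parsing failed')
+#   # 'state' is the matched state (lower-cased), or None if the poll gave up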
+
+def wait_until_build_cancelled(test_instance):
+ """ Cancel build take a while sometime, the method is to wait driver action
+ until build being cancelled
+ """
+ timeout = 30
+ start_time = 0
+ build = None
+ while True:
+ try:
+ if start_time > timeout:
+ raise TimeoutException(
+ f'Build did not reach cancelled state within {timeout} seconds'
+ )
+ last_build_state = test_instance.driver.find_element(
+ By.XPATH,
+ '//*[@id="latest-builds"]/div[1]//div[@class="build-state"]',
+ )
+ build_state = last_build_state.get_attribute(
+ 'data-build-state')
+ if 'failed' in str(build_state).lower():
+ break
+ if 'cancelling' in str(build_state).lower():
+ # Change build state to cancelled
+ if not build: # get build object only once
+ build = Build.objects.last()
+ build.outcome = Build.CANCELLED
+ build.save()
+ if 'cancelled' in str(build_state).lower():
+ break
+ except NoSuchElementException:
+ continue
+ except StaleElementReferenceException:
+ continue
+ except TimeoutException:
+ break
+ start_time += 1
+ sleep(1) # take a breath and try again
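+
+# Illustrative usage (a sketch): typically invoked right after clicking a
+# build's cancel button, e.g.:
+#
+#   cancel_button.click()
+#   wait_until_build_cancelled(self)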
+
+def get_projectId_from_url(url):
+ # Example URLs:
+ # url = 'http://domainname.com/toastergui/project/1656/whatever'
+ # url = 'http://domainname.com/toastergui/project/1/'
+ # url = 'http://domainname.com/toastergui/project/186'
+ assert '/toastergui/project/' in url, "URL is not valid"
+ url_to_list = url.split('/toastergui/project/')
+ return int(url_to_list[1].split('/')[0]) # project_id
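+
+# Example (illustrative): each URL shape above yields the same integer id:
+#
+#   get_projectId_from_url('http://example.com/toastergui/project/186')  # -> 186
+#   get_projectId_from_url('http://example.com/toastergui/project/1/')   # -> 1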
diff --git a/lib/toaster/tests/toaster-tests-requirements.txt b/lib/toaster/tests/toaster-tests-requirements.txt
index 4f9fcc46d..71cc08343 100644
--- a/lib/toaster/tests/toaster-tests-requirements.txt
+++ b/lib/toaster/tests/toaster-tests-requirements.txt
@@ -1 +1,7 @@
-selenium==2.49.2
+selenium>=4.13.0
+pytest==7.4.2
+pytest-django==4.5.2
+pytest-env==1.1.0
+pytest-html==4.0.2
+pytest-metadata==3.0.0
+pytest-order==1.1.0
diff --git a/lib/toaster/tests/views/test_views.py b/lib/toaster/tests/views/test_views.py
index 735d596bc..e1adfcf86 100644
--- a/lib/toaster/tests/views/test_views.py
+++ b/lib/toaster/tests/views/test_views.py
@@ -9,6 +9,8 @@
"""Test cases for Toaster GUI and ReST."""
+import os
+import pytest
from django.test import TestCase
from django.test.client import RequestFactory
from django.urls import reverse
@@ -19,6 +21,7 @@ from orm.models import Layer_Version, Recipe
from orm.models import CustomImageRecipe
from orm.models import CustomImagePackage
+from bldcontrol.models import BuildEnvironment
import inspect
import toastergui
@@ -32,19 +35,32 @@ PROJECT_NAME2 = "test project 2"
CLI_BUILDS_PROJECT_NAME = 'Command line builds'
+
class ViewTests(TestCase):
"""Tests to verify view APIs."""
fixtures = ['toastergui-unittest-data']
+    builddir = os.environ.get('BUILDDIR')
def setUp(self):
self.project = Project.objects.first()
+
self.recipe1 = Recipe.objects.get(pk=2)
+        # create a file and assign it to recipe1's file_path
+        file_path = f"{self.builddir}/{self.recipe1.name.strip().replace(' ', '-')}.bb"
+ with open(file_path, 'w') as f:
+ f.write('foo')
+ self.recipe1.file_path = file_path
+ self.recipe1.save()
+
self.customr = CustomImageRecipe.objects.first()
self.cust_package = CustomImagePackage.objects.first()
self.package = Package.objects.first()
self.lver = Layer_Version.objects.first()
+ if BuildEnvironment.objects.count() == 0:
+ BuildEnvironment.objects.create(betype=BuildEnvironment.TYPE_LOCAL)
+
def test_get_base_call_returns_html(self):
"""Basic test for all-projects view"""
@@ -226,7 +242,7 @@ class ViewTests(TestCase):
recipe = CustomImageRecipe.objects.create(
name=name, project=self.project,
base_recipe=self.recipe1,
- file_path="/tmp/testing",
+            file_path=f"{self.builddir}/testing",
layer_version=self.customr.layer_version)
url = reverse('xhr_customrecipe_id', args=(recipe.id,))
response = self.client.delete(url)
@@ -297,7 +313,7 @@ class ViewTests(TestCase):
"""Download the recipe file generated for the custom image"""
# Create a dummy recipe file for the custom image generation to read
- open("/tmp/a_recipe.bb", 'a').close()
+        open(f"{self.builddir}/a_recipe.bb", 'a').close()
response = self.client.get(reverse('customrecipedownload',
args=(self.project.id,
self.customr.id)))
diff --git a/lib/toaster/toastergui/api.py b/lib/toaster/toastergui/api.py
index b4cdc335e..e367bd910 100644
--- a/lib/toaster/toastergui/api.py
+++ b/lib/toaster/toastergui/api.py
@@ -11,7 +11,7 @@ import os
import re
import logging
import json
-import subprocess
+import glob
from collections import Counter
from orm.models import Project, ProjectTarget, Build, Layer_Version
@@ -227,20 +227,18 @@ class XhrSetDefaultImageUrl(View):
# same logical name
# * Each project that uses a layer will have its own
# LayerVersion and Project Layer for it
-# * During the Paroject delete process, when the last
+# * During the Project delete process, when the last
# LayerVersion for a 'local_source_dir' layer is deleted
# then the Layer record is deleted to remove orphans
#
def scan_layer_content(layer,layer_version):
# if this is a local layer directory, we can immediately scan its content
- if layer.local_source_dir:
+ if os.path.isdir(layer.local_source_dir):
try:
# recipes-*/*/*.bb
- cmd = '%s %s' % ('ls', os.path.join(layer.local_source_dir,'recipes-*/*/*.bb'))
- recipes_list = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,stderr=subprocess.STDOUT).stdout.read()
- recipes_list = recipes_list.decode("utf-8").strip()
- if recipes_list and 'No such' not in recipes_list:
+ recipes_list = glob.glob(os.path.join(layer.local_source_dir, 'recipes-*/*/*.bb'))
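+            # glob.glob() returns full filesystem paths; the rfind-based
+            # slicing below extracts the layer-relative path and recipe name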
+            for recipe in recipes_list:
recipe_path = recipe[recipe.rfind('recipes-'):]
recipe_name = recipe[recipe.rfind('/')+1:].replace('.bb','')
@@ -260,6 +258,9 @@ def scan_layer_content(layer,layer_version):
except Exception as e:
logger.warning("ERROR:scan_layer_content: %s" % e)
+ else:
+        logger.warning("ERROR: scan_layer_content: local_source_dir is not a valid directory")
+ raise KeyError("local_source_dir")
class XhrLayer(View):
""" Delete, Get, Add and Update Layer information
@@ -456,15 +457,18 @@ class XhrLayer(View):
'layerdetailurl':
layer_dep.get_detailspage_url(project.pk)})
- # Scan the layer's content and update components
- scan_layer_content(layer,layer_version)
+ # Only scan_layer_content if layer is local
+ if layer_data.get('local_source_dir', None):
+ # Scan the layer's content and update components
+ scan_layer_content(layer,layer_version)
except Layer_Version.DoesNotExist:
return error_response("layer-dep-not-found")
except Project.DoesNotExist:
return error_response("project-not-found")
- except KeyError:
- return error_response("incorrect-parameters")
+ except KeyError as e:
+ _log("KeyError: %s" % e)
+            return error_response("incorrect-parameters")
return JsonResponse({'error': "ok",
'imported_layer': {
diff --git a/lib/toaster/toastergui/fixtures/toastergui-unittest-data.xml b/lib/toaster/toastergui/fixtures/toastergui-unittest-data.xml
index 4517ed176..f626572fd 100644
--- a/lib/toaster/toastergui/fixtures/toastergui-unittest-data.xml
+++ b/lib/toaster/toastergui/fixtures/toastergui-unittest-data.xml
@@ -6,10 +6,22 @@
<field type="CharField" name="dirpath">b</field>
<field type="CharField" name="branch">a</field>
</object>
+ <object pk="1" model="orm.distro">
+ <field type="DateTimeField" name="up_date"><None></None></field>
+ <field to="orm.layer_version" name="layer_version" rel="ManyToOneRel">1</field>
+ <field type="CharField" name="name">poky_distro1</field>
+ <field type="CharField" name="description">poky_distro1 description</field>
+ </object>
+ <object pk="2" model="orm.distro">
+ <field type="DateTimeField" name="up_date"><None></None></field>
+ <field to="orm.layer_version" name="layer_version" rel="ManyToOneRel">2</field>
+ <field type="CharField" name="name">poky_distro2</field>
+ <field type="CharField" name="description">poky_distro2 description</field>
+ </object>
<object pk="1" model="orm.release">
- <field type="CharField" name="name">master</field>
+ <field type="CharField" name="name">foo_master</field>
<field type="CharField" name="description">master project</field>
- <field to="orm.bitbake_version" name="bitbake_version">1</field>
+ <field to="orm.bitbakeversion" name="bitbake_version">1</field>
</object>
<object pk="1" model="orm.project">
<field type="CharField" name="name">a test project</field>
@@ -34,12 +46,12 @@
<object pk="1" model="orm.ProjectVariable">
<field to="orm.project" name="project" rel="ManyToOneRel">1</field>
<field type="CharField" name="name">MACHINE</field>
- <field type="TextField" name="value">qemux86</field>
+ <field type="TextField" name="value">qemux86-64</field>
</object>
<object pk="2" model="orm.ProjectVariable">
<field to="orm.project" name="project" rel="ManyToOneRel">2</field>
<field type="CharField" name="name">MACHINE</field>
- <field type="TextField" name="value">qemux86</field>
+ <field type="TextField" name="value">qemux86-64</field>
</object>
<object pk="1" model="orm.build">
<field to="orm.project" name="project" rel="ManyToOneRel">1</field>
@@ -67,7 +79,7 @@
</object>
<object pk="3" model="orm.build">
<field to="orm.project" name="project" rel="ManyToOneRel">1</field>
- <field type="CharField" name="machine">qemux86</field>
+ <field type="CharField" name="machine">qemux86-64</field>
<field type="CharField" name="distro"></field>
<field type="CharField" name="distro_version"></field>
<field type="DateTimeField" name="started_on">2016-02-12T18:46:20.114530+00:00</field>
@@ -79,7 +91,7 @@
</object>
<object pk="4" model="orm.build">
<field to="orm.project" name="project" rel="ManyToOneRel">2</field>
- <field type="CharField" name="machine">qemux86</field>
+ <field type="CharField" name="machine">qemux86-64</field>
<field type="CharField" name="distro"></field>
<field type="CharField" name="distro_version"></field>
<field type="DateTimeField" name="started_on">2016-02-11T18:46:20.114530+00:00</field>
diff --git a/lib/toaster/toastergui/forms.py b/lib/toaster/toastergui/forms.py
new file mode 100644
index 000000000..0f279e06c
--- /dev/null
+++ b/lib/toaster/toastergui/forms.py
@@ -0,0 +1,14 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# BitBake Toaster Implementation
+#
+# Copyright (C) 2023 Savoir-faire Linux
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+from django import forms
+from django.core.validators import FileExtensionValidator
+
+class LoadFileForm(forms.Form):
+ eventlog_file = forms.FileField(widget=forms.FileInput(attrs={'accept': '.json'}))
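+
+# Minimal usage sketch (assumes a view handling an uploaded event log):
+#
+#   form = LoadFileForm(request.POST, request.FILES)
+#   if form.is_valid():
+#       eventlog = form.cleaned_data['eventlog_file']  # an UploadedFile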
diff --git a/lib/toaster/toastergui/static/css/default.css b/lib/toaster/toastergui/static/css/default.css
index 5cd7e211a..284355e70 100644
--- a/lib/toaster/toastergui/static/css/default.css
+++ b/lib/toaster/toastergui/static/css/default.css
@@ -367,3 +367,31 @@ h2.panel-title { font-size: 30px; }
}
}
/* End copied in from newer version of Font-Awesome 4.3.0 */
+
+
+#overlay {
+ display: flex;
+ position: fixed;
+ top: 0;
+ left: 0;
+ width: 100%;
+ height: 100%;
+ background-color: rgba(0, 0, 0, 0.7);
+ align-items: center;
+ justify-content: center;
+ z-index: 999;
+}
+
+.spinner {
+ border: 6px solid rgba(255, 255, 255, 0.3);
+ border-radius: 50%;
+ border-top: 6px solid #3498db;
+ width: 50px;
+ height: 50px;
+ animation: spin 1s linear infinite;
+}
+
+@keyframes spin {
+ 0% { transform: rotate(0deg); }
+ 100% { transform: rotate(360deg); }
+}
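+
+/* Usage sketch (assumed markup, not part of this change):
+   <div id="overlay"><div class="spinner"></div></div>
+   #overlay centers its children, so the spinner appears mid-screen. */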
diff --git a/lib/toaster/toastergui/static/css/jquery.dataTables-1.13.8.min.css b/lib/toaster/toastergui/static/css/jquery.dataTables-1.13.8.min.css
new file mode 100644
index 000000000..c0a442ce0
--- /dev/null
+++ b/lib/toaster/toastergui/static/css/jquery.dataTables-1.13.8.min.css
@@ -0,0 +1 @@
+:root{--dt-row-selected: 13, 110, 253;--dt-row-selected-text: 255, 255, 255;--dt-row-selected-link: 9, 10, 11;--dt-row-stripe: 0, 0, 0;--dt-row-hover: 0, 0, 0;--dt-column-ordering: 0, 0, 0;--dt-html-background: white}:root.dark{--dt-html-background: rgb(33, 37, 41)}table.dataTable td.dt-control{text-align:center;cursor:pointer}table.dataTable td.dt-control:before{display:inline-block;color:rgba(0, 0, 0, 0.5);content:"â–¶"}table.dataTable tr.dt-hasChild td.dt-control:before{content:"â–¼"}html.dark table.dataTable td.dt-control:before{color:rgba(255, 255, 255, 0.5)}html.dark table.dataTable tr.dt-hasChild td.dt-control:before{color:rgba(255, 255, 255, 0.5)}table.dataTable thead>tr>th.sorting,table.dataTable thead>tr>th.sorting_asc,table.dataTable thead>tr>th.sorting_desc,table.dataTable thead>tr>th.sorting_asc_disabled,table.dataTable thead>tr>th.sorting_desc_disabled,table.dataTable thead>tr>td.sorting,table.dataTable thead>tr>td.sorting_asc,table.dataTable thead>tr>td.sorting_desc,table.dataTable thead>tr>td.sorting_asc_disabled,table.dataTable thead>tr>td.sorting_desc_disabled{cursor:pointer;position:relative;padding-right:26px}table.dataTable thead>tr>th.sorting:before,table.dataTable thead>tr>th.sorting:after,table.dataTable thead>tr>th.sorting_asc:before,table.dataTable thead>tr>th.sorting_asc:after,table.dataTable thead>tr>th.sorting_desc:before,table.dataTable thead>tr>th.sorting_desc:after,table.dataTable thead>tr>th.sorting_asc_disabled:before,table.dataTable thead>tr>th.sorting_asc_disabled:after,table.dataTable thead>tr>th.sorting_desc_disabled:before,table.dataTable thead>tr>th.sorting_desc_disabled:after,table.dataTable thead>tr>td.sorting:before,table.dataTable thead>tr>td.sorting:after,table.dataTable thead>tr>td.sorting_asc:before,table.dataTable thead>tr>td.sorting_asc:after,table.dataTable thead>tr>td.sorting_desc:before,table.dataTable thead>tr>td.sorting_desc:after,table.dataTable thead>tr>td.sorting_asc_disabled:before,table.dataTable thead>tr>td.sorting_asc_disabled:after,table.dataTable thead>tr>td.sorting_desc_disabled:before,table.dataTable thead>tr>td.sorting_desc_disabled:after{position:absolute;display:block;opacity:.125;right:10px;line-height:9px;font-size:.8em}table.dataTable thead>tr>th.sorting:before,table.dataTable thead>tr>th.sorting_asc:before,table.dataTable thead>tr>th.sorting_desc:before,table.dataTable thead>tr>th.sorting_asc_disabled:before,table.dataTable thead>tr>th.sorting_desc_disabled:before,table.dataTable thead>tr>td.sorting:before,table.dataTable thead>tr>td.sorting_asc:before,table.dataTable thead>tr>td.sorting_desc:before,table.dataTable thead>tr>td.sorting_asc_disabled:before,table.dataTable thead>tr>td.sorting_desc_disabled:before{bottom:50%;content:"â–²";content:"â–²"/""}table.dataTable thead>tr>th.sorting:after,table.dataTable thead>tr>th.sorting_asc:after,table.dataTable thead>tr>th.sorting_desc:after,table.dataTable thead>tr>th.sorting_asc_disabled:after,table.dataTable thead>tr>th.sorting_desc_disabled:after,table.dataTable thead>tr>td.sorting:after,table.dataTable thead>tr>td.sorting_asc:after,table.dataTable thead>tr>td.sorting_desc:after,table.dataTable thead>tr>td.sorting_asc_disabled:after,table.dataTable thead>tr>td.sorting_desc_disabled:after{top:50%;content:"â–¼";content:"â–¼"/""}table.dataTable thead>tr>th.sorting_asc:before,table.dataTable thead>tr>th.sorting_desc:after,table.dataTable thead>tr>td.sorting_asc:before,table.dataTable thead>tr>td.sorting_desc:after{opacity:.6}table.dataTable 
thead>tr>th.sorting_desc_disabled:after,table.dataTable thead>tr>th.sorting_asc_disabled:before,table.dataTable thead>tr>td.sorting_desc_disabled:after,table.dataTable thead>tr>td.sorting_asc_disabled:before{display:none}table.dataTable thead>tr>th:active,table.dataTable thead>tr>td:active{outline:none}div.dataTables_scrollBody>table.dataTable>thead>tr>th:before,div.dataTables_scrollBody>table.dataTable>thead>tr>th:after,div.dataTables_scrollBody>table.dataTable>thead>tr>td:before,div.dataTables_scrollBody>table.dataTable>thead>tr>td:after{display:none}div.dataTables_processing{position:absolute;top:50%;left:50%;width:200px;margin-left:-100px;margin-top:-26px;text-align:center;padding:2px;z-index:10}div.dataTables_processing>div:last-child{position:relative;width:80px;height:15px;margin:1em auto}div.dataTables_processing>div:last-child>div{position:absolute;top:0;width:13px;height:13px;border-radius:50%;background:rgb(13, 110, 253);background:rgb(var(--dt-row-selected));animation-timing-function:cubic-bezier(0, 1, 1, 0)}div.dataTables_processing>div:last-child>div:nth-child(1){left:8px;animation:datatables-loader-1 .6s infinite}div.dataTables_processing>div:last-child>div:nth-child(2){left:8px;animation:datatables-loader-2 .6s infinite}div.dataTables_processing>div:last-child>div:nth-child(3){left:32px;animation:datatables-loader-2 .6s infinite}div.dataTables_processing>div:last-child>div:nth-child(4){left:56px;animation:datatables-loader-3 .6s infinite}@keyframes datatables-loader-1{0%{transform:scale(0)}100%{transform:scale(1)}}@keyframes datatables-loader-3{0%{transform:scale(1)}100%{transform:scale(0)}}@keyframes datatables-loader-2{0%{transform:translate(0, 0)}100%{transform:translate(24px, 0)}}table.dataTable.nowrap th,table.dataTable.nowrap td{white-space:nowrap}table.dataTable th.dt-left,table.dataTable td.dt-left{text-align:left}table.dataTable th.dt-center,table.dataTable td.dt-center,table.dataTable td.dataTables_empty{text-align:center}table.dataTable th.dt-right,table.dataTable td.dt-right{text-align:right}table.dataTable th.dt-justify,table.dataTable td.dt-justify{text-align:justify}table.dataTable th.dt-nowrap,table.dataTable td.dt-nowrap{white-space:nowrap}table.dataTable thead th,table.dataTable thead td,table.dataTable tfoot th,table.dataTable tfoot td{text-align:left}table.dataTable thead th.dt-head-left,table.dataTable thead td.dt-head-left,table.dataTable tfoot th.dt-head-left,table.dataTable tfoot td.dt-head-left{text-align:left}table.dataTable thead th.dt-head-center,table.dataTable thead td.dt-head-center,table.dataTable tfoot th.dt-head-center,table.dataTable tfoot td.dt-head-center{text-align:center}table.dataTable thead th.dt-head-right,table.dataTable thead td.dt-head-right,table.dataTable tfoot th.dt-head-right,table.dataTable tfoot td.dt-head-right{text-align:right}table.dataTable thead th.dt-head-justify,table.dataTable thead td.dt-head-justify,table.dataTable tfoot th.dt-head-justify,table.dataTable tfoot td.dt-head-justify{text-align:justify}table.dataTable thead th.dt-head-nowrap,table.dataTable thead td.dt-head-nowrap,table.dataTable tfoot th.dt-head-nowrap,table.dataTable tfoot td.dt-head-nowrap{white-space:nowrap}table.dataTable tbody th.dt-body-left,table.dataTable tbody td.dt-body-left{text-align:left}table.dataTable tbody th.dt-body-center,table.dataTable tbody td.dt-body-center{text-align:center}table.dataTable tbody th.dt-body-right,table.dataTable tbody td.dt-body-right{text-align:right}table.dataTable tbody th.dt-body-justify,table.dataTable 
tbody td.dt-body-justify{text-align:justify}table.dataTable tbody th.dt-body-nowrap,table.dataTable tbody td.dt-body-nowrap{white-space:nowrap}table.dataTable{width:100%;margin:0 auto;clear:both;border-collapse:separate;border-spacing:0}table.dataTable thead th,table.dataTable tfoot th{font-weight:bold}table.dataTable>thead>tr>th,table.dataTable>thead>tr>td{padding:10px;border-bottom:1px solid rgba(0, 0, 0, 0.3)}table.dataTable>thead>tr>th:active,table.dataTable>thead>tr>td:active{outline:none}table.dataTable>tfoot>tr>th,table.dataTable>tfoot>tr>td{padding:10px 10px 6px 10px;border-top:1px solid rgba(0, 0, 0, 0.3)}table.dataTable tbody tr{background-color:transparent}table.dataTable tbody tr.selected>*{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.9);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.9);color:rgb(255, 255, 255);color:rgb(var(--dt-row-selected-text))}table.dataTable tbody tr.selected a{color:rgb(9, 10, 11);color:rgb(var(--dt-row-selected-link))}table.dataTable tbody th,table.dataTable tbody td{padding:8px 10px}table.dataTable.row-border>tbody>tr>th,table.dataTable.row-border>tbody>tr>td,table.dataTable.display>tbody>tr>th,table.dataTable.display>tbody>tr>td{border-top:1px solid rgba(0, 0, 0, 0.15)}table.dataTable.row-border>tbody>tr:first-child>th,table.dataTable.row-border>tbody>tr:first-child>td,table.dataTable.display>tbody>tr:first-child>th,table.dataTable.display>tbody>tr:first-child>td{border-top:none}table.dataTable.row-border>tbody>tr.selected+tr.selected>td,table.dataTable.display>tbody>tr.selected+tr.selected>td{border-top-color:#0262ef}table.dataTable.cell-border>tbody>tr>th,table.dataTable.cell-border>tbody>tr>td{border-top:1px solid rgba(0, 0, 0, 0.15);border-right:1px solid rgba(0, 0, 0, 0.15)}table.dataTable.cell-border>tbody>tr>th:first-child,table.dataTable.cell-border>tbody>tr>td:first-child{border-left:1px solid rgba(0, 0, 0, 0.15)}table.dataTable.cell-border>tbody>tr:first-child>th,table.dataTable.cell-border>tbody>tr:first-child>td{border-top:none}table.dataTable.stripe>tbody>tr.odd>*,table.dataTable.display>tbody>tr.odd>*{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.023);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-stripe), 0.023)}table.dataTable.stripe>tbody>tr.odd.selected>*,table.dataTable.display>tbody>tr.odd.selected>*{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.923);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.923)}table.dataTable.hover>tbody>tr:hover>*,table.dataTable.display>tbody>tr:hover>*{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.035);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-hover), 0.035)}table.dataTable.hover>tbody>tr.selected:hover>*,table.dataTable.display>tbody>tr.selected:hover>*{box-shadow:inset 0 0 0 9999px #0d6efd !important;box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 1) !important}table.dataTable.order-column>tbody tr>.sorting_1,table.dataTable.order-column>tbody tr>.sorting_2,table.dataTable.order-column>tbody tr>.sorting_3,table.dataTable.display>tbody tr>.sorting_1,table.dataTable.display>tbody tr>.sorting_2,table.dataTable.display>tbody tr>.sorting_3{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.019);box-shadow:inset 0 0 0 9999px rgba(var(--dt-column-ordering), 0.019)}table.dataTable.order-column>tbody tr.selected>.sorting_1,table.dataTable.order-column>tbody tr.selected>.sorting_2,table.dataTable.order-column>tbody tr.selected>.sorting_3,table.dataTable.display>tbody tr.selected>.sorting_1,table.dataTable.display>tbody 
tr.selected>.sorting_2,table.dataTable.display>tbody tr.selected>.sorting_3{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.919);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.919)}table.dataTable.display>tbody>tr.odd>.sorting_1,table.dataTable.order-column.stripe>tbody>tr.odd>.sorting_1{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.054);box-shadow:inset 0 0 0 9999px rgba(var(--dt-column-ordering), 0.054)}table.dataTable.display>tbody>tr.odd>.sorting_2,table.dataTable.order-column.stripe>tbody>tr.odd>.sorting_2{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.047);box-shadow:inset 0 0 0 9999px rgba(var(--dt-column-ordering), 0.047)}table.dataTable.display>tbody>tr.odd>.sorting_3,table.dataTable.order-column.stripe>tbody>tr.odd>.sorting_3{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.039);box-shadow:inset 0 0 0 9999px rgba(var(--dt-column-ordering), 0.039)}table.dataTable.display>tbody>tr.odd.selected>.sorting_1,table.dataTable.order-column.stripe>tbody>tr.odd.selected>.sorting_1{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.954);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.954)}table.dataTable.display>tbody>tr.odd.selected>.sorting_2,table.dataTable.order-column.stripe>tbody>tr.odd.selected>.sorting_2{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.947);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.947)}table.dataTable.display>tbody>tr.odd.selected>.sorting_3,table.dataTable.order-column.stripe>tbody>tr.odd.selected>.sorting_3{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.939);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.939)}table.dataTable.display>tbody>tr.even>.sorting_1,table.dataTable.order-column.stripe>tbody>tr.even>.sorting_1{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.019);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.019)}table.dataTable.display>tbody>tr.even>.sorting_2,table.dataTable.order-column.stripe>tbody>tr.even>.sorting_2{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.011);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.011)}table.dataTable.display>tbody>tr.even>.sorting_3,table.dataTable.order-column.stripe>tbody>tr.even>.sorting_3{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.003);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.003)}table.dataTable.display>tbody>tr.even.selected>.sorting_1,table.dataTable.order-column.stripe>tbody>tr.even.selected>.sorting_1{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.919);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.919)}table.dataTable.display>tbody>tr.even.selected>.sorting_2,table.dataTable.order-column.stripe>tbody>tr.even.selected>.sorting_2{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.911);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.911)}table.dataTable.display>tbody>tr.even.selected>.sorting_3,table.dataTable.order-column.stripe>tbody>tr.even.selected>.sorting_3{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.903);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.903)}table.dataTable.display tbody tr:hover>.sorting_1,table.dataTable.order-column.hover tbody tr:hover>.sorting_1{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.082);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-hover), 0.082)}table.dataTable.display tbody tr:hover>.sorting_2,table.dataTable.order-column.hover tbody tr:hover>.sorting_2{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.074);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-hover), 0.074)}table.dataTable.display tbody 
tr:hover>.sorting_3,table.dataTable.order-column.hover tbody tr:hover>.sorting_3{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.062);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-hover), 0.062)}table.dataTable.display tbody tr:hover.selected>.sorting_1,table.dataTable.order-column.hover tbody tr:hover.selected>.sorting_1{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.982);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.982)}table.dataTable.display tbody tr:hover.selected>.sorting_2,table.dataTable.order-column.hover tbody tr:hover.selected>.sorting_2{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.974);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.974)}table.dataTable.display tbody tr:hover.selected>.sorting_3,table.dataTable.order-column.hover tbody tr:hover.selected>.sorting_3{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.962);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.962)}table.dataTable.no-footer{border-bottom:1px solid rgba(0, 0, 0, 0.3)}table.dataTable.compact thead th,table.dataTable.compact thead td,table.dataTable.compact tfoot th,table.dataTable.compact tfoot td,table.dataTable.compact tbody th,table.dataTable.compact tbody td{padding:4px}table.dataTable th,table.dataTable td{box-sizing:content-box}.dataTables_wrapper{position:relative;clear:both}.dataTables_wrapper .dataTables_length{float:left}.dataTables_wrapper .dataTables_length select{border:1px solid #aaa;border-radius:3px;padding:5px;background-color:transparent;color:inherit;padding:4px}.dataTables_wrapper .dataTables_filter{float:right;text-align:right}.dataTables_wrapper .dataTables_filter input{border:1px solid #aaa;border-radius:3px;padding:5px;background-color:transparent;color:inherit;margin-left:3px}.dataTables_wrapper .dataTables_info{clear:both;float:left;padding-top:.755em}.dataTables_wrapper .dataTables_paginate{float:right;text-align:right;padding-top:.25em}.dataTables_wrapper .dataTables_paginate .paginate_button{box-sizing:border-box;display:inline-block;min-width:1.5em;padding:.5em 1em;margin-left:2px;text-align:center;text-decoration:none !important;cursor:pointer;color:inherit !important;border:1px solid transparent;border-radius:2px;background:transparent}.dataTables_wrapper .dataTables_paginate .paginate_button.current,.dataTables_wrapper .dataTables_paginate .paginate_button.current:hover{color:inherit !important;border:1px solid rgba(0, 0, 0, 0.3);background-color:rgba(0, 0, 0, 0.05);background:-webkit-gradient(linear, left top, left bottom, color-stop(0%, rgba(230, 230, 230, 0.05)), color-stop(100%, rgba(0, 0, 0, 0.05)));background:-webkit-linear-gradient(top, rgba(230, 230, 230, 0.05) 0%, rgba(0, 0, 0, 0.05) 100%);background:-moz-linear-gradient(top, rgba(230, 230, 230, 0.05) 0%, rgba(0, 0, 0, 0.05) 100%);background:-ms-linear-gradient(top, rgba(230, 230, 230, 0.05) 0%, rgba(0, 0, 0, 0.05) 100%);background:-o-linear-gradient(top, rgba(230, 230, 230, 0.05) 0%, rgba(0, 0, 0, 0.05) 100%);background:linear-gradient(to bottom, rgba(230, 230, 230, 0.05) 0%, rgba(0, 0, 0, 0.05) 100%)}.dataTables_wrapper .dataTables_paginate .paginate_button.disabled,.dataTables_wrapper .dataTables_paginate .paginate_button.disabled:hover,.dataTables_wrapper .dataTables_paginate .paginate_button.disabled:active{cursor:default;color:#666 !important;border:1px solid transparent;background:transparent;box-shadow:none}.dataTables_wrapper .dataTables_paginate .paginate_button:hover{color:white !important;border:1px solid 
#111;background-color:#111;background:-webkit-gradient(linear, left top, left bottom, color-stop(0%, #585858), color-stop(100%, #111));background:-webkit-linear-gradient(top, #585858 0%, #111 100%);background:-moz-linear-gradient(top, #585858 0%, #111 100%);background:-ms-linear-gradient(top, #585858 0%, #111 100%);background:-o-linear-gradient(top, #585858 0%, #111 100%);background:linear-gradient(to bottom, #585858 0%, #111 100%)}.dataTables_wrapper .dataTables_paginate .paginate_button:active{outline:none;background-color:#0c0c0c;background:-webkit-gradient(linear, left top, left bottom, color-stop(0%, #2b2b2b), color-stop(100%, #0c0c0c));background:-webkit-linear-gradient(top, #2b2b2b 0%, #0c0c0c 100%);background:-moz-linear-gradient(top, #2b2b2b 0%, #0c0c0c 100%);background:-ms-linear-gradient(top, #2b2b2b 0%, #0c0c0c 100%);background:-o-linear-gradient(top, #2b2b2b 0%, #0c0c0c 100%);background:linear-gradient(to bottom, #2b2b2b 0%, #0c0c0c 100%);box-shadow:inset 0 0 3px #111}.dataTables_wrapper .dataTables_paginate .ellipsis{padding:0 1em}.dataTables_wrapper .dataTables_length,.dataTables_wrapper .dataTables_filter,.dataTables_wrapper .dataTables_info,.dataTables_wrapper .dataTables_processing,.dataTables_wrapper .dataTables_paginate{color:inherit}.dataTables_wrapper .dataTables_scroll{clear:both}.dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody{-webkit-overflow-scrolling:touch}.dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody>table>thead>tr>th,.dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody>table>thead>tr>td,.dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody>table>tbody>tr>th,.dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody>table>tbody>tr>td{vertical-align:middle}.dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody>table>thead>tr>th>div.dataTables_sizing,.dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody>table>thead>tr>td>div.dataTables_sizing,.dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody>table>tbody>tr>th>div.dataTables_sizing,.dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody>table>tbody>tr>td>div.dataTables_sizing{height:0;overflow:hidden;margin:0 !important;padding:0 !important}.dataTables_wrapper.no-footer .dataTables_scrollBody{border-bottom:1px solid rgba(0, 0, 0, 0.3)}.dataTables_wrapper.no-footer div.dataTables_scrollHead table.dataTable,.dataTables_wrapper.no-footer div.dataTables_scrollBody>table{border-bottom:none}.dataTables_wrapper:after{visibility:hidden;display:block;content:"";clear:both;height:0}@media screen and (max-width: 767px){.dataTables_wrapper .dataTables_info,.dataTables_wrapper .dataTables_paginate{float:none;text-align:center}.dataTables_wrapper .dataTables_paginate{margin-top:.5em}}@media screen and (max-width: 640px){.dataTables_wrapper .dataTables_length,.dataTables_wrapper .dataTables_filter{float:none;text-align:center}.dataTables_wrapper .dataTables_filter{margin-top:.5em}}html.dark{--dt-row-hover: 255, 255, 255;--dt-row-stripe: 255, 255, 255;--dt-column-ordering: 255, 255, 255}html.dark table.dataTable>thead>tr>th,html.dark table.dataTable>thead>tr>td{border-bottom:1px solid rgb(89, 91, 94)}html.dark table.dataTable>thead>tr>th:active,html.dark table.dataTable>thead>tr>td:active{outline:none}html.dark table.dataTable>tfoot>tr>th,html.dark table.dataTable>tfoot>tr>td{border-top:1px solid rgb(89, 91, 94)}html.dark table.dataTable.row-border>tbody>tr>th,html.dark table.dataTable.row-border>tbody>tr>td,html.dark 
table.dataTable.display>tbody>tr>th,html.dark table.dataTable.display>tbody>tr>td{border-top:1px solid rgb(64, 67, 70)}html.dark table.dataTable.row-border>tbody>tr.selected+tr.selected>td,html.dark table.dataTable.display>tbody>tr.selected+tr.selected>td{border-top-color:#0257d5}html.dark table.dataTable.cell-border>tbody>tr>th,html.dark table.dataTable.cell-border>tbody>tr>td{border-top:1px solid rgb(64, 67, 70);border-right:1px solid rgb(64, 67, 70)}html.dark table.dataTable.cell-border>tbody>tr>th:first-child,html.dark table.dataTable.cell-border>tbody>tr>td:first-child{border-left:1px solid rgb(64, 67, 70)}html.dark .dataTables_wrapper .dataTables_filter input,html.dark .dataTables_wrapper .dataTables_length select{border:1px solid rgba(255, 255, 255, 0.2);background-color:var(--dt-html-background)}html.dark .dataTables_wrapper .dataTables_paginate .paginate_button.current,html.dark .dataTables_wrapper .dataTables_paginate .paginate_button.current:hover{border:1px solid rgb(89, 91, 94);background:rgba(255, 255, 255, 0.15)}html.dark .dataTables_wrapper .dataTables_paginate .paginate_button.disabled,html.dark .dataTables_wrapper .dataTables_paginate .paginate_button.disabled:hover,html.dark .dataTables_wrapper .dataTables_paginate .paginate_button.disabled:active{color:#666 !important}html.dark .dataTables_wrapper .dataTables_paginate .paginate_button:hover{border:1px solid rgb(53, 53, 53);background:rgb(53, 53, 53)}html.dark .dataTables_wrapper .dataTables_paginate .paginate_button:active{background:#3a3a3a}
diff --git a/lib/toaster/toastergui/static/js/bootstrap.js b/lib/toaster/toastergui/static/js/bootstrap-3.4.1.js
index d47d640fe..170bd608f 100644
--- a/lib/toaster/toastergui/static/js/bootstrap.js
+++ b/lib/toaster/toastergui/static/js/bootstrap-3.4.1.js
@@ -1,6 +1,6 @@
/*!
- * Bootstrap v3.3.6 (http://getbootstrap.com)
- * Copyright 2011-2016 Twitter, Inc.
+ * Bootstrap v3.4.1 (https://getbootstrap.com/)
+ * Copyright 2011-2019 Twitter, Inc.
* Licensed under the MIT license
*/
@@ -11,16 +11,16 @@ if (typeof jQuery === 'undefined') {
+function ($) {
'use strict';
var version = $.fn.jquery.split(' ')[0].split('.')
- if ((version[0] < 2 && version[1] < 9) || (version[0] == 1 && version[1] == 9 && version[2] < 1) || (version[0] > 2)) {
- throw new Error('Bootstrap\'s JavaScript requires jQuery version 1.9.1 or higher, but lower than version 3')
+ if ((version[0] < 2 && version[1] < 9) || (version[0] == 1 && version[1] == 9 && version[2] < 1) || (version[0] > 3)) {
+ throw new Error('Bootstrap\'s JavaScript requires jQuery version 1.9.1 or higher, but lower than version 4')
}
}(jQuery);
/* ========================================================================
- * Bootstrap: transition.js v3.3.6
- * http://getbootstrap.com/javascript/#transitions
+ * Bootstrap: transition.js v3.4.1
+ * https://getbootstrap.com/docs/3.4/javascript/#transitions
* ========================================================================
- * Copyright 2011-2015 Twitter, Inc.
+ * Copyright 2011-2019 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
@@ -28,7 +28,7 @@ if (typeof jQuery === 'undefined') {
+function ($) {
'use strict';
- // CSS TRANSITION SUPPORT (Shoutout: http://www.modernizr.com/)
+ // CSS TRANSITION SUPPORT (Shoutout: https://modernizr.com/)
// ============================================================
function transitionEnd() {
@@ -50,7 +50,7 @@ if (typeof jQuery === 'undefined') {
return false // explicit for ie8 ( ._.)
}
- // http://blog.alexmaccaw.com/css-transitions
+ // https://blog.alexmaccaw.com/css-transitions
$.fn.emulateTransitionEnd = function (duration) {
var called = false
var $el = this
@@ -77,10 +77,10 @@ if (typeof jQuery === 'undefined') {
}(jQuery);
/* ========================================================================
- * Bootstrap: alert.js v3.3.6
- * http://getbootstrap.com/javascript/#alerts
+ * Bootstrap: alert.js v3.4.1
+ * https://getbootstrap.com/docs/3.4/javascript/#alerts
* ========================================================================
- * Copyright 2011-2015 Twitter, Inc.
+ * Copyright 2011-2019 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
@@ -96,7 +96,7 @@ if (typeof jQuery === 'undefined') {
$(el).on('click', dismiss, this.close)
}
- Alert.VERSION = '3.3.6'
+ Alert.VERSION = '3.4.1'
Alert.TRANSITION_DURATION = 150
@@ -109,7 +109,8 @@ if (typeof jQuery === 'undefined') {
selector = selector && selector.replace(/.*(?=#[^\s]*$)/, '') // strip for ie7
}
- var $parent = $(selector)
+ selector = selector === '#' ? [] : selector
+ var $parent = $(document).find(selector)
if (e) e.preventDefault()
@@ -172,10 +173,10 @@ if (typeof jQuery === 'undefined') {
}(jQuery);
/* ========================================================================
- * Bootstrap: button.js v3.3.6
- * http://getbootstrap.com/javascript/#buttons
+ * Bootstrap: button.js v3.4.1
+ * https://getbootstrap.com/docs/3.4/javascript/#buttons
* ========================================================================
- * Copyright 2011-2015 Twitter, Inc.
+ * Copyright 2011-2019 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
@@ -192,7 +193,7 @@ if (typeof jQuery === 'undefined') {
this.isLoading = false
}
- Button.VERSION = '3.3.6'
+ Button.VERSION = '3.4.1'
Button.DEFAULTS = {
loadingText: 'loading...'
@@ -214,10 +215,10 @@ if (typeof jQuery === 'undefined') {
if (state == 'loadingText') {
this.isLoading = true
- $el.addClass(d).attr(d, d)
+ $el.addClass(d).attr(d, d).prop(d, true)
} else if (this.isLoading) {
this.isLoading = false
- $el.removeClass(d).removeAttr(d)
+ $el.removeClass(d).removeAttr(d).prop(d, false)
}
}, this), 0)
}
@@ -281,10 +282,15 @@ if (typeof jQuery === 'undefined') {
$(document)
.on('click.bs.button.data-api', '[data-toggle^="button"]', function (e) {
- var $btn = $(e.target)
- if (!$btn.hasClass('btn')) $btn = $btn.closest('.btn')
+ var $btn = $(e.target).closest('.btn')
Plugin.call($btn, 'toggle')
- if (!($(e.target).is('input[type="radio"]') || $(e.target).is('input[type="checkbox"]'))) e.preventDefault()
+ if (!($(e.target).is('input[type="radio"], input[type="checkbox"]'))) {
+ // Prevent double clicks on radios and double selections (i.e. cancellation) on checkboxes
+ e.preventDefault()
+ // The target component still receives the focus
+ if ($btn.is('input,button')) $btn.trigger('focus')
+ else $btn.find('input:visible,button:visible').first().trigger('focus')
+ }
})
.on('focus.bs.button.data-api blur.bs.button.data-api', '[data-toggle^="button"]', function (e) {
$(e.target).closest('.btn').toggleClass('focus', /^focus(in)?$/.test(e.type))
@@ -293,10 +299,10 @@ if (typeof jQuery === 'undefined') {
}(jQuery);
/* ========================================================================
- * Bootstrap: carousel.js v3.3.6
- * http://getbootstrap.com/javascript/#carousel
+ * Bootstrap: carousel.js v3.4.1
+ * https://getbootstrap.com/docs/3.4/javascript/#carousel
* ========================================================================
- * Copyright 2011-2015 Twitter, Inc.
+ * Copyright 2011-2019 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
@@ -324,7 +330,7 @@ if (typeof jQuery === 'undefined') {
.on('mouseleave.bs.carousel', $.proxy(this.cycle, this))
}
- Carousel.VERSION = '3.3.6'
+ Carousel.VERSION = '3.4.1'
Carousel.TRANSITION_DURATION = 600
@@ -438,7 +444,9 @@ if (typeof jQuery === 'undefined') {
var slidEvent = $.Event('slid.bs.carousel', { relatedTarget: relatedTarget, direction: direction }) // yes, "slid"
if ($.support.transition && this.$element.hasClass('slide')) {
$next.addClass(type)
- $next[0].offsetWidth // force reflow
+ if (typeof $next === 'object' && $next.length) {
+ $next[0].offsetWidth // force reflow
+ }
$active.addClass(direction)
$next.addClass(direction)
$active
@@ -500,10 +508,17 @@ if (typeof jQuery === 'undefined') {
// =================
var clickHandler = function (e) {
- var href
var $this = $(this)
- var $target = $($this.attr('data-target') || (href = $this.attr('href')) && href.replace(/.*(?=#[^\s]+$)/, '')) // strip for ie7
+ var href = $this.attr('href')
+ if (href) {
+ href = href.replace(/.*(?=#[^\s]+$)/, '') // strip for ie7
+ }
+
+ var target = $this.attr('data-target') || href
+ var $target = $(document).find(target)
+
if (!$target.hasClass('carousel')) return
+
var options = $.extend({}, $target.data(), $this.data())
var slideIndex = $this.attr('data-slide-to')
if (slideIndex) options.interval = false
@@ -531,13 +546,14 @@ if (typeof jQuery === 'undefined') {
}(jQuery);
/* ========================================================================
- * Bootstrap: collapse.js v3.3.6
- * http://getbootstrap.com/javascript/#collapse
+ * Bootstrap: collapse.js v3.4.1
+ * https://getbootstrap.com/docs/3.4/javascript/#collapse
* ========================================================================
- * Copyright 2011-2015 Twitter, Inc.
+ * Copyright 2011-2019 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
+/* jshint latedef: false */
+function ($) {
'use strict';
@@ -561,7 +577,7 @@ if (typeof jQuery === 'undefined') {
if (this.options.toggle) this.toggle()
}
- Collapse.VERSION = '3.3.6'
+ Collapse.VERSION = '3.4.1'
Collapse.TRANSITION_DURATION = 350
@@ -668,7 +684,7 @@ if (typeof jQuery === 'undefined') {
}
Collapse.prototype.getParent = function () {
- return $(this.options.parent)
+ return $(document).find(this.options.parent)
.find('[data-toggle="collapse"][data-parent="' + this.options.parent + '"]')
.each($.proxy(function (i, element) {
var $element = $(element)
@@ -691,7 +707,7 @@ if (typeof jQuery === 'undefined') {
var target = $trigger.attr('data-target')
|| (href = $trigger.attr('href')) && href.replace(/.*(?=#[^\s]+$)/, '') // strip for ie7
- return $(target)
+ return $(document).find(target)
}
@@ -743,10 +759,10 @@ if (typeof jQuery === 'undefined') {
}(jQuery);
/* ========================================================================
- * Bootstrap: dropdown.js v3.3.6
- * http://getbootstrap.com/javascript/#dropdowns
+ * Bootstrap: dropdown.js v3.4.1
+ * https://getbootstrap.com/docs/3.4/javascript/#dropdowns
* ========================================================================
- * Copyright 2011-2015 Twitter, Inc.
+ * Copyright 2011-2019 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
@@ -763,7 +779,7 @@ if (typeof jQuery === 'undefined') {
$(element).on('click.bs.dropdown', this.toggle)
}
- Dropdown.VERSION = '3.3.6'
+ Dropdown.VERSION = '3.4.1'
function getParent($this) {
var selector = $this.attr('data-target')
@@ -773,7 +789,7 @@ if (typeof jQuery === 'undefined') {
selector = selector && /#[A-Za-z]/.test(selector) && selector.replace(/.*(?=#[^\s]*$)/, '') // strip for ie7
}
- var $parent = selector && $(selector)
+ var $parent = selector !== '#' ? $(document).find(selector) : null
return $parent && $parent.length ? $parent : $this.parent()
}
@@ -909,10 +925,10 @@ if (typeof jQuery === 'undefined') {
}(jQuery);
/* ========================================================================
- * Bootstrap: modal.js v3.3.6
- * http://getbootstrap.com/javascript/#modals
+ * Bootstrap: modal.js v3.4.1
+ * https://getbootstrap.com/docs/3.4/javascript/#modals
* ========================================================================
- * Copyright 2011-2015 Twitter, Inc.
+ * Copyright 2011-2019 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
@@ -924,15 +940,16 @@ if (typeof jQuery === 'undefined') {
// ======================
var Modal = function (element, options) {
- this.options = options
- this.$body = $(document.body)
- this.$element = $(element)
- this.$dialog = this.$element.find('.modal-dialog')
- this.$backdrop = null
- this.isShown = null
- this.originalBodyPad = null
- this.scrollbarWidth = 0
+ this.options = options
+ this.$body = $(document.body)
+ this.$element = $(element)
+ this.$dialog = this.$element.find('.modal-dialog')
+ this.$backdrop = null
+ this.isShown = null
+ this.originalBodyPad = null
+ this.scrollbarWidth = 0
this.ignoreBackdropClick = false
+ this.fixedContent = '.navbar-fixed-top, .navbar-fixed-bottom'
if (this.options.remote) {
this.$element
@@ -943,7 +960,7 @@ if (typeof jQuery === 'undefined') {
}
}
- Modal.VERSION = '3.3.6'
+ Modal.VERSION = '3.4.1'
Modal.TRANSITION_DURATION = 300
Modal.BACKDROP_TRANSITION_DURATION = 150
@@ -960,7 +977,7 @@ if (typeof jQuery === 'undefined') {
Modal.prototype.show = function (_relatedTarget) {
var that = this
- var e = $.Event('show.bs.modal', { relatedTarget: _relatedTarget })
+ var e = $.Event('show.bs.modal', { relatedTarget: _relatedTarget })
this.$element.trigger(e)
@@ -1050,7 +1067,9 @@ if (typeof jQuery === 'undefined') {
$(document)
.off('focusin.bs.modal') // guard against infinite focus loop
.on('focusin.bs.modal', $.proxy(function (e) {
- if (this.$element[0] !== e.target && !this.$element.has(e.target).length) {
+ if (document !== e.target &&
+ this.$element[0] !== e.target &&
+ !this.$element.has(e.target).length) {
this.$element.trigger('focus')
}
}, this))
@@ -1152,7 +1171,7 @@ if (typeof jQuery === 'undefined') {
var modalIsOverflowing = this.$element[0].scrollHeight > document.documentElement.clientHeight
this.$element.css({
- paddingLeft: !this.bodyIsOverflowing && modalIsOverflowing ? this.scrollbarWidth : '',
+ paddingLeft: !this.bodyIsOverflowing && modalIsOverflowing ? this.scrollbarWidth : '',
paddingRight: this.bodyIsOverflowing && !modalIsOverflowing ? this.scrollbarWidth : ''
})
}
@@ -1177,11 +1196,26 @@ if (typeof jQuery === 'undefined') {
Modal.prototype.setScrollbar = function () {
var bodyPad = parseInt((this.$body.css('padding-right') || 0), 10)
this.originalBodyPad = document.body.style.paddingRight || ''
- if (this.bodyIsOverflowing) this.$body.css('padding-right', bodyPad + this.scrollbarWidth)
+ var scrollbarWidth = this.scrollbarWidth
+ if (this.bodyIsOverflowing) {
+ this.$body.css('padding-right', bodyPad + scrollbarWidth)
+ $(this.fixedContent).each(function (index, element) {
+ var actualPadding = element.style.paddingRight
+ var calculatedPadding = $(element).css('padding-right')
+ $(element)
+ .data('padding-right', actualPadding)
+ .css('padding-right', parseFloat(calculatedPadding) + scrollbarWidth + 'px')
+ })
+ }
}
Modal.prototype.resetScrollbar = function () {
this.$body.css('padding-right', this.originalBodyPad)
+ $(this.fixedContent).each(function (index, element) {
+ var padding = $(element).data('padding-right')
+ $(element).removeData('padding-right')
+ element.style.paddingRight = padding ? padding : ''
+ })
}
Modal.prototype.measureScrollbar = function () { // thx walsh
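
The new fixedContent bookkeeping stops fixed navbars from shifting when the body scrollbar is hidden behind an open modal: setScrollbar widens their padding-right by the measured scrollbar width and stashes the original inline value under a data key, and resetScrollbar restores it. A sketch of the observable effect (the modal id and the 15px width are assumptions):

    $('#myModal').modal('show')
    // .navbar-fixed-top / .navbar-fixed-bottom elements get padding-right
    // increased by the measured scrollbar width (e.g. 15px); the original
    // inline value is saved via .data('padding-right').
    $('#myModal').modal('hide')
    // resetScrollbar restores the saved inline padding-right (or clears it).
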
@@ -1199,8 +1233,8 @@ if (typeof jQuery === 'undefined') {
function Plugin(option, _relatedTarget) {
return this.each(function () {
- var $this = $(this)
- var data = $this.data('bs.modal')
+ var $this = $(this)
+ var data = $this.data('bs.modal')
var options = $.extend({}, Modal.DEFAULTS, $this.data(), typeof option == 'object' && option)
if (!data) $this.data('bs.modal', (data = new Modal(this, options)))
@@ -1211,7 +1245,7 @@ if (typeof jQuery === 'undefined') {
var old = $.fn.modal
- $.fn.modal = Plugin
+ $.fn.modal = Plugin
$.fn.modal.Constructor = Modal
@@ -1228,10 +1262,13 @@ if (typeof jQuery === 'undefined') {
// ==============
$(document).on('click.bs.modal.data-api', '[data-toggle="modal"]', function (e) {
- var $this = $(this)
- var href = $this.attr('href')
- var $target = $($this.attr('data-target') || (href && href.replace(/.*(?=#[^\s]+$)/, ''))) // strip for ie7
- var option = $target.data('bs.modal') ? 'toggle' : $.extend({ remote: !/#/.test(href) && href }, $target.data(), $this.data())
+ var $this = $(this)
+ var href = $this.attr('href')
+ var target = $this.attr('data-target') ||
+ (href && href.replace(/.*(?=#[^\s]+$)/, '')) // strip for ie7
+
+ var $target = $(document).find(target)
+ var option = $target.data('bs.modal') ? 'toggle' : $.extend({ remote: !/#/.test(href) && href }, $target.data(), $this.data())
if ($this.is('a')) e.preventDefault()
@@ -1247,18 +1284,148 @@ if (typeof jQuery === 'undefined') {
}(jQuery);
/* ========================================================================
- * Bootstrap: tooltip.js v3.3.6
- * http://getbootstrap.com/javascript/#tooltip
+ * Bootstrap: tooltip.js v3.4.1
+ * https://getbootstrap.com/docs/3.4/javascript/#tooltip
* Inspired by the original jQuery.tipsy by Jason Frame
* ========================================================================
- * Copyright 2011-2015 Twitter, Inc.
+ * Copyright 2011-2019 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
-
+function ($) {
'use strict';
+ var DISALLOWED_ATTRIBUTES = ['sanitize', 'whiteList', 'sanitizeFn']
+
+ var uriAttrs = [
+ 'background',
+ 'cite',
+ 'href',
+ 'itemtype',
+ 'longdesc',
+ 'poster',
+ 'src',
+ 'xlink:href'
+ ]
+
+ var ARIA_ATTRIBUTE_PATTERN = /^aria-[\w-]*$/i
+
+ var DefaultWhitelist = {
+ // Global attributes allowed on any supplied element below.
+ '*': ['class', 'dir', 'id', 'lang', 'role', ARIA_ATTRIBUTE_PATTERN],
+ a: ['target', 'href', 'title', 'rel'],
+ area: [],
+ b: [],
+ br: [],
+ col: [],
+ code: [],
+ div: [],
+ em: [],
+ hr: [],
+ h1: [],
+ h2: [],
+ h3: [],
+ h4: [],
+ h5: [],
+ h6: [],
+ i: [],
+ img: ['src', 'alt', 'title', 'width', 'height'],
+ li: [],
+ ol: [],
+ p: [],
+ pre: [],
+ s: [],
+ small: [],
+ span: [],
+ sub: [],
+ sup: [],
+ strong: [],
+ u: [],
+ ul: []
+ }
+
+ /**
+ * A pattern that recognizes a commonly useful subset of URLs that are safe.
+ *
+ * Shoutout to Angular 7 https://github.com/angular/angular/blob/7.2.4/packages/core/src/sanitization/url_sanitizer.ts
+ */
+ var SAFE_URL_PATTERN = /^(?:(?:https?|mailto|ftp|tel|file):|[^&:/?#]*(?:[/?#]|$))/gi
+
+ /**
+ * A pattern that matches safe data URLs. Only matches image, video and audio types.
+ *
+ * Shoutout to Angular 7 https://github.com/angular/angular/blob/7.2.4/packages/core/src/sanitization/url_sanitizer.ts
+ */
+ var DATA_URL_PATTERN = /^data:(?:image\/(?:bmp|gif|jpeg|jpg|png|tiff|webp)|video\/(?:mpeg|mp4|ogg|webm)|audio\/(?:mp3|oga|ogg|opus));base64,[a-z0-9+/]+=*$/i
+
+ function allowedAttribute(attr, allowedAttributeList) {
+ var attrName = attr.nodeName.toLowerCase()
+
+ if ($.inArray(attrName, allowedAttributeList) !== -1) {
+ if ($.inArray(attrName, uriAttrs) !== -1) {
+ return Boolean(attr.nodeValue.match(SAFE_URL_PATTERN) || attr.nodeValue.match(DATA_URL_PATTERN))
+ }
+
+ return true
+ }
+
+ var regExp = $(allowedAttributeList).filter(function (index, value) {
+ return value instanceof RegExp
+ })
+
+ // Check if a regular expression validates the attribute.
+ for (var i = 0, l = regExp.length; i < l; i++) {
+ if (attrName.match(regExp[i])) {
+ return true
+ }
+ }
+
+ return false
+ }
+
+ function sanitizeHtml(unsafeHtml, whiteList, sanitizeFn) {
+ if (unsafeHtml.length === 0) {
+ return unsafeHtml
+ }
+
+ if (sanitizeFn && typeof sanitizeFn === 'function') {
+ return sanitizeFn(unsafeHtml)
+ }
+
+ // IE 8 and below don't support createHTMLDocument
+ if (!document.implementation || !document.implementation.createHTMLDocument) {
+ return unsafeHtml
+ }
+
+ var createdDocument = document.implementation.createHTMLDocument('sanitization')
+ createdDocument.body.innerHTML = unsafeHtml
+
+ var whitelistKeys = $.map(whiteList, function (el, i) { return i })
+ var elements = $(createdDocument.body).find('*')
+
+ for (var i = 0, len = elements.length; i < len; i++) {
+ var el = elements[i]
+ var elName = el.nodeName.toLowerCase()
+
+ if ($.inArray(elName, whitelistKeys) === -1) {
+ el.parentNode.removeChild(el)
+
+ continue
+ }
+
+ var attributeList = $.map(el.attributes, function (el) { return el })
+ var whitelistedAttributes = [].concat(whiteList['*'] || [], whiteList[elName] || [])
+
+ for (var j = 0, len2 = attributeList.length; j < len2; j++) {
+ if (!allowedAttribute(attributeList[j], whitelistedAttributes)) {
+ el.removeAttribute(attributeList[j].nodeName)
+ }
+ }
+ }
+
+ return createdDocument.body.innerHTML
+ }
+
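+
+ A usage sketch of the sanitizer introduced above, reusing the names from this hunk (the input string and the expected output are assumptions derived from the whitelist rules):
+
+     var unsafe = '<a href="javascript:alert(1)" onclick="evil()">link</a><script>evil()</script>'
+     var clean = sanitizeHtml(unsafe, DefaultWhitelist, null)
+     // Expected (assumed): '<a>link</a>'. The javascript: URL fails SAFE_URL_PATTERN so
+     // href is dropped, onclick is not whitelisted for <a>, and <script> is not a
+     // whitelisted element at all, so that node is removed outright.
+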
// TOOLTIP PUBLIC CLASS DEFINITION
// ===============================
@@ -1274,7 +1441,7 @@ if (typeof jQuery === 'undefined') {
this.init('tooltip', element, options)
}
- Tooltip.VERSION = '3.3.6'
+ Tooltip.VERSION = '3.4.1'
Tooltip.TRANSITION_DURATION = 150
@@ -1291,7 +1458,10 @@ if (typeof jQuery === 'undefined') {
viewport: {
selector: 'body',
padding: 0
- }
+ },
+ sanitize : true,
+ sanitizeFn : null,
+ whiteList : DefaultWhitelist
}
Tooltip.prototype.init = function (type, element, options) {
@@ -1299,7 +1469,7 @@ if (typeof jQuery === 'undefined') {
this.type = type
this.$element = $(element)
this.options = this.getOptions(options)
- this.$viewport = this.options.viewport && $($.isFunction(this.options.viewport) ? this.options.viewport.call(this, this.$element) : (this.options.viewport.selector || this.options.viewport))
+ this.$viewport = this.options.viewport && $(document).find($.isFunction(this.options.viewport) ? this.options.viewport.call(this, this.$element) : (this.options.viewport.selector || this.options.viewport))
this.inState = { click: false, hover: false, focus: false }
if (this.$element[0] instanceof document.constructor && !this.options.selector) {
@@ -1332,7 +1502,15 @@ if (typeof jQuery === 'undefined') {
}
Tooltip.prototype.getOptions = function (options) {
- options = $.extend({}, this.getDefaults(), this.$element.data(), options)
+ var dataAttributes = this.$element.data()
+
+ for (var dataAttr in dataAttributes) {
+ if (dataAttributes.hasOwnProperty(dataAttr) && $.inArray(dataAttr, DISALLOWED_ATTRIBUTES) !== -1) {
+ delete dataAttributes[dataAttr]
+ }
+ }
+
+ options = $.extend({}, this.getDefaults(), dataAttributes, options)
if (options.delay && typeof options.delay == 'number') {
options.delay = {
@@ -1341,6 +1519,10 @@ if (typeof jQuery === 'undefined') {
}
}
+ if (options.sanitize) {
+ options.template = sanitizeHtml(options.template, options.whiteList, options.sanitizeFn)
+ }
+
return options
}
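
The three new options land in Tooltip.DEFAULTS, and getOptions strips them from element data attributes (via DISALLOWED_ATTRIBUTES) so they can only be set from JavaScript, never from markup. A configuration sketch (the selector and the custom-sanitizer hook are hypothetical):

    $('[data-toggle="tooltip"]').tooltip({
      html: true,
      sanitize: true,  // the 3.4.1 default; set false to opt out of sanitization
      // extend the default whitelist rather than replacing it:
      whiteList: $.extend({}, $.fn.tooltip.Constructor.DEFAULTS.whiteList, { table: [], tr: [], td: [] }),
      // sanitizeFn: myCustomSanitizer  // hypothetical hook; replaces the built-in sanitizer
    })
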
@@ -1452,7 +1634,7 @@ if (typeof jQuery === 'undefined') {
.addClass(placement)
.data('bs.' + this.type, this)
- this.options.container ? $tip.appendTo(this.options.container) : $tip.insertAfter(this.$element)
+ this.options.container ? $tip.appendTo($(document).find(this.options.container)) : $tip.insertAfter(this.$element)
this.$element.trigger('inserted.bs.' + this.type)
var pos = this.getPosition()
@@ -1554,7 +1736,16 @@ if (typeof jQuery === 'undefined') {
var $tip = this.tip()
var title = this.getTitle()
- $tip.find('.tooltip-inner')[this.options.html ? 'html' : 'text'](title)
+ if (this.options.html) {
+ if (this.options.sanitize) {
+ title = sanitizeHtml(title, this.options.whiteList, this.options.sanitizeFn)
+ }
+
+ $tip.find('.tooltip-inner').html(title)
+ } else {
+ $tip.find('.tooltip-inner').text(title)
+ }
+
$tip.removeClass('fade in top bottom left right')
}
@@ -1565,9 +1756,11 @@ if (typeof jQuery === 'undefined') {
function complete() {
if (that.hoverState != 'in') $tip.detach()
- that.$element
- .removeAttr('aria-describedby')
- .trigger('hidden.bs.' + that.type)
+ if (that.$element) { // TODO: Check whether guarding this code with this `if` is really necessary.
+ that.$element
+ .removeAttr('aria-describedby')
+ .trigger('hidden.bs.' + that.type)
+ }
callback && callback()
}
@@ -1610,7 +1803,10 @@ if (typeof jQuery === 'undefined') {
// width and height are missing in IE8, so compute them manually; see https://github.com/twbs/bootstrap/issues/14093
elRect = $.extend({}, elRect, { width: elRect.right - elRect.left, height: elRect.bottom - elRect.top })
}
- var elOffset = isBody ? { top: 0, left: 0 } : $element.offset()
+ var isSvg = window.SVGElement && el instanceof window.SVGElement
+ // Avoid using $.offset() on SVGs since it gives incorrect results in jQuery 3.
+ // See https://github.com/twbs/bootstrap/issues/20280
+ var elOffset = isBody ? { top: 0, left: 0 } : (isSvg ? null : $element.offset())
var scroll = { scroll: isBody ? document.documentElement.scrollTop || document.body.scrollTop : $element.scrollTop() }
var outerDims = isBody ? { width: $(window).width(), height: $(window).height() } : null
@@ -1726,9 +1922,13 @@ if (typeof jQuery === 'undefined') {
that.$tip = null
that.$arrow = null
that.$viewport = null
+ that.$element = null
})
}
+ Tooltip.prototype.sanitizeHtml = function (unsafeHtml) {
+ return sanitizeHtml(unsafeHtml, this.options.whiteList, this.options.sanitizeFn)
+ }
// TOOLTIP PLUGIN DEFINITION
// =========================
@@ -1762,10 +1962,10 @@ if (typeof jQuery === 'undefined') {
}(jQuery);
/* ========================================================================
- * Bootstrap: popover.js v3.3.6
- * http://getbootstrap.com/javascript/#popovers
+ * Bootstrap: popover.js v3.4.1
+ * https://getbootstrap.com/docs/3.4/javascript/#popovers
* ========================================================================
- * Copyright 2011-2015 Twitter, Inc.
+ * Copyright 2011-2019 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
@@ -1782,7 +1982,7 @@ if (typeof jQuery === 'undefined') {
if (!$.fn.tooltip) throw new Error('Popover requires tooltip.js')
- Popover.VERSION = '3.3.6'
+ Popover.VERSION = '3.4.1'
Popover.DEFAULTS = $.extend({}, $.fn.tooltip.Constructor.DEFAULTS, {
placement: 'right',
@@ -1808,10 +2008,25 @@ if (typeof jQuery === 'undefined') {
var title = this.getTitle()
var content = this.getContent()
- $tip.find('.popover-title')[this.options.html ? 'html' : 'text'](title)
- $tip.find('.popover-content').children().detach().end()[ // we use append for html objects to maintain js events
- this.options.html ? (typeof content == 'string' ? 'html' : 'append') : 'text'
- ](content)
+ if (this.options.html) {
+ var typeContent = typeof content
+
+ if (this.options.sanitize) {
+ title = this.sanitizeHtml(title)
+
+ if (typeContent === 'string') {
+ content = this.sanitizeHtml(content)
+ }
+ }
+
+ $tip.find('.popover-title').html(title)
+ $tip.find('.popover-content').children().detach().end()[
+ typeContent === 'string' ? 'html' : 'append'
+ ](content)
+ } else {
+ $tip.find('.popover-title').text(title)
+ $tip.find('.popover-content').children().detach().end().text(content)
+ }
$tip.removeClass('fade top bottom left right in')
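
With html:true, popover titles and string content now take the same sanitized path as tooltips, while non-string content (e.g. a jQuery object) is still appended unsanitized so bound events survive. A sketch (the element id and the payloads are assumptions):

    $('#hint').popover({
      html: true,
      title: 'Safe <em>title</em><script>evil()</script>',  // <script> stripped (assumed)
      content: '<b onmouseover="evil()">body</b>'           // onmouseover removed (assumed)
    })
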
@@ -1830,8 +2045,8 @@ if (typeof jQuery === 'undefined') {
return $e.attr('data-content')
|| (typeof o.content == 'function' ?
- o.content.call($e[0]) :
- o.content)
+ o.content.call($e[0]) :
+ o.content)
}
Popover.prototype.arrow = function () {
@@ -1871,10 +2086,10 @@ if (typeof jQuery === 'undefined') {
}(jQuery);
/* ========================================================================
- * Bootstrap: scrollspy.js v3.3.6
- * http://getbootstrap.com/javascript/#scrollspy
+ * Bootstrap: scrollspy.js v3.4.1
+ * https://getbootstrap.com/docs/3.4/javascript/#scrollspy
* ========================================================================
- * Copyright 2011-2015 Twitter, Inc.
+ * Copyright 2011-2019 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
@@ -1900,7 +2115,7 @@ if (typeof jQuery === 'undefined') {
this.process()
}
- ScrollSpy.VERSION = '3.3.6'
+ ScrollSpy.VERSION = '3.4.1'
ScrollSpy.DEFAULTS = {
offset: 10
@@ -2044,10 +2259,10 @@ if (typeof jQuery === 'undefined') {
}(jQuery);
/* ========================================================================
- * Bootstrap: tab.js v3.3.6
- * http://getbootstrap.com/javascript/#tabs
+ * Bootstrap: tab.js v3.4.1
+ * https://getbootstrap.com/docs/3.4/javascript/#tabs
* ========================================================================
- * Copyright 2011-2015 Twitter, Inc.
+ * Copyright 2011-2019 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
@@ -2064,7 +2279,7 @@ if (typeof jQuery === 'undefined') {
// jscs:enable requireDollarBeforejQueryAssignment
}
- Tab.VERSION = '3.3.6'
+ Tab.VERSION = '3.4.1'
Tab.TRANSITION_DURATION = 150
@@ -2093,7 +2308,7 @@ if (typeof jQuery === 'undefined') {
if (showEvent.isDefaultPrevented() || hideEvent.isDefaultPrevented()) return
- var $target = $(selector)
+ var $target = $(document).find(selector)
this.activate($this.closest('li'), $ul)
this.activate($target, $target.parent(), function () {
@@ -2118,15 +2333,15 @@ if (typeof jQuery === 'undefined') {
$active
.removeClass('active')
.find('> .dropdown-menu > .active')
- .removeClass('active')
+ .removeClass('active')
.end()
.find('[data-toggle="tab"]')
- .attr('aria-expanded', false)
+ .attr('aria-expanded', false)
element
.addClass('active')
.find('[data-toggle="tab"]')
- .attr('aria-expanded', true)
+ .attr('aria-expanded', true)
if (transition) {
element[0].offsetWidth // reflow for transition
@@ -2138,10 +2353,10 @@ if (typeof jQuery === 'undefined') {
if (element.parent('.dropdown-menu').length) {
element
.closest('li.dropdown')
- .addClass('active')
+ .addClass('active')
.end()
.find('[data-toggle="tab"]')
- .attr('aria-expanded', true)
+ .attr('aria-expanded', true)
}
callback && callback()
@@ -2200,10 +2415,10 @@ if (typeof jQuery === 'undefined') {
}(jQuery);
/* ========================================================================
- * Bootstrap: affix.js v3.3.6
- * http://getbootstrap.com/javascript/#affix
+ * Bootstrap: affix.js v3.4.1
+ * https://getbootstrap.com/docs/3.4/javascript/#affix
* ========================================================================
- * Copyright 2011-2015 Twitter, Inc.
+ * Copyright 2011-2019 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
@@ -2217,7 +2432,9 @@ if (typeof jQuery === 'undefined') {
var Affix = function (element, options) {
this.options = $.extend({}, Affix.DEFAULTS, options)
- this.$target = $(this.options.target)
+ var target = this.options.target === Affix.DEFAULTS.target ? $(this.options.target) : $(document).find(this.options.target)
+
+ this.$target = target
.on('scroll.bs.affix.data-api', $.proxy(this.checkPosition, this))
.on('click.bs.affix.data-api', $.proxy(this.checkPositionWithEventLoop, this))
@@ -2229,7 +2446,7 @@ if (typeof jQuery === 'undefined') {
this.checkPosition()
}
- Affix.VERSION = '3.3.6'
+ Affix.VERSION = '3.4.1'
Affix.RESET = 'affix affix-top affix-bottom'
diff --git a/lib/toaster/toastergui/static/js/bootstrap-3.4.1.min.js b/lib/toaster/toastergui/static/js/bootstrap-3.4.1.min.js
new file mode 100644
index 000000000..eb0a8b410
--- /dev/null
+++ b/lib/toaster/toastergui/static/js/bootstrap-3.4.1.min.js
@@ -0,0 +1,6 @@
+/*!
+ * Bootstrap v3.4.1 (https://getbootstrap.com/)
+ * Copyright 2011-2019 Twitter, Inc.
+ * Licensed under the MIT license
+ */
+if("undefined"==typeof jQuery)throw new Error("Bootstrap's JavaScript requires jQuery");!function(t){"use strict";var e=jQuery.fn.jquery.split(" ")[0].split(".");if(e[0]<2&&e[1]<9||1==e[0]&&9==e[1]&&e[2]<1||3<e[0])throw new Error("Bootstrap's JavaScript requires jQuery version 1.9.1 or higher, but lower than version 4")}(),function(n){"use strict";n.fn.emulateTransitionEnd=function(t){var e=!1,i=this;n(this).one("bsTransitionEnd",function(){e=!0});return setTimeout(function(){e||n(i).trigger(n.support.transition.end)},t),this},n(function(){n.support.transition=function o(){var t=document.createElement("bootstrap"),e={WebkitTransition:"webkitTransitionEnd",MozTransition:"transitionend",OTransition:"oTransitionEnd otransitionend",transition:"transitionend"};for(var i in e)if(t.style[i]!==undefined)return{end:e[i]};return!1}(),n.support.transition&&(n.event.special.bsTransitionEnd={bindType:n.support.transition.end,delegateType:n.support.transition.end,handle:function(t){if(n(t.target).is(this))return t.handleObj.handler.apply(this,arguments)}})})}(jQuery),function(s){"use strict";var e='[data-dismiss="alert"]',a=function(t){s(t).on("click",e,this.close)};a.VERSION="3.4.1",a.TRANSITION_DURATION=150,a.prototype.close=function(t){var e=s(this),i=e.attr("data-target");i||(i=(i=e.attr("href"))&&i.replace(/.*(?=#[^\s]*$)/,"")),i="#"===i?[]:i;var o=s(document).find(i);function n(){o.detach().trigger("closed.bs.alert").remove()}t&&t.preventDefault(),o.length||(o=e.closest(".alert")),o.trigger(t=s.Event("close.bs.alert")),t.isDefaultPrevented()||(o.removeClass("in"),s.support.transition&&o.hasClass("fade")?o.one("bsTransitionEnd",n).emulateTransitionEnd(a.TRANSITION_DURATION):n())};var t=s.fn.alert;s.fn.alert=function o(i){return this.each(function(){var t=s(this),e=t.data("bs.alert");e||t.data("bs.alert",e=new a(this)),"string"==typeof i&&e[i].call(t)})},s.fn.alert.Constructor=a,s.fn.alert.noConflict=function(){return s.fn.alert=t,this},s(document).on("click.bs.alert.data-api",e,a.prototype.close)}(jQuery),function(s){"use strict";var n=function(t,e){this.$element=s(t),this.options=s.extend({},n.DEFAULTS,e),this.isLoading=!1};function i(o){return this.each(function(){var t=s(this),e=t.data("bs.button"),i="object"==typeof o&&o;e||t.data("bs.button",e=new n(this,i)),"toggle"==o?e.toggle():o&&e.setState(o)})}n.VERSION="3.4.1",n.DEFAULTS={loadingText:"loading..."},n.prototype.setState=function(t){var e="disabled",i=this.$element,o=i.is("input")?"val":"html",n=i.data();t+="Text",null==n.resetText&&i.data("resetText",i[o]()),setTimeout(s.proxy(function(){i[o](null==n[t]?this.options[t]:n[t]),"loadingText"==t?(this.isLoading=!0,i.addClass(e).attr(e,e).prop(e,!0)):this.isLoading&&(this.isLoading=!1,i.removeClass(e).removeAttr(e).prop(e,!1))},this),0)},n.prototype.toggle=function(){var t=!0,e=this.$element.closest('[data-toggle="buttons"]');if(e.length){var i=this.$element.find("input");"radio"==i.prop("type")?(i.prop("checked")&&(t=!1),e.find(".active").removeClass("active"),this.$element.addClass("active")):"checkbox"==i.prop("type")&&(i.prop("checked")!==this.$element.hasClass("active")&&(t=!1),this.$element.toggleClass("active")),i.prop("checked",this.$element.hasClass("active")),t&&i.trigger("change")}else this.$element.attr("aria-pressed",!this.$element.hasClass("active")),this.$element.toggleClass("active")};var t=s.fn.button;s.fn.button=i,s.fn.button.Constructor=n,s.fn.button.noConflict=function(){return 
s.fn.button=t,this},s(document).on("click.bs.button.data-api",'[data-toggle^="button"]',function(t){var e=s(t.target).closest(".btn");i.call(e,"toggle"),s(t.target).is('input[type="radio"], input[type="checkbox"]')||(t.preventDefault(),e.is("input,button")?e.trigger("focus"):e.find("input:visible,button:visible").first().trigger("focus"))}).on("focus.bs.button.data-api blur.bs.button.data-api",'[data-toggle^="button"]',function(t){s(t.target).closest(".btn").toggleClass("focus",/^focus(in)?$/.test(t.type))})}(jQuery),function(p){"use strict";var c=function(t,e){this.$element=p(t),this.$indicators=this.$element.find(".carousel-indicators"),this.options=e,this.paused=null,this.sliding=null,this.interval=null,this.$active=null,this.$items=null,this.options.keyboard&&this.$element.on("keydown.bs.carousel",p.proxy(this.keydown,this)),"hover"==this.options.pause&&!("ontouchstart"in document.documentElement)&&this.$element.on("mouseenter.bs.carousel",p.proxy(this.pause,this)).on("mouseleave.bs.carousel",p.proxy(this.cycle,this))};function r(n){return this.each(function(){var t=p(this),e=t.data("bs.carousel"),i=p.extend({},c.DEFAULTS,t.data(),"object"==typeof n&&n),o="string"==typeof n?n:i.slide;e||t.data("bs.carousel",e=new c(this,i)),"number"==typeof n?e.to(n):o?e[o]():i.interval&&e.pause().cycle()})}c.VERSION="3.4.1",c.TRANSITION_DURATION=600,c.DEFAULTS={interval:5e3,pause:"hover",wrap:!0,keyboard:!0},c.prototype.keydown=function(t){if(!/input|textarea/i.test(t.target.tagName)){switch(t.which){case 37:this.prev();break;case 39:this.next();break;default:return}t.preventDefault()}},c.prototype.cycle=function(t){return t||(this.paused=!1),this.interval&&clearInterval(this.interval),this.options.interval&&!this.paused&&(this.interval=setInterval(p.proxy(this.next,this),this.options.interval)),this},c.prototype.getItemIndex=function(t){return this.$items=t.parent().children(".item"),this.$items.index(t||this.$active)},c.prototype.getItemForDirection=function(t,e){var i=this.getItemIndex(e);if(("prev"==t&&0===i||"next"==t&&i==this.$items.length-1)&&!this.options.wrap)return e;var o=(i+("prev"==t?-1:1))%this.$items.length;return this.$items.eq(o)},c.prototype.to=function(t){var e=this,i=this.getItemIndex(this.$active=this.$element.find(".item.active"));if(!(t>this.$items.length-1||t<0))return this.sliding?this.$element.one("slid.bs.carousel",function(){e.to(t)}):i==t?this.pause().cycle():this.slide(i<t?"next":"prev",this.$items.eq(t))},c.prototype.pause=function(t){return t||(this.paused=!0),this.$element.find(".next, .prev").length&&p.support.transition&&(this.$element.trigger(p.support.transition.end),this.cycle(!0)),this.interval=clearInterval(this.interval),this},c.prototype.next=function(){if(!this.sliding)return this.slide("next")},c.prototype.prev=function(){if(!this.sliding)return this.slide("prev")},c.prototype.slide=function(t,e){var i=this.$element.find(".item.active"),o=e||this.getItemForDirection(t,i),n=this.interval,s="next"==t?"left":"right",a=this;if(o.hasClass("active"))return this.sliding=!1;var r=o[0],l=p.Event("slide.bs.carousel",{relatedTarget:r,direction:s});if(this.$element.trigger(l),!l.isDefaultPrevented()){if(this.sliding=!0,n&&this.pause(),this.$indicators.length){this.$indicators.find(".active").removeClass("active");var h=p(this.$indicators.children()[this.getItemIndex(o)]);h&&h.addClass("active")}var d=p.Event("slid.bs.carousel",{relatedTarget:r,direction:s});return p.support.transition&&this.$element.hasClass("slide")?(o.addClass(t),"object"==typeof 
o&&o.length&&o[0].offsetWidth,i.addClass(s),o.addClass(s),i.one("bsTransitionEnd",function(){o.removeClass([t,s].join(" ")).addClass("active"),i.removeClass(["active",s].join(" ")),a.sliding=!1,setTimeout(function(){a.$element.trigger(d)},0)}).emulateTransitionEnd(c.TRANSITION_DURATION)):(i.removeClass("active"),o.addClass("active"),this.sliding=!1,this.$element.trigger(d)),n&&this.cycle(),this}};var t=p.fn.carousel;p.fn.carousel=r,p.fn.carousel.Constructor=c,p.fn.carousel.noConflict=function(){return p.fn.carousel=t,this};var e=function(t){var e=p(this),i=e.attr("href");i&&(i=i.replace(/.*(?=#[^\s]+$)/,""));var o=e.attr("data-target")||i,n=p(document).find(o);if(n.hasClass("carousel")){var s=p.extend({},n.data(),e.data()),a=e.attr("data-slide-to");a&&(s.interval=!1),r.call(n,s),a&&n.data("bs.carousel").to(a),t.preventDefault()}};p(document).on("click.bs.carousel.data-api","[data-slide]",e).on("click.bs.carousel.data-api","[data-slide-to]",e),p(window).on("load",function(){p('[data-ride="carousel"]').each(function(){var t=p(this);r.call(t,t.data())})})}(jQuery),function(a){"use strict";var r=function(t,e){this.$element=a(t),this.options=a.extend({},r.DEFAULTS,e),this.$trigger=a('[data-toggle="collapse"][href="#'+t.id+'"],[data-toggle="collapse"][data-target="#'+t.id+'"]'),this.transitioning=null,this.options.parent?this.$parent=this.getParent():this.addAriaAndCollapsedClass(this.$element,this.$trigger),this.options.toggle&&this.toggle()};function n(t){var e,i=t.attr("data-target")||(e=t.attr("href"))&&e.replace(/.*(?=#[^\s]+$)/,"");return a(document).find(i)}function l(o){return this.each(function(){var t=a(this),e=t.data("bs.collapse"),i=a.extend({},r.DEFAULTS,t.data(),"object"==typeof o&&o);!e&&i.toggle&&/show|hide/.test(o)&&(i.toggle=!1),e||t.data("bs.collapse",e=new r(this,i)),"string"==typeof o&&e[o]()})}r.VERSION="3.4.1",r.TRANSITION_DURATION=350,r.DEFAULTS={toggle:!0},r.prototype.dimension=function(){return this.$element.hasClass("width")?"width":"height"},r.prototype.show=function(){if(!this.transitioning&&!this.$element.hasClass("in")){var t,e=this.$parent&&this.$parent.children(".panel").children(".in, .collapsing");if(!(e&&e.length&&(t=e.data("bs.collapse"))&&t.transitioning)){var i=a.Event("show.bs.collapse");if(this.$element.trigger(i),!i.isDefaultPrevented()){e&&e.length&&(l.call(e,"hide"),t||e.data("bs.collapse",null));var o=this.dimension();this.$element.removeClass("collapse").addClass("collapsing")[o](0).attr("aria-expanded",!0),this.$trigger.removeClass("collapsed").attr("aria-expanded",!0),this.transitioning=1;var n=function(){this.$element.removeClass("collapsing").addClass("collapse in")[o](""),this.transitioning=0,this.$element.trigger("shown.bs.collapse")};if(!a.support.transition)return n.call(this);var s=a.camelCase(["scroll",o].join("-"));this.$element.one("bsTransitionEnd",a.proxy(n,this)).emulateTransitionEnd(r.TRANSITION_DURATION)[o](this.$element[0][s])}}}},r.prototype.hide=function(){if(!this.transitioning&&this.$element.hasClass("in")){var t=a.Event("hide.bs.collapse");if(this.$element.trigger(t),!t.isDefaultPrevented()){var e=this.dimension();this.$element[e](this.$element[e]())[0].offsetHeight,this.$element.addClass("collapsing").removeClass("collapse in").attr("aria-expanded",!1),this.$trigger.addClass("collapsed").attr("aria-expanded",!1),this.transitioning=1;var i=function(){this.transitioning=0,this.$element.removeClass("collapsing").addClass("collapse").trigger("hidden.bs.collapse")};if(!a.support.transition)return 
i.call(this);this.$element[e](0).one("bsTransitionEnd",a.proxy(i,this)).emulateTransitionEnd(r.TRANSITION_DURATION)}}},r.prototype.toggle=function(){this[this.$element.hasClass("in")?"hide":"show"]()},r.prototype.getParent=function(){return a(document).find(this.options.parent).find('[data-toggle="collapse"][data-parent="'+this.options.parent+'"]').each(a.proxy(function(t,e){var i=a(e);this.addAriaAndCollapsedClass(n(i),i)},this)).end()},r.prototype.addAriaAndCollapsedClass=function(t,e){var i=t.hasClass("in");t.attr("aria-expanded",i),e.toggleClass("collapsed",!i).attr("aria-expanded",i)};var t=a.fn.collapse;a.fn.collapse=l,a.fn.collapse.Constructor=r,a.fn.collapse.noConflict=function(){return a.fn.collapse=t,this},a(document).on("click.bs.collapse.data-api",'[data-toggle="collapse"]',function(t){var e=a(this);e.attr("data-target")||t.preventDefault();var i=n(e),o=i.data("bs.collapse")?"toggle":e.data();l.call(i,o)})}(jQuery),function(a){"use strict";var r='[data-toggle="dropdown"]',o=function(t){a(t).on("click.bs.dropdown",this.toggle)};function l(t){var e=t.attr("data-target");e||(e=(e=t.attr("href"))&&/#[A-Za-z]/.test(e)&&e.replace(/.*(?=#[^\s]*$)/,""));var i="#"!==e?a(document).find(e):null;return i&&i.length?i:t.parent()}function s(o){o&&3===o.which||(a(".dropdown-backdrop").remove(),a(r).each(function(){var t=a(this),e=l(t),i={relatedTarget:this};e.hasClass("open")&&(o&&"click"==o.type&&/input|textarea/i.test(o.target.tagName)&&a.contains(e[0],o.target)||(e.trigger(o=a.Event("hide.bs.dropdown",i)),o.isDefaultPrevented()||(t.attr("aria-expanded","false"),e.removeClass("open").trigger(a.Event("hidden.bs.dropdown",i)))))}))}o.VERSION="3.4.1",o.prototype.toggle=function(t){var e=a(this);if(!e.is(".disabled, :disabled")){var i=l(e),o=i.hasClass("open");if(s(),!o){"ontouchstart"in document.documentElement&&!i.closest(".navbar-nav").length&&a(document.createElement("div")).addClass("dropdown-backdrop").insertAfter(a(this)).on("click",s);var n={relatedTarget:this};if(i.trigger(t=a.Event("show.bs.dropdown",n)),t.isDefaultPrevented())return;e.trigger("focus").attr("aria-expanded","true"),i.toggleClass("open").trigger(a.Event("shown.bs.dropdown",n))}return!1}},o.prototype.keydown=function(t){if(/(38|40|27|32)/.test(t.which)&&!/input|textarea/i.test(t.target.tagName)){var e=a(this);if(t.preventDefault(),t.stopPropagation(),!e.is(".disabled, :disabled")){var i=l(e),o=i.hasClass("open");if(!o&&27!=t.which||o&&27==t.which)return 27==t.which&&i.find(r).trigger("focus"),e.trigger("click");var n=i.find(".dropdown-menu li:not(.disabled):visible a");if(n.length){var s=n.index(t.target);38==t.which&&0<s&&s--,40==t.which&&s<n.length-1&&s++,~s||(s=0),n.eq(s).trigger("focus")}}}};var t=a.fn.dropdown;a.fn.dropdown=function e(i){return this.each(function(){var t=a(this),e=t.data("bs.dropdown");e||t.data("bs.dropdown",e=new o(this)),"string"==typeof i&&e[i].call(t)})},a.fn.dropdown.Constructor=o,a.fn.dropdown.noConflict=function(){return a.fn.dropdown=t,this},a(document).on("click.bs.dropdown.data-api",s).on("click.bs.dropdown.data-api",".dropdown form",function(t){t.stopPropagation()}).on("click.bs.dropdown.data-api",r,o.prototype.toggle).on("keydown.bs.dropdown.data-api",r,o.prototype.keydown).on("keydown.bs.dropdown.data-api",".dropdown-menu",o.prototype.keydown)}(jQuery),function(a){"use strict";var 
s=function(t,e){this.options=e,this.$body=a(document.body),this.$element=a(t),this.$dialog=this.$element.find(".modal-dialog"),this.$backdrop=null,this.isShown=null,this.originalBodyPad=null,this.scrollbarWidth=0,this.ignoreBackdropClick=!1,this.fixedContent=".navbar-fixed-top, .navbar-fixed-bottom",this.options.remote&&this.$element.find(".modal-content").load(this.options.remote,a.proxy(function(){this.$element.trigger("loaded.bs.modal")},this))};function r(o,n){return this.each(function(){var t=a(this),e=t.data("bs.modal"),i=a.extend({},s.DEFAULTS,t.data(),"object"==typeof o&&o);e||t.data("bs.modal",e=new s(this,i)),"string"==typeof o?e[o](n):i.show&&e.show(n)})}s.VERSION="3.4.1",s.TRANSITION_DURATION=300,s.BACKDROP_TRANSITION_DURATION=150,s.DEFAULTS={backdrop:!0,keyboard:!0,show:!0},s.prototype.toggle=function(t){return this.isShown?this.hide():this.show(t)},s.prototype.show=function(i){var o=this,t=a.Event("show.bs.modal",{relatedTarget:i});this.$element.trigger(t),this.isShown||t.isDefaultPrevented()||(this.isShown=!0,this.checkScrollbar(),this.setScrollbar(),this.$body.addClass("modal-open"),this.escape(),this.resize(),this.$element.on("click.dismiss.bs.modal",'[data-dismiss="modal"]',a.proxy(this.hide,this)),this.$dialog.on("mousedown.dismiss.bs.modal",function(){o.$element.one("mouseup.dismiss.bs.modal",function(t){a(t.target).is(o.$element)&&(o.ignoreBackdropClick=!0)})}),this.backdrop(function(){var t=a.support.transition&&o.$element.hasClass("fade");o.$element.parent().length||o.$element.appendTo(o.$body),o.$element.show().scrollTop(0),o.adjustDialog(),t&&o.$element[0].offsetWidth,o.$element.addClass("in"),o.enforceFocus();var e=a.Event("shown.bs.modal",{relatedTarget:i});t?o.$dialog.one("bsTransitionEnd",function(){o.$element.trigger("focus").trigger(e)}).emulateTransitionEnd(s.TRANSITION_DURATION):o.$element.trigger("focus").trigger(e)}))},s.prototype.hide=function(t){t&&t.preventDefault(),t=a.Event("hide.bs.modal"),this.$element.trigger(t),this.isShown&&!t.isDefaultPrevented()&&(this.isShown=!1,this.escape(),this.resize(),a(document).off("focusin.bs.modal"),this.$element.removeClass("in").off("click.dismiss.bs.modal").off("mouseup.dismiss.bs.modal"),this.$dialog.off("mousedown.dismiss.bs.modal"),a.support.transition&&this.$element.hasClass("fade")?this.$element.one("bsTransitionEnd",a.proxy(this.hideModal,this)).emulateTransitionEnd(s.TRANSITION_DURATION):this.hideModal())},s.prototype.enforceFocus=function(){a(document).off("focusin.bs.modal").on("focusin.bs.modal",a.proxy(function(t){document===t.target||this.$element[0]===t.target||this.$element.has(t.target).length||this.$element.trigger("focus")},this))},s.prototype.escape=function(){this.isShown&&this.options.keyboard?this.$element.on("keydown.dismiss.bs.modal",a.proxy(function(t){27==t.which&&this.hide()},this)):this.isShown||this.$element.off("keydown.dismiss.bs.modal")},s.prototype.resize=function(){this.isShown?a(window).on("resize.bs.modal",a.proxy(this.handleUpdate,this)):a(window).off("resize.bs.modal")},s.prototype.hideModal=function(){var t=this;this.$element.hide(),this.backdrop(function(){t.$body.removeClass("modal-open"),t.resetAdjustments(),t.resetScrollbar(),t.$element.trigger("hidden.bs.modal")})},s.prototype.removeBackdrop=function(){this.$backdrop&&this.$backdrop.remove(),this.$backdrop=null},s.prototype.backdrop=function(t){var e=this,i=this.$element.hasClass("fade")?"fade":"";if(this.isShown&&this.options.backdrop){var 
o=a.support.transition&&i;if(this.$backdrop=a(document.createElement("div")).addClass("modal-backdrop "+i).appendTo(this.$body),this.$element.on("click.dismiss.bs.modal",a.proxy(function(t){this.ignoreBackdropClick?this.ignoreBackdropClick=!1:t.target===t.currentTarget&&("static"==this.options.backdrop?this.$element[0].focus():this.hide())},this)),o&&this.$backdrop[0].offsetWidth,this.$backdrop.addClass("in"),!t)return;o?this.$backdrop.one("bsTransitionEnd",t).emulateTransitionEnd(s.BACKDROP_TRANSITION_DURATION):t()}else if(!this.isShown&&this.$backdrop){this.$backdrop.removeClass("in");var n=function(){e.removeBackdrop(),t&&t()};a.support.transition&&this.$element.hasClass("fade")?this.$backdrop.one("bsTransitionEnd",n).emulateTransitionEnd(s.BACKDROP_TRANSITION_DURATION):n()}else t&&t()},s.prototype.handleUpdate=function(){this.adjustDialog()},s.prototype.adjustDialog=function(){var t=this.$element[0].scrollHeight>document.documentElement.clientHeight;this.$element.css({paddingLeft:!this.bodyIsOverflowing&&t?this.scrollbarWidth:"",paddingRight:this.bodyIsOverflowing&&!t?this.scrollbarWidth:""})},s.prototype.resetAdjustments=function(){this.$element.css({paddingLeft:"",paddingRight:""})},s.prototype.checkScrollbar=function(){var t=window.innerWidth;if(!t){var e=document.documentElement.getBoundingClientRect();t=e.right-Math.abs(e.left)}this.bodyIsOverflowing=document.body.clientWidth<t,this.scrollbarWidth=this.measureScrollbar()},s.prototype.setScrollbar=function(){var t=parseInt(this.$body.css("padding-right")||0,10);this.originalBodyPad=document.body.style.paddingRight||"";var n=this.scrollbarWidth;this.bodyIsOverflowing&&(this.$body.css("padding-right",t+n),a(this.fixedContent).each(function(t,e){var i=e.style.paddingRight,o=a(e).css("padding-right");a(e).data("padding-right",i).css("padding-right",parseFloat(o)+n+"px")}))},s.prototype.resetScrollbar=function(){this.$body.css("padding-right",this.originalBodyPad),a(this.fixedContent).each(function(t,e){var i=a(e).data("padding-right");a(e).removeData("padding-right"),e.style.paddingRight=i||""})},s.prototype.measureScrollbar=function(){var t=document.createElement("div");t.className="modal-scrollbar-measure",this.$body.append(t);var e=t.offsetWidth-t.clientWidth;return this.$body[0].removeChild(t),e};var t=a.fn.modal;a.fn.modal=r,a.fn.modal.Constructor=s,a.fn.modal.noConflict=function(){return a.fn.modal=t,this},a(document).on("click.bs.modal.data-api",'[data-toggle="modal"]',function(t){var e=a(this),i=e.attr("href"),o=e.attr("data-target")||i&&i.replace(/.*(?=#[^\s]+$)/,""),n=a(document).find(o),s=n.data("bs.modal")?"toggle":a.extend({remote:!/#/.test(i)&&i},n.data(),e.data());e.is("a")&&t.preventDefault(),n.one("show.bs.modal",function(t){t.isDefaultPrevented()||n.one("hidden.bs.modal",function(){e.is(":visible")&&e.trigger("focus")})}),r.call(n,s,this)})}(jQuery),function(g){"use strict";var o=["sanitize","whiteList","sanitizeFn"],a=["background","cite","href","itemtype","longdesc","poster","src","xlink:href"],t={"*":["class","dir","id","lang","role",/^aria-[\w-]*$/i],a:["target","href","title","rel"],area:[],b:[],br:[],col:[],code:[],div:[],em:[],hr:[],h1:[],h2:[],h3:[],h4:[],h5:[],h6:[],i:[],img:["src","alt","title","width","height"],li:[],ol:[],p:[],pre:[],s:[],small:[],span:[],sub:[],sup:[],strong:[],u:[],ul:[]},r=/^(?:(?:https?|mailto|ftp|tel|file):|[^&:/?#]*(?:[/?#]|$))/gi,l=/^data:(?:image\/(?:bmp|gif|jpeg|jpg|png|tiff|webp)|video\/(?:mpeg|mp4|ogg|webm)|audio\/(?:mp3|oga|ogg|opus));base64,[a-z0-9+/]+=*$/i;function u(t,e){var 
i=t.nodeName.toLowerCase();if(-1!==g.inArray(i,e))return-1===g.inArray(i,a)||Boolean(t.nodeValue.match(r)||t.nodeValue.match(l));for(var o=g(e).filter(function(t,e){return e instanceof RegExp}),n=0,s=o.length;n<s;n++)if(i.match(o[n]))return!0;return!1}function n(t,e,i){if(0===t.length)return t;if(i&&"function"==typeof i)return i(t);if(!document.implementation||!document.implementation.createHTMLDocument)return t;var o=document.implementation.createHTMLDocument("sanitization");o.body.innerHTML=t;for(var n=g.map(e,function(t,e){return e}),s=g(o.body).find("*"),a=0,r=s.length;a<r;a++){var l=s[a],h=l.nodeName.toLowerCase();if(-1!==g.inArray(h,n))for(var d=g.map(l.attributes,function(t){return t}),p=[].concat(e["*"]||[],e[h]||[]),c=0,f=d.length;c<f;c++)u(d[c],p)||l.removeAttribute(d[c].nodeName);else l.parentNode.removeChild(l)}return o.body.innerHTML}var m=function(t,e){this.type=null,this.options=null,this.enabled=null,this.timeout=null,this.hoverState=null,this.$element=null,this.inState=null,this.init("tooltip",t,e)};m.VERSION="3.4.1",m.TRANSITION_DURATION=150,m.DEFAULTS={animation:!0,placement:"top",selector:!1,template:'<div class="tooltip" role="tooltip"><div class="tooltip-arrow"></div><div class="tooltip-inner"></div></div>',trigger:"hover focus",title:"",delay:0,html:!1,container:!1,viewport:{selector:"body",padding:0},sanitize:!0,sanitizeFn:null,whiteList:t},m.prototype.init=function(t,e,i){if(this.enabled=!0,this.type=t,this.$element=g(e),this.options=this.getOptions(i),this.$viewport=this.options.viewport&&g(document).find(g.isFunction(this.options.viewport)?this.options.viewport.call(this,this.$element):this.options.viewport.selector||this.options.viewport),this.inState={click:!1,hover:!1,focus:!1},this.$element[0]instanceof document.constructor&&!this.options.selector)throw new Error("`selector` option must be specified when initializing "+this.type+" on the window.document object!");for(var o=this.options.trigger.split(" "),n=o.length;n--;){var s=o[n];if("click"==s)this.$element.on("click."+this.type,this.options.selector,g.proxy(this.toggle,this));else if("manual"!=s){var a="hover"==s?"mouseenter":"focusin",r="hover"==s?"mouseleave":"focusout";this.$element.on(a+"."+this.type,this.options.selector,g.proxy(this.enter,this)),this.$element.on(r+"."+this.type,this.options.selector,g.proxy(this.leave,this))}}this.options.selector?this._options=g.extend({},this.options,{trigger:"manual",selector:""}):this.fixTitle()},m.prototype.getDefaults=function(){return m.DEFAULTS},m.prototype.getOptions=function(t){var e=this.$element.data();for(var i in e)e.hasOwnProperty(i)&&-1!==g.inArray(i,o)&&delete e[i];return(t=g.extend({},this.getDefaults(),e,t)).delay&&"number"==typeof t.delay&&(t.delay={show:t.delay,hide:t.delay}),t.sanitize&&(t.template=n(t.template,t.whiteList,t.sanitizeFn)),t},m.prototype.getDelegateOptions=function(){var i={},o=this.getDefaults();return this._options&&g.each(this._options,function(t,e){o[t]!=e&&(i[t]=e)}),i},m.prototype.enter=function(t){var e=t instanceof this.constructor?t:g(t.currentTarget).data("bs."+this.type);if(e||(e=new this.constructor(t.currentTarget,this.getDelegateOptions()),g(t.currentTarget).data("bs."+this.type,e)),t instanceof g.Event&&(e.inState["focusin"==t.type?"focus":"hover"]=!0),e.tip().hasClass("in")||"in"==e.hoverState)e.hoverState="in";else{if(clearTimeout(e.timeout),e.hoverState="in",!e.options.delay||!e.options.delay.show)return 
e.show();e.timeout=setTimeout(function(){"in"==e.hoverState&&e.show()},e.options.delay.show)}},m.prototype.isInStateTrue=function(){for(var t in this.inState)if(this.inState[t])return!0;return!1},m.prototype.leave=function(t){var e=t instanceof this.constructor?t:g(t.currentTarget).data("bs."+this.type);if(e||(e=new this.constructor(t.currentTarget,this.getDelegateOptions()),g(t.currentTarget).data("bs."+this.type,e)),t instanceof g.Event&&(e.inState["focusout"==t.type?"focus":"hover"]=!1),!e.isInStateTrue()){if(clearTimeout(e.timeout),e.hoverState="out",!e.options.delay||!e.options.delay.hide)return e.hide();e.timeout=setTimeout(function(){"out"==e.hoverState&&e.hide()},e.options.delay.hide)}},m.prototype.show=function(){var t=g.Event("show.bs."+this.type);if(this.hasContent()&&this.enabled){this.$element.trigger(t);var e=g.contains(this.$element[0].ownerDocument.documentElement,this.$element[0]);if(t.isDefaultPrevented()||!e)return;var i=this,o=this.tip(),n=this.getUID(this.type);this.setContent(),o.attr("id",n),this.$element.attr("aria-describedby",n),this.options.animation&&o.addClass("fade");var s="function"==typeof this.options.placement?this.options.placement.call(this,o[0],this.$element[0]):this.options.placement,a=/\s?auto?\s?/i,r=a.test(s);r&&(s=s.replace(a,"")||"top"),o.detach().css({top:0,left:0,display:"block"}).addClass(s).data("bs."+this.type,this),this.options.container?o.appendTo(g(document).find(this.options.container)):o.insertAfter(this.$element),this.$element.trigger("inserted.bs."+this.type);var l=this.getPosition(),h=o[0].offsetWidth,d=o[0].offsetHeight;if(r){var p=s,c=this.getPosition(this.$viewport);s="bottom"==s&&l.bottom+d>c.bottom?"top":"top"==s&&l.top-d<c.top?"bottom":"right"==s&&l.right+h>c.width?"left":"left"==s&&l.left-h<c.left?"right":s,o.removeClass(p).addClass(s)}var f=this.getCalculatedOffset(s,l,h,d);this.applyPlacement(f,s);var u=function(){var t=i.hoverState;i.$element.trigger("shown.bs."+i.type),i.hoverState=null,"out"==t&&i.leave(i)};g.support.transition&&this.$tip.hasClass("fade")?o.one("bsTransitionEnd",u).emulateTransitionEnd(m.TRANSITION_DURATION):u()}},m.prototype.applyPlacement=function(t,e){var i=this.tip(),o=i[0].offsetWidth,n=i[0].offsetHeight,s=parseInt(i.css("margin-top"),10),a=parseInt(i.css("margin-left"),10);isNaN(s)&&(s=0),isNaN(a)&&(a=0),t.top+=s,t.left+=a,g.offset.setOffset(i[0],g.extend({using:function(t){i.css({top:Math.round(t.top),left:Math.round(t.left)})}},t),0),i.addClass("in");var r=i[0].offsetWidth,l=i[0].offsetHeight;"top"==e&&l!=n&&(t.top=t.top+n-l);var h=this.getViewportAdjustedDelta(e,t,r,l);h.left?t.left+=h.left:t.top+=h.top;var d=/top|bottom/.test(e),p=d?2*h.left-o+r:2*h.top-n+l,c=d?"offsetWidth":"offsetHeight";i.offset(t),this.replaceArrow(p,i[0][c],d)},m.prototype.replaceArrow=function(t,e,i){this.arrow().css(i?"left":"top",50*(1-t/e)+"%").css(i?"top":"left","")},m.prototype.setContent=function(){var t=this.tip(),e=this.getTitle();this.options.html?(this.options.sanitize&&(e=n(e,this.options.whiteList,this.options.sanitizeFn)),t.find(".tooltip-inner").html(e)):t.find(".tooltip-inner").text(e),t.removeClass("fade in top bottom left right")},m.prototype.hide=function(t){var e=this,i=g(this.$tip),o=g.Event("hide.bs."+this.type);function n(){"in"!=e.hoverState&&i.detach(),e.$element&&e.$element.removeAttr("aria-describedby").trigger("hidden.bs."+e.type),t&&t()}if(this.$element.trigger(o),!o.isDefaultPrevented())return 
i.removeClass("in"),g.support.transition&&i.hasClass("fade")?i.one("bsTransitionEnd",n).emulateTransitionEnd(m.TRANSITION_DURATION):n(),this.hoverState=null,this},m.prototype.fixTitle=function(){var t=this.$element;(t.attr("title")||"string"!=typeof t.attr("data-original-title"))&&t.attr("data-original-title",t.attr("title")||"").attr("title","")},m.prototype.hasContent=function(){return this.getTitle()},m.prototype.getPosition=function(t){var e=(t=t||this.$element)[0],i="BODY"==e.tagName,o=e.getBoundingClientRect();null==o.width&&(o=g.extend({},o,{width:o.right-o.left,height:o.bottom-o.top}));var n=window.SVGElement&&e instanceof window.SVGElement,s=i?{top:0,left:0}:n?null:t.offset(),a={scroll:i?document.documentElement.scrollTop||document.body.scrollTop:t.scrollTop()},r=i?{width:g(window).width(),height:g(window).height()}:null;return g.extend({},o,a,r,s)},m.prototype.getCalculatedOffset=function(t,e,i,o){return"bottom"==t?{top:e.top+e.height,left:e.left+e.width/2-i/2}:"top"==t?{top:e.top-o,left:e.left+e.width/2-i/2}:"left"==t?{top:e.top+e.height/2-o/2,left:e.left-i}:{top:e.top+e.height/2-o/2,left:e.left+e.width}},m.prototype.getViewportAdjustedDelta=function(t,e,i,o){var n={top:0,left:0};if(!this.$viewport)return n;var s=this.options.viewport&&this.options.viewport.padding||0,a=this.getPosition(this.$viewport);if(/right|left/.test(t)){var r=e.top-s-a.scroll,l=e.top+s-a.scroll+o;r<a.top?n.top=a.top-r:l>a.top+a.height&&(n.top=a.top+a.height-l)}else{var h=e.left-s,d=e.left+s+i;h<a.left?n.left=a.left-h:d>a.right&&(n.left=a.left+a.width-d)}return n},m.prototype.getTitle=function(){var t=this.$element,e=this.options;return t.attr("data-original-title")||("function"==typeof e.title?e.title.call(t[0]):e.title)},m.prototype.getUID=function(t){for(;t+=~~(1e6*Math.random()),document.getElementById(t););return t},m.prototype.tip=function(){if(!this.$tip&&(this.$tip=g(this.options.template),1!=this.$tip.length))throw new Error(this.type+" `template` option must consist of exactly 1 top-level element!");return this.$tip},m.prototype.arrow=function(){return this.$arrow=this.$arrow||this.tip().find(".tooltip-arrow")},m.prototype.enable=function(){this.enabled=!0},m.prototype.disable=function(){this.enabled=!1},m.prototype.toggleEnabled=function(){this.enabled=!this.enabled},m.prototype.toggle=function(t){var e=this;t&&((e=g(t.currentTarget).data("bs."+this.type))||(e=new this.constructor(t.currentTarget,this.getDelegateOptions()),g(t.currentTarget).data("bs."+this.type,e))),t?(e.inState.click=!e.inState.click,e.isInStateTrue()?e.enter(e):e.leave(e)):e.tip().hasClass("in")?e.leave(e):e.enter(e)},m.prototype.destroy=function(){var t=this;clearTimeout(this.timeout),this.hide(function(){t.$element.off("."+t.type).removeData("bs."+t.type),t.$tip&&t.$tip.detach(),t.$tip=null,t.$arrow=null,t.$viewport=null,t.$element=null})},m.prototype.sanitizeHtml=function(t){return n(t,this.options.whiteList,this.options.sanitizeFn)};var e=g.fn.tooltip;g.fn.tooltip=function i(o){return this.each(function(){var t=g(this),e=t.data("bs.tooltip"),i="object"==typeof o&&o;!e&&/destroy|hide/.test(o)||(e||t.data("bs.tooltip",e=new m(this,i)),"string"==typeof o&&e[o]())})},g.fn.tooltip.Constructor=m,g.fn.tooltip.noConflict=function(){return g.fn.tooltip=e,this}}(jQuery),function(n){"use strict";var s=function(t,e){this.init("popover",t,e)};if(!n.fn.tooltip)throw new Error("Popover requires 
tooltip.js");s.VERSION="3.4.1",s.DEFAULTS=n.extend({},n.fn.tooltip.Constructor.DEFAULTS,{placement:"right",trigger:"click",content:"",template:'<div class="popover" role="tooltip"><div class="arrow"></div><h3 class="popover-title"></h3><div class="popover-content"></div></div>'}),((s.prototype=n.extend({},n.fn.tooltip.Constructor.prototype)).constructor=s).prototype.getDefaults=function(){return s.DEFAULTS},s.prototype.setContent=function(){var t=this.tip(),e=this.getTitle(),i=this.getContent();if(this.options.html){var o=typeof i;this.options.sanitize&&(e=this.sanitizeHtml(e),"string"===o&&(i=this.sanitizeHtml(i))),t.find(".popover-title").html(e),t.find(".popover-content").children().detach().end()["string"===o?"html":"append"](i)}else t.find(".popover-title").text(e),t.find(".popover-content").children().detach().end().text(i);t.removeClass("fade top bottom left right in"),t.find(".popover-title").html()||t.find(".popover-title").hide()},s.prototype.hasContent=function(){return this.getTitle()||this.getContent()},s.prototype.getContent=function(){var t=this.$element,e=this.options;return t.attr("data-content")||("function"==typeof e.content?e.content.call(t[0]):e.content)},s.prototype.arrow=function(){return this.$arrow=this.$arrow||this.tip().find(".arrow")};var t=n.fn.popover;n.fn.popover=function e(o){return this.each(function(){var t=n(this),e=t.data("bs.popover"),i="object"==typeof o&&o;!e&&/destroy|hide/.test(o)||(e||t.data("bs.popover",e=new s(this,i)),"string"==typeof o&&e[o]())})},n.fn.popover.Constructor=s,n.fn.popover.noConflict=function(){return n.fn.popover=t,this}}(jQuery),function(s){"use strict";function n(t,e){this.$body=s(document.body),this.$scrollElement=s(t).is(document.body)?s(window):s(t),this.options=s.extend({},n.DEFAULTS,e),this.selector=(this.options.target||"")+" .nav li > a",this.offsets=[],this.targets=[],this.activeTarget=null,this.scrollHeight=0,this.$scrollElement.on("scroll.bs.scrollspy",s.proxy(this.process,this)),this.refresh(),this.process()}function e(o){return this.each(function(){var t=s(this),e=t.data("bs.scrollspy"),i="object"==typeof o&&o;e||t.data("bs.scrollspy",e=new n(this,i)),"string"==typeof o&&e[o]()})}n.VERSION="3.4.1",n.DEFAULTS={offset:10},n.prototype.getScrollHeight=function(){return this.$scrollElement[0].scrollHeight||Math.max(this.$body[0].scrollHeight,document.documentElement.scrollHeight)},n.prototype.refresh=function(){var t=this,o="offset",n=0;this.offsets=[],this.targets=[],this.scrollHeight=this.getScrollHeight(),s.isWindow(this.$scrollElement[0])||(o="position",n=this.$scrollElement.scrollTop()),this.$body.find(this.selector).map(function(){var t=s(this),e=t.data("target")||t.attr("href"),i=/^#./.test(e)&&s(e);return i&&i.length&&i.is(":visible")&&[[i[o]().top+n,e]]||null}).sort(function(t,e){return t[0]-e[0]}).each(function(){t.offsets.push(this[0]),t.targets.push(this[1])})},n.prototype.process=function(){var t,e=this.$scrollElement.scrollTop()+this.options.offset,i=this.getScrollHeight(),o=this.options.offset+i-this.$scrollElement.height(),n=this.offsets,s=this.targets,a=this.activeTarget;if(this.scrollHeight!=i&&this.refresh(),o<=e)return a!=(t=s[s.length-1])&&this.activate(t);if(a&&e<n[0])return this.activeTarget=null,this.clear();for(t=n.length;t--;)a!=s[t]&&e>=n[t]&&(n[t+1]===undefined||e<n[t+1])&&this.activate(s[t])},n.prototype.activate=function(t){this.activeTarget=t,this.clear();var 
e=this.selector+'[data-target="'+t+'"],'+this.selector+'[href="'+t+'"]',i=s(e).parents("li").addClass("active");i.parent(".dropdown-menu").length&&(i=i.closest("li.dropdown").addClass("active")),i.trigger("activate.bs.scrollspy")},n.prototype.clear=function(){s(this.selector).parentsUntil(this.options.target,".active").removeClass("active")};var t=s.fn.scrollspy;s.fn.scrollspy=e,s.fn.scrollspy.Constructor=n,s.fn.scrollspy.noConflict=function(){return s.fn.scrollspy=t,this},s(window).on("load.bs.scrollspy.data-api",function(){s('[data-spy="scroll"]').each(function(){var t=s(this);e.call(t,t.data())})})}(jQuery),function(r){"use strict";var a=function(t){this.element=r(t)};function e(i){return this.each(function(){var t=r(this),e=t.data("bs.tab");e||t.data("bs.tab",e=new a(this)),"string"==typeof i&&e[i]()})}a.VERSION="3.4.1",a.TRANSITION_DURATION=150,a.prototype.show=function(){var t=this.element,e=t.closest("ul:not(.dropdown-menu)"),i=t.data("target");if(i||(i=(i=t.attr("href"))&&i.replace(/.*(?=#[^\s]*$)/,"")),!t.parent("li").hasClass("active")){var o=e.find(".active:last a"),n=r.Event("hide.bs.tab",{relatedTarget:t[0]}),s=r.Event("show.bs.tab",{relatedTarget:o[0]});if(o.trigger(n),t.trigger(s),!s.isDefaultPrevented()&&!n.isDefaultPrevented()){var a=r(document).find(i);this.activate(t.closest("li"),e),this.activate(a,a.parent(),function(){o.trigger({type:"hidden.bs.tab",relatedTarget:t[0]}),t.trigger({type:"shown.bs.tab",relatedTarget:o[0]})})}}},a.prototype.activate=function(t,e,i){var o=e.find("> .active"),n=i&&r.support.transition&&(o.length&&o.hasClass("fade")||!!e.find("> .fade").length);function s(){o.removeClass("active").find("> .dropdown-menu > .active").removeClass("active").end().find('[data-toggle="tab"]').attr("aria-expanded",!1),t.addClass("active").find('[data-toggle="tab"]').attr("aria-expanded",!0),n?(t[0].offsetWidth,t.addClass("in")):t.removeClass("fade"),t.parent(".dropdown-menu").length&&t.closest("li.dropdown").addClass("active").end().find('[data-toggle="tab"]').attr("aria-expanded",!0),i&&i()}o.length&&n?o.one("bsTransitionEnd",s).emulateTransitionEnd(a.TRANSITION_DURATION):s(),o.removeClass("in")};var t=r.fn.tab;r.fn.tab=e,r.fn.tab.Constructor=a,r.fn.tab.noConflict=function(){return r.fn.tab=t,this};var i=function(t){t.preventDefault(),e.call(r(this),"show")};r(document).on("click.bs.tab.data-api",'[data-toggle="tab"]',i).on("click.bs.tab.data-api",'[data-toggle="pill"]',i)}(jQuery),function(l){"use strict";var h=function(t,e){this.options=l.extend({},h.DEFAULTS,e);var i=this.options.target===h.DEFAULTS.target?l(this.options.target):l(document).find(this.options.target);this.$target=i.on("scroll.bs.affix.data-api",l.proxy(this.checkPosition,this)).on("click.bs.affix.data-api",l.proxy(this.checkPositionWithEventLoop,this)),this.$element=l(t),this.affixed=null,this.unpin=null,this.pinnedOffset=null,this.checkPosition()};function i(o){return this.each(function(){var t=l(this),e=t.data("bs.affix"),i="object"==typeof o&&o;e||t.data("bs.affix",e=new h(this,i)),"string"==typeof o&&e[o]()})}h.VERSION="3.4.1",h.RESET="affix affix-top affix-bottom",h.DEFAULTS={offset:0,target:window},h.prototype.getState=function(t,e,i,o){var n=this.$target.scrollTop(),s=this.$element.offset(),a=this.$target.height();if(null!=i&&"top"==this.affixed)return n<i&&"top";if("bottom"==this.affixed)return null!=i?!(n+this.unpin<=s.top)&&"bottom":!(n+a<=t-o)&&"bottom";var r=null==this.affixed,l=r?n:s.top;return 
null!=i&&n<=i?"top":null!=o&&t-o<=l+(r?a:e)&&"bottom"},h.prototype.getPinnedOffset=function(){if(this.pinnedOffset)return this.pinnedOffset;this.$element.removeClass(h.RESET).addClass("affix");var t=this.$target.scrollTop(),e=this.$element.offset();return this.pinnedOffset=e.top-t},h.prototype.checkPositionWithEventLoop=function(){setTimeout(l.proxy(this.checkPosition,this),1)},h.prototype.checkPosition=function(){if(this.$element.is(":visible")){var t=this.$element.height(),e=this.options.offset,i=e.top,o=e.bottom,n=Math.max(l(document).height(),l(document.body).height());"object"!=typeof e&&(o=i=e),"function"==typeof i&&(i=e.top(this.$element)),"function"==typeof o&&(o=e.bottom(this.$element));var s=this.getState(n,t,i,o);if(this.affixed!=s){null!=this.unpin&&this.$element.css("top","");var a="affix"+(s?"-"+s:""),r=l.Event(a+".bs.affix");if(this.$element.trigger(r),r.isDefaultPrevented())return;this.affixed=s,this.unpin="bottom"==s?this.getPinnedOffset():null,this.$element.removeClass(h.RESET).addClass(a).trigger(a.replace("affix","affixed")+".bs.affix")}"bottom"==s&&this.$element.offset({top:n-t-o})}};var t=l.fn.affix;l.fn.affix=i,l.fn.affix.Constructor=h,l.fn.affix.noConflict=function(){return l.fn.affix=t,this},l(window).on("load",function(){l('[data-spy="affix"]').each(function(){var t=l(this),e=t.data();e.offset=e.offset||{},null!=e.offsetBottom&&(e.offset.bottom=e.offsetBottom),null!=e.offsetTop&&(e.offset.top=e.offsetTop),i.call(t,e)})})}(jQuery); \ No newline at end of file
diff --git a/lib/toaster/toastergui/static/js/bootstrap.min.js b/lib/toaster/toastergui/static/js/bootstrap.min.js
deleted file mode 100644
index c4a924160..000000000
--- a/lib/toaster/toastergui/static/js/bootstrap.min.js
+++ /dev/null
@@ -1,7 +0,0 @@
-/*!
- * Bootstrap v3.3.6 (http://getbootstrap.com)
- * Copyright 2011-2016 Twitter, Inc.
- * Licensed under the MIT license
- */
-if("undefined"==typeof jQuery)throw new Error("Bootstrap's JavaScript requires jQuery");+function(a){"use strict";var b=a.fn.jquery.split(" ")[0].split(".");if(b[0]<2&&b[1]<9||1==b[0]&&9==b[1]&&b[2]<1||b[0]>2)throw new Error("Bootstrap's JavaScript requires jQuery version 1.9.1 or higher, but lower than version 3")}(jQuery),+function(a){"use strict";function b(){var a=document.createElement("bootstrap"),b={WebkitTransition:"webkitTransitionEnd",MozTransition:"transitionend",OTransition:"oTransitionEnd otransitionend",transition:"transitionend"};for(var c in b)if(void 0!==a.style[c])return{end:b[c]};return!1}a.fn.emulateTransitionEnd=function(b){var c=!1,d=this;a(this).one("bsTransitionEnd",function(){c=!0});var e=function(){c||a(d).trigger(a.support.transition.end)};return setTimeout(e,b),this},a(function(){a.support.transition=b(),a.support.transition&&(a.event.special.bsTransitionEnd={bindType:a.support.transition.end,delegateType:a.support.transition.end,handle:function(b){return a(b.target).is(this)?b.handleObj.handler.apply(this,arguments):void 0}})})}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var c=a(this),e=c.data("bs.alert");e||c.data("bs.alert",e=new d(this)),"string"==typeof b&&e[b].call(c)})}var c='[data-dismiss="alert"]',d=function(b){a(b).on("click",c,this.close)};d.VERSION="3.3.6",d.TRANSITION_DURATION=150,d.prototype.close=function(b){function c(){g.detach().trigger("closed.bs.alert").remove()}var e=a(this),f=e.attr("data-target");f||(f=e.attr("href"),f=f&&f.replace(/.*(?=#[^\s]*$)/,""));var g=a(f);b&&b.preventDefault(),g.length||(g=e.closest(".alert")),g.trigger(b=a.Event("close.bs.alert")),b.isDefaultPrevented()||(g.removeClass("in"),a.support.transition&&g.hasClass("fade")?g.one("bsTransitionEnd",c).emulateTransitionEnd(d.TRANSITION_DURATION):c())};var e=a.fn.alert;a.fn.alert=b,a.fn.alert.Constructor=d,a.fn.alert.noConflict=function(){return a.fn.alert=e,this},a(document).on("click.bs.alert.data-api",c,d.prototype.close)}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.button"),f="object"==typeof b&&b;e||d.data("bs.button",e=new c(this,f)),"toggle"==b?e.toggle():b&&e.setState(b)})}var c=function(b,d){this.$element=a(b),this.options=a.extend({},c.DEFAULTS,d),this.isLoading=!1};c.VERSION="3.3.6",c.DEFAULTS={loadingText:"loading..."},c.prototype.setState=function(b){var c="disabled",d=this.$element,e=d.is("input")?"val":"html",f=d.data();b+="Text",null==f.resetText&&d.data("resetText",d[e]()),setTimeout(a.proxy(function(){d[e](null==f[b]?this.options[b]:f[b]),"loadingText"==b?(this.isLoading=!0,d.addClass(c).attr(c,c)):this.isLoading&&(this.isLoading=!1,d.removeClass(c).removeAttr(c))},this),0)},c.prototype.toggle=function(){var a=!0,b=this.$element.closest('[data-toggle="buttons"]');if(b.length){var c=this.$element.find("input");"radio"==c.prop("type")?(c.prop("checked")&&(a=!1),b.find(".active").removeClass("active"),this.$element.addClass("active")):"checkbox"==c.prop("type")&&(c.prop("checked")!==this.$element.hasClass("active")&&(a=!1),this.$element.toggleClass("active")),c.prop("checked",this.$element.hasClass("active")),a&&c.trigger("change")}else this.$element.attr("aria-pressed",!this.$element.hasClass("active")),this.$element.toggleClass("active")};var d=a.fn.button;a.fn.button=b,a.fn.button.Constructor=c,a.fn.button.noConflict=function(){return a.fn.button=d,this},a(document).on("click.bs.button.data-api",'[data-toggle^="button"]',function(c){var 
d=a(c.target);d.hasClass("btn")||(d=d.closest(".btn")),b.call(d,"toggle"),a(c.target).is('input[type="radio"]')||a(c.target).is('input[type="checkbox"]')||c.preventDefault()}).on("focus.bs.button.data-api blur.bs.button.data-api",'[data-toggle^="button"]',function(b){a(b.target).closest(".btn").toggleClass("focus",/^focus(in)?$/.test(b.type))})}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.carousel"),f=a.extend({},c.DEFAULTS,d.data(),"object"==typeof b&&b),g="string"==typeof b?b:f.slide;e||d.data("bs.carousel",e=new c(this,f)),"number"==typeof b?e.to(b):g?e[g]():f.interval&&e.pause().cycle()})}var c=function(b,c){this.$element=a(b),this.$indicators=this.$element.find(".carousel-indicators"),this.options=c,this.paused=null,this.sliding=null,this.interval=null,this.$active=null,this.$items=null,this.options.keyboard&&this.$element.on("keydown.bs.carousel",a.proxy(this.keydown,this)),"hover"==this.options.pause&&!("ontouchstart"in document.documentElement)&&this.$element.on("mouseenter.bs.carousel",a.proxy(this.pause,this)).on("mouseleave.bs.carousel",a.proxy(this.cycle,this))};c.VERSION="3.3.6",c.TRANSITION_DURATION=600,c.DEFAULTS={interval:5e3,pause:"hover",wrap:!0,keyboard:!0},c.prototype.keydown=function(a){if(!/input|textarea/i.test(a.target.tagName)){switch(a.which){case 37:this.prev();break;case 39:this.next();break;default:return}a.preventDefault()}},c.prototype.cycle=function(b){return b||(this.paused=!1),this.interval&&clearInterval(this.interval),this.options.interval&&!this.paused&&(this.interval=setInterval(a.proxy(this.next,this),this.options.interval)),this},c.prototype.getItemIndex=function(a){return this.$items=a.parent().children(".item"),this.$items.index(a||this.$active)},c.prototype.getItemForDirection=function(a,b){var c=this.getItemIndex(b),d="prev"==a&&0===c||"next"==a&&c==this.$items.length-1;if(d&&!this.options.wrap)return b;var e="prev"==a?-1:1,f=(c+e)%this.$items.length;return this.$items.eq(f)},c.prototype.to=function(a){var b=this,c=this.getItemIndex(this.$active=this.$element.find(".item.active"));return a>this.$items.length-1||0>a?void 0:this.sliding?this.$element.one("slid.bs.carousel",function(){b.to(a)}):c==a?this.pause().cycle():this.slide(a>c?"next":"prev",this.$items.eq(a))},c.prototype.pause=function(b){return b||(this.paused=!0),this.$element.find(".next, .prev").length&&a.support.transition&&(this.$element.trigger(a.support.transition.end),this.cycle(!0)),this.interval=clearInterval(this.interval),this},c.prototype.next=function(){return this.sliding?void 0:this.slide("next")},c.prototype.prev=function(){return this.sliding?void 0:this.slide("prev")},c.prototype.slide=function(b,d){var e=this.$element.find(".item.active"),f=d||this.getItemForDirection(b,e),g=this.interval,h="next"==b?"left":"right",i=this;if(f.hasClass("active"))return this.sliding=!1;var j=f[0],k=a.Event("slide.bs.carousel",{relatedTarget:j,direction:h});if(this.$element.trigger(k),!k.isDefaultPrevented()){if(this.sliding=!0,g&&this.pause(),this.$indicators.length){this.$indicators.find(".active").removeClass("active");var l=a(this.$indicators.children()[this.getItemIndex(f)]);l&&l.addClass("active")}var m=a.Event("slid.bs.carousel",{relatedTarget:j,direction:h});return a.support.transition&&this.$element.hasClass("slide")?(f.addClass(b),f[0].offsetWidth,e.addClass(h),f.addClass(h),e.one("bsTransitionEnd",function(){f.removeClass([b,h].join(" ")).addClass("active"),e.removeClass(["active",h].join(" 
")),i.sliding=!1,setTimeout(function(){i.$element.trigger(m)},0)}).emulateTransitionEnd(c.TRANSITION_DURATION)):(e.removeClass("active"),f.addClass("active"),this.sliding=!1,this.$element.trigger(m)),g&&this.cycle(),this}};var d=a.fn.carousel;a.fn.carousel=b,a.fn.carousel.Constructor=c,a.fn.carousel.noConflict=function(){return a.fn.carousel=d,this};var e=function(c){var d,e=a(this),f=a(e.attr("data-target")||(d=e.attr("href"))&&d.replace(/.*(?=#[^\s]+$)/,""));if(f.hasClass("carousel")){var g=a.extend({},f.data(),e.data()),h=e.attr("data-slide-to");h&&(g.interval=!1),b.call(f,g),h&&f.data("bs.carousel").to(h),c.preventDefault()}};a(document).on("click.bs.carousel.data-api","[data-slide]",e).on("click.bs.carousel.data-api","[data-slide-to]",e),a(window).on("load",function(){a('[data-ride="carousel"]').each(function(){var c=a(this);b.call(c,c.data())})})}(jQuery),+function(a){"use strict";function b(b){var c,d=b.attr("data-target")||(c=b.attr("href"))&&c.replace(/.*(?=#[^\s]+$)/,"");return a(d)}function c(b){return this.each(function(){var c=a(this),e=c.data("bs.collapse"),f=a.extend({},d.DEFAULTS,c.data(),"object"==typeof b&&b);!e&&f.toggle&&/show|hide/.test(b)&&(f.toggle=!1),e||c.data("bs.collapse",e=new d(this,f)),"string"==typeof b&&e[b]()})}var d=function(b,c){this.$element=a(b),this.options=a.extend({},d.DEFAULTS,c),this.$trigger=a('[data-toggle="collapse"][href="#'+b.id+'"],[data-toggle="collapse"][data-target="#'+b.id+'"]'),this.transitioning=null,this.options.parent?this.$parent=this.getParent():this.addAriaAndCollapsedClass(this.$element,this.$trigger),this.options.toggle&&this.toggle()};d.VERSION="3.3.6",d.TRANSITION_DURATION=350,d.DEFAULTS={toggle:!0},d.prototype.dimension=function(){var a=this.$element.hasClass("width");return a?"width":"height"},d.prototype.show=function(){if(!this.transitioning&&!this.$element.hasClass("in")){var b,e=this.$parent&&this.$parent.children(".panel").children(".in, .collapsing");if(!(e&&e.length&&(b=e.data("bs.collapse"),b&&b.transitioning))){var f=a.Event("show.bs.collapse");if(this.$element.trigger(f),!f.isDefaultPrevented()){e&&e.length&&(c.call(e,"hide"),b||e.data("bs.collapse",null));var g=this.dimension();this.$element.removeClass("collapse").addClass("collapsing")[g](0).attr("aria-expanded",!0),this.$trigger.removeClass("collapsed").attr("aria-expanded",!0),this.transitioning=1;var h=function(){this.$element.removeClass("collapsing").addClass("collapse in")[g](""),this.transitioning=0,this.$element.trigger("shown.bs.collapse")};if(!a.support.transition)return h.call(this);var i=a.camelCase(["scroll",g].join("-"));this.$element.one("bsTransitionEnd",a.proxy(h,this)).emulateTransitionEnd(d.TRANSITION_DURATION)[g](this.$element[0][i])}}}},d.prototype.hide=function(){if(!this.transitioning&&this.$element.hasClass("in")){var b=a.Event("hide.bs.collapse");if(this.$element.trigger(b),!b.isDefaultPrevented()){var c=this.dimension();this.$element[c](this.$element[c]())[0].offsetHeight,this.$element.addClass("collapsing").removeClass("collapse in").attr("aria-expanded",!1),this.$trigger.addClass("collapsed").attr("aria-expanded",!1),this.transitioning=1;var e=function(){this.transitioning=0,this.$element.removeClass("collapsing").addClass("collapse").trigger("hidden.bs.collapse")};return a.support.transition?void this.$element[c](0).one("bsTransitionEnd",a.proxy(e,this)).emulateTransitionEnd(d.TRANSITION_DURATION):e.call(this)}}},d.prototype.toggle=function(){this[this.$element.hasClass("in")?"hide":"show"]()},d.prototype.getParent=function(){return 
a(this.options.parent).find('[data-toggle="collapse"][data-parent="'+this.options.parent+'"]').each(a.proxy(function(c,d){var e=a(d);this.addAriaAndCollapsedClass(b(e),e)},this)).end()},d.prototype.addAriaAndCollapsedClass=function(a,b){var c=a.hasClass("in");a.attr("aria-expanded",c),b.toggleClass("collapsed",!c).attr("aria-expanded",c)};var e=a.fn.collapse;a.fn.collapse=c,a.fn.collapse.Constructor=d,a.fn.collapse.noConflict=function(){return a.fn.collapse=e,this},a(document).on("click.bs.collapse.data-api",'[data-toggle="collapse"]',function(d){var e=a(this);e.attr("data-target")||d.preventDefault();var f=b(e),g=f.data("bs.collapse"),h=g?"toggle":e.data();c.call(f,h)})}(jQuery),+function(a){"use strict";function b(b){var c=b.attr("data-target");c||(c=b.attr("href"),c=c&&/#[A-Za-z]/.test(c)&&c.replace(/.*(?=#[^\s]*$)/,""));var d=c&&a(c);return d&&d.length?d:b.parent()}function c(c){c&&3===c.which||(a(e).remove(),a(f).each(function(){var d=a(this),e=b(d),f={relatedTarget:this};e.hasClass("open")&&(c&&"click"==c.type&&/input|textarea/i.test(c.target.tagName)&&a.contains(e[0],c.target)||(e.trigger(c=a.Event("hide.bs.dropdown",f)),c.isDefaultPrevented()||(d.attr("aria-expanded","false"),e.removeClass("open").trigger(a.Event("hidden.bs.dropdown",f)))))}))}function d(b){return this.each(function(){var c=a(this),d=c.data("bs.dropdown");d||c.data("bs.dropdown",d=new g(this)),"string"==typeof b&&d[b].call(c)})}var e=".dropdown-backdrop",f='[data-toggle="dropdown"]',g=function(b){a(b).on("click.bs.dropdown",this.toggle)};g.VERSION="3.3.6",g.prototype.toggle=function(d){var e=a(this);if(!e.is(".disabled, :disabled")){var f=b(e),g=f.hasClass("open");if(c(),!g){"ontouchstart"in document.documentElement&&!f.closest(".navbar-nav").length&&a(document.createElement("div")).addClass("dropdown-backdrop").insertAfter(a(this)).on("click",c);var h={relatedTarget:this};if(f.trigger(d=a.Event("show.bs.dropdown",h)),d.isDefaultPrevented())return;e.trigger("focus").attr("aria-expanded","true"),f.toggleClass("open").trigger(a.Event("shown.bs.dropdown",h))}return!1}},g.prototype.keydown=function(c){if(/(38|40|27|32)/.test(c.which)&&!/input|textarea/i.test(c.target.tagName)){var d=a(this);if(c.preventDefault(),c.stopPropagation(),!d.is(".disabled, :disabled")){var e=b(d),g=e.hasClass("open");if(!g&&27!=c.which||g&&27==c.which)return 27==c.which&&e.find(f).trigger("focus"),d.trigger("click");var h=" li:not(.disabled):visible a",i=e.find(".dropdown-menu"+h);if(i.length){var j=i.index(c.target);38==c.which&&j>0&&j--,40==c.which&&j<i.length-1&&j++,~j||(j=0),i.eq(j).trigger("focus")}}}};var h=a.fn.dropdown;a.fn.dropdown=d,a.fn.dropdown.Constructor=g,a.fn.dropdown.noConflict=function(){return a.fn.dropdown=h,this},a(document).on("click.bs.dropdown.data-api",c).on("click.bs.dropdown.data-api",".dropdown form",function(a){a.stopPropagation()}).on("click.bs.dropdown.data-api",f,g.prototype.toggle).on("keydown.bs.dropdown.data-api",f,g.prototype.keydown).on("keydown.bs.dropdown.data-api",".dropdown-menu",g.prototype.keydown)}(jQuery),+function(a){"use strict";function b(b,d){return this.each(function(){var e=a(this),f=e.data("bs.modal"),g=a.extend({},c.DEFAULTS,e.data(),"object"==typeof b&&b);f||e.data("bs.modal",f=new c(this,g)),"string"==typeof b?f[b](d):g.show&&f.show(d)})}var 
c=function(b,c){this.options=c,this.$body=a(document.body),this.$element=a(b),this.$dialog=this.$element.find(".modal-dialog"),this.$backdrop=null,this.isShown=null,this.originalBodyPad=null,this.scrollbarWidth=0,this.ignoreBackdropClick=!1,this.options.remote&&this.$element.find(".modal-content").load(this.options.remote,a.proxy(function(){this.$element.trigger("loaded.bs.modal")},this))};c.VERSION="3.3.6",c.TRANSITION_DURATION=300,c.BACKDROP_TRANSITION_DURATION=150,c.DEFAULTS={backdrop:!0,keyboard:!0,show:!0},c.prototype.toggle=function(a){return this.isShown?this.hide():this.show(a)},c.prototype.show=function(b){var d=this,e=a.Event("show.bs.modal",{relatedTarget:b});this.$element.trigger(e),this.isShown||e.isDefaultPrevented()||(this.isShown=!0,this.checkScrollbar(),this.setScrollbar(),this.$body.addClass("modal-open"),this.escape(),this.resize(),this.$element.on("click.dismiss.bs.modal",'[data-dismiss="modal"]',a.proxy(this.hide,this)),this.$dialog.on("mousedown.dismiss.bs.modal",function(){d.$element.one("mouseup.dismiss.bs.modal",function(b){a(b.target).is(d.$element)&&(d.ignoreBackdropClick=!0)})}),this.backdrop(function(){var e=a.support.transition&&d.$element.hasClass("fade");d.$element.parent().length||d.$element.appendTo(d.$body),d.$element.show().scrollTop(0),d.adjustDialog(),e&&d.$element[0].offsetWidth,d.$element.addClass("in"),d.enforceFocus();var f=a.Event("shown.bs.modal",{relatedTarget:b});e?d.$dialog.one("bsTransitionEnd",function(){d.$element.trigger("focus").trigger(f)}).emulateTransitionEnd(c.TRANSITION_DURATION):d.$element.trigger("focus").trigger(f)}))},c.prototype.hide=function(b){b&&b.preventDefault(),b=a.Event("hide.bs.modal"),this.$element.trigger(b),this.isShown&&!b.isDefaultPrevented()&&(this.isShown=!1,this.escape(),this.resize(),a(document).off("focusin.bs.modal"),this.$element.removeClass("in").off("click.dismiss.bs.modal").off("mouseup.dismiss.bs.modal"),this.$dialog.off("mousedown.dismiss.bs.modal"),a.support.transition&&this.$element.hasClass("fade")?this.$element.one("bsTransitionEnd",a.proxy(this.hideModal,this)).emulateTransitionEnd(c.TRANSITION_DURATION):this.hideModal())},c.prototype.enforceFocus=function(){a(document).off("focusin.bs.modal").on("focusin.bs.modal",a.proxy(function(a){this.$element[0]===a.target||this.$element.has(a.target).length||this.$element.trigger("focus")},this))},c.prototype.escape=function(){this.isShown&&this.options.keyboard?this.$element.on("keydown.dismiss.bs.modal",a.proxy(function(a){27==a.which&&this.hide()},this)):this.isShown||this.$element.off("keydown.dismiss.bs.modal")},c.prototype.resize=function(){this.isShown?a(window).on("resize.bs.modal",a.proxy(this.handleUpdate,this)):a(window).off("resize.bs.modal")},c.prototype.hideModal=function(){var a=this;this.$element.hide(),this.backdrop(function(){a.$body.removeClass("modal-open"),a.resetAdjustments(),a.resetScrollbar(),a.$element.trigger("hidden.bs.modal")})},c.prototype.removeBackdrop=function(){this.$backdrop&&this.$backdrop.remove(),this.$backdrop=null},c.prototype.backdrop=function(b){var d=this,e=this.$element.hasClass("fade")?"fade":"";if(this.isShown&&this.options.backdrop){var f=a.support.transition&&e;if(this.$backdrop=a(document.createElement("div")).addClass("modal-backdrop "+e).appendTo(this.$body),this.$element.on("click.dismiss.bs.modal",a.proxy(function(a){return 
this.ignoreBackdropClick?void(this.ignoreBackdropClick=!1):void(a.target===a.currentTarget&&("static"==this.options.backdrop?this.$element[0].focus():this.hide()))},this)),f&&this.$backdrop[0].offsetWidth,this.$backdrop.addClass("in"),!b)return;f?this.$backdrop.one("bsTransitionEnd",b).emulateTransitionEnd(c.BACKDROP_TRANSITION_DURATION):b()}else if(!this.isShown&&this.$backdrop){this.$backdrop.removeClass("in");var g=function(){d.removeBackdrop(),b&&b()};a.support.transition&&this.$element.hasClass("fade")?this.$backdrop.one("bsTransitionEnd",g).emulateTransitionEnd(c.BACKDROP_TRANSITION_DURATION):g()}else b&&b()},c.prototype.handleUpdate=function(){this.adjustDialog()},c.prototype.adjustDialog=function(){var a=this.$element[0].scrollHeight>document.documentElement.clientHeight;this.$element.css({paddingLeft:!this.bodyIsOverflowing&&a?this.scrollbarWidth:"",paddingRight:this.bodyIsOverflowing&&!a?this.scrollbarWidth:""})},c.prototype.resetAdjustments=function(){this.$element.css({paddingLeft:"",paddingRight:""})},c.prototype.checkScrollbar=function(){var a=window.innerWidth;if(!a){var b=document.documentElement.getBoundingClientRect();a=b.right-Math.abs(b.left)}this.bodyIsOverflowing=document.body.clientWidth<a,this.scrollbarWidth=this.measureScrollbar()},c.prototype.setScrollbar=function(){var a=parseInt(this.$body.css("padding-right")||0,10);this.originalBodyPad=document.body.style.paddingRight||"",this.bodyIsOverflowing&&this.$body.css("padding-right",a+this.scrollbarWidth)},c.prototype.resetScrollbar=function(){this.$body.css("padding-right",this.originalBodyPad)},c.prototype.measureScrollbar=function(){var a=document.createElement("div");a.className="modal-scrollbar-measure",this.$body.append(a);var b=a.offsetWidth-a.clientWidth;return this.$body[0].removeChild(a),b};var d=a.fn.modal;a.fn.modal=b,a.fn.modal.Constructor=c,a.fn.modal.noConflict=function(){return a.fn.modal=d,this},a(document).on("click.bs.modal.data-api",'[data-toggle="modal"]',function(c){var d=a(this),e=d.attr("href"),f=a(d.attr("data-target")||e&&e.replace(/.*(?=#[^\s]+$)/,"")),g=f.data("bs.modal")?"toggle":a.extend({remote:!/#/.test(e)&&e},f.data(),d.data());d.is("a")&&c.preventDefault(),f.one("show.bs.modal",function(a){a.isDefaultPrevented()||f.one("hidden.bs.modal",function(){d.is(":visible")&&d.trigger("focus")})}),b.call(f,g,this)})}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.tooltip"),f="object"==typeof b&&b;!e&&/destroy|hide/.test(b)||(e||d.data("bs.tooltip",e=new c(this,f)),"string"==typeof b&&e[b]())})}var c=function(a,b){this.type=null,this.options=null,this.enabled=null,this.timeout=null,this.hoverState=null,this.$element=null,this.inState=null,this.init("tooltip",a,b)};c.VERSION="3.3.6",c.TRANSITION_DURATION=150,c.DEFAULTS={animation:!0,placement:"top",selector:!1,template:'<div class="tooltip" role="tooltip"><div class="tooltip-arrow"></div><div class="tooltip-inner"></div></div>',trigger:"hover focus",title:"",delay:0,html:!1,container:!1,viewport:{selector:"body",padding:0}},c.prototype.init=function(b,c,d){if(this.enabled=!0,this.type=b,this.$element=a(c),this.options=this.getOptions(d),this.$viewport=this.options.viewport&&a(a.isFunction(this.options.viewport)?this.options.viewport.call(this,this.$element):this.options.viewport.selector||this.options.viewport),this.inState={click:!1,hover:!1,focus:!1},this.$element[0]instanceof document.constructor&&!this.options.selector)throw new Error("`selector` option must be specified when 
initializing "+this.type+" on the window.document object!");for(var e=this.options.trigger.split(" "),f=e.length;f--;){var g=e[f];if("click"==g)this.$element.on("click."+this.type,this.options.selector,a.proxy(this.toggle,this));else if("manual"!=g){var h="hover"==g?"mouseenter":"focusin",i="hover"==g?"mouseleave":"focusout";this.$element.on(h+"."+this.type,this.options.selector,a.proxy(this.enter,this)),this.$element.on(i+"."+this.type,this.options.selector,a.proxy(this.leave,this))}}this.options.selector?this._options=a.extend({},this.options,{trigger:"manual",selector:""}):this.fixTitle()},c.prototype.getDefaults=function(){return c.DEFAULTS},c.prototype.getOptions=function(b){return b=a.extend({},this.getDefaults(),this.$element.data(),b),b.delay&&"number"==typeof b.delay&&(b.delay={show:b.delay,hide:b.delay}),b},c.prototype.getDelegateOptions=function(){var b={},c=this.getDefaults();return this._options&&a.each(this._options,function(a,d){c[a]!=d&&(b[a]=d)}),b},c.prototype.enter=function(b){var c=b instanceof this.constructor?b:a(b.currentTarget).data("bs."+this.type);return c||(c=new this.constructor(b.currentTarget,this.getDelegateOptions()),a(b.currentTarget).data("bs."+this.type,c)),b instanceof a.Event&&(c.inState["focusin"==b.type?"focus":"hover"]=!0),c.tip().hasClass("in")||"in"==c.hoverState?void(c.hoverState="in"):(clearTimeout(c.timeout),c.hoverState="in",c.options.delay&&c.options.delay.show?void(c.timeout=setTimeout(function(){"in"==c.hoverState&&c.show()},c.options.delay.show)):c.show())},c.prototype.isInStateTrue=function(){for(var a in this.inState)if(this.inState[a])return!0;return!1},c.prototype.leave=function(b){var c=b instanceof this.constructor?b:a(b.currentTarget).data("bs."+this.type);return c||(c=new this.constructor(b.currentTarget,this.getDelegateOptions()),a(b.currentTarget).data("bs."+this.type,c)),b instanceof a.Event&&(c.inState["focusout"==b.type?"focus":"hover"]=!1),c.isInStateTrue()?void 0:(clearTimeout(c.timeout),c.hoverState="out",c.options.delay&&c.options.delay.hide?void(c.timeout=setTimeout(function(){"out"==c.hoverState&&c.hide()},c.options.delay.hide)):c.hide())},c.prototype.show=function(){var b=a.Event("show.bs."+this.type);if(this.hasContent()&&this.enabled){this.$element.trigger(b);var d=a.contains(this.$element[0].ownerDocument.documentElement,this.$element[0]);if(b.isDefaultPrevented()||!d)return;var e=this,f=this.tip(),g=this.getUID(this.type);this.setContent(),f.attr("id",g),this.$element.attr("aria-describedby",g),this.options.animation&&f.addClass("fade");var h="function"==typeof this.options.placement?this.options.placement.call(this,f[0],this.$element[0]):this.options.placement,i=/\s?auto?\s?/i,j=i.test(h);j&&(h=h.replace(i,"")||"top"),f.detach().css({top:0,left:0,display:"block"}).addClass(h).data("bs."+this.type,this),this.options.container?f.appendTo(this.options.container):f.insertAfter(this.$element),this.$element.trigger("inserted.bs."+this.type);var k=this.getPosition(),l=f[0].offsetWidth,m=f[0].offsetHeight;if(j){var n=h,o=this.getPosition(this.$viewport);h="bottom"==h&&k.bottom+m>o.bottom?"top":"top"==h&&k.top-m<o.top?"bottom":"right"==h&&k.right+l>o.width?"left":"left"==h&&k.left-l<o.left?"right":h,f.removeClass(n).addClass(h)}var p=this.getCalculatedOffset(h,k,l,m);this.applyPlacement(p,h);var q=function(){var 
a=e.hoverState;e.$element.trigger("shown.bs."+e.type),e.hoverState=null,"out"==a&&e.leave(e)};a.support.transition&&this.$tip.hasClass("fade")?f.one("bsTransitionEnd",q).emulateTransitionEnd(c.TRANSITION_DURATION):q()}},c.prototype.applyPlacement=function(b,c){var d=this.tip(),e=d[0].offsetWidth,f=d[0].offsetHeight,g=parseInt(d.css("margin-top"),10),h=parseInt(d.css("margin-left"),10);isNaN(g)&&(g=0),isNaN(h)&&(h=0),b.top+=g,b.left+=h,a.offset.setOffset(d[0],a.extend({using:function(a){d.css({top:Math.round(a.top),left:Math.round(a.left)})}},b),0),d.addClass("in");var i=d[0].offsetWidth,j=d[0].offsetHeight;"top"==c&&j!=f&&(b.top=b.top+f-j);var k=this.getViewportAdjustedDelta(c,b,i,j);k.left?b.left+=k.left:b.top+=k.top;var l=/top|bottom/.test(c),m=l?2*k.left-e+i:2*k.top-f+j,n=l?"offsetWidth":"offsetHeight";d.offset(b),this.replaceArrow(m,d[0][n],l)},c.prototype.replaceArrow=function(a,b,c){this.arrow().css(c?"left":"top",50*(1-a/b)+"%").css(c?"top":"left","")},c.prototype.setContent=function(){var a=this.tip(),b=this.getTitle();a.find(".tooltip-inner")[this.options.html?"html":"text"](b),a.removeClass("fade in top bottom left right")},c.prototype.hide=function(b){function d(){"in"!=e.hoverState&&f.detach(),e.$element.removeAttr("aria-describedby").trigger("hidden.bs."+e.type),b&&b()}var e=this,f=a(this.$tip),g=a.Event("hide.bs."+this.type);return this.$element.trigger(g),g.isDefaultPrevented()?void 0:(f.removeClass("in"),a.support.transition&&f.hasClass("fade")?f.one("bsTransitionEnd",d).emulateTransitionEnd(c.TRANSITION_DURATION):d(),this.hoverState=null,this)},c.prototype.fixTitle=function(){var a=this.$element;(a.attr("title")||"string"!=typeof a.attr("data-original-title"))&&a.attr("data-original-title",a.attr("title")||"").attr("title","")},c.prototype.hasContent=function(){return this.getTitle()},c.prototype.getPosition=function(b){b=b||this.$element;var c=b[0],d="BODY"==c.tagName,e=c.getBoundingClientRect();null==e.width&&(e=a.extend({},e,{width:e.right-e.left,height:e.bottom-e.top}));var f=d?{top:0,left:0}:b.offset(),g={scroll:d?document.documentElement.scrollTop||document.body.scrollTop:b.scrollTop()},h=d?{width:a(window).width(),height:a(window).height()}:null;return a.extend({},e,g,h,f)},c.prototype.getCalculatedOffset=function(a,b,c,d){return"bottom"==a?{top:b.top+b.height,left:b.left+b.width/2-c/2}:"top"==a?{top:b.top-d,left:b.left+b.width/2-c/2}:"left"==a?{top:b.top+b.height/2-d/2,left:b.left-c}:{top:b.top+b.height/2-d/2,left:b.left+b.width}},c.prototype.getViewportAdjustedDelta=function(a,b,c,d){var e={top:0,left:0};if(!this.$viewport)return e;var f=this.options.viewport&&this.options.viewport.padding||0,g=this.getPosition(this.$viewport);if(/right|left/.test(a)){var h=b.top-f-g.scroll,i=b.top+f-g.scroll+d;h<g.top?e.top=g.top-h:i>g.top+g.height&&(e.top=g.top+g.height-i)}else{var j=b.left-f,k=b.left+f+c;j<g.left?e.left=g.left-j:k>g.right&&(e.left=g.left+g.width-k)}return e},c.prototype.getTitle=function(){var a,b=this.$element,c=this.options;return a=b.attr("data-original-title")||("function"==typeof c.title?c.title.call(b[0]):c.title)},c.prototype.getUID=function(a){do a+=~~(1e6*Math.random());while(document.getElementById(a));return a},c.prototype.tip=function(){if(!this.$tip&&(this.$tip=a(this.options.template),1!=this.$tip.length))throw new Error(this.type+" `template` option must consist of exactly 1 top-level element!");return this.$tip},c.prototype.arrow=function(){return 
this.$arrow=this.$arrow||this.tip().find(".tooltip-arrow")},c.prototype.enable=function(){this.enabled=!0},c.prototype.disable=function(){this.enabled=!1},c.prototype.toggleEnabled=function(){this.enabled=!this.enabled},c.prototype.toggle=function(b){var c=this;b&&(c=a(b.currentTarget).data("bs."+this.type),c||(c=new this.constructor(b.currentTarget,this.getDelegateOptions()),a(b.currentTarget).data("bs."+this.type,c))),b?(c.inState.click=!c.inState.click,c.isInStateTrue()?c.enter(c):c.leave(c)):c.tip().hasClass("in")?c.leave(c):c.enter(c)},c.prototype.destroy=function(){var a=this;clearTimeout(this.timeout),this.hide(function(){a.$element.off("."+a.type).removeData("bs."+a.type),a.$tip&&a.$tip.detach(),a.$tip=null,a.$arrow=null,a.$viewport=null})};var d=a.fn.tooltip;a.fn.tooltip=b,a.fn.tooltip.Constructor=c,a.fn.tooltip.noConflict=function(){return a.fn.tooltip=d,this}}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.popover"),f="object"==typeof b&&b;!e&&/destroy|hide/.test(b)||(e||d.data("bs.popover",e=new c(this,f)),"string"==typeof b&&e[b]())})}var c=function(a,b){this.init("popover",a,b)};if(!a.fn.tooltip)throw new Error("Popover requires tooltip.js");c.VERSION="3.3.6",c.DEFAULTS=a.extend({},a.fn.tooltip.Constructor.DEFAULTS,{placement:"right",trigger:"click",content:"",template:'<div class="popover" role="tooltip"><div class="arrow"></div><h3 class="popover-title"></h3><div class="popover-content"></div></div>'}),c.prototype=a.extend({},a.fn.tooltip.Constructor.prototype),c.prototype.constructor=c,c.prototype.getDefaults=function(){return c.DEFAULTS},c.prototype.setContent=function(){var a=this.tip(),b=this.getTitle(),c=this.getContent();a.find(".popover-title")[this.options.html?"html":"text"](b),a.find(".popover-content").children().detach().end()[this.options.html?"string"==typeof c?"html":"append":"text"](c),a.removeClass("fade top bottom left right in"),a.find(".popover-title").html()||a.find(".popover-title").hide()},c.prototype.hasContent=function(){return this.getTitle()||this.getContent()},c.prototype.getContent=function(){var a=this.$element,b=this.options;return a.attr("data-content")||("function"==typeof b.content?b.content.call(a[0]):b.content)},c.prototype.arrow=function(){return this.$arrow=this.$arrow||this.tip().find(".arrow")};var d=a.fn.popover;a.fn.popover=b,a.fn.popover.Constructor=c,a.fn.popover.noConflict=function(){return a.fn.popover=d,this}}(jQuery),+function(a){"use strict";function b(c,d){this.$body=a(document.body),this.$scrollElement=a(a(c).is(document.body)?window:c),this.options=a.extend({},b.DEFAULTS,d),this.selector=(this.options.target||"")+" .nav li > a",this.offsets=[],this.targets=[],this.activeTarget=null,this.scrollHeight=0,this.$scrollElement.on("scroll.bs.scrollspy",a.proxy(this.process,this)),this.refresh(),this.process()}function c(c){return this.each(function(){var d=a(this),e=d.data("bs.scrollspy"),f="object"==typeof c&&c;e||d.data("bs.scrollspy",e=new b(this,f)),"string"==typeof c&&e[c]()})}b.VERSION="3.3.6",b.DEFAULTS={offset:10},b.prototype.getScrollHeight=function(){return this.$scrollElement[0].scrollHeight||Math.max(this.$body[0].scrollHeight,document.documentElement.scrollHeight)},b.prototype.refresh=function(){var b=this,c="offset",d=0;this.offsets=[],this.targets=[],this.scrollHeight=this.getScrollHeight(),a.isWindow(this.$scrollElement[0])||(c="position",d=this.$scrollElement.scrollTop()),this.$body.find(this.selector).map(function(){var 
b=a(this),e=b.data("target")||b.attr("href"),f=/^#./.test(e)&&a(e);return f&&f.length&&f.is(":visible")&&[[f[c]().top+d,e]]||null}).sort(function(a,b){return a[0]-b[0]}).each(function(){b.offsets.push(this[0]),b.targets.push(this[1])})},b.prototype.process=function(){var a,b=this.$scrollElement.scrollTop()+this.options.offset,c=this.getScrollHeight(),d=this.options.offset+c-this.$scrollElement.height(),e=this.offsets,f=this.targets,g=this.activeTarget;if(this.scrollHeight!=c&&this.refresh(),b>=d)return g!=(a=f[f.length-1])&&this.activate(a);if(g&&b<e[0])return this.activeTarget=null,this.clear();for(a=e.length;a--;)g!=f[a]&&b>=e[a]&&(void 0===e[a+1]||b<e[a+1])&&this.activate(f[a])},b.prototype.activate=function(b){this.activeTarget=b,this.clear();var c=this.selector+'[data-target="'+b+'"],'+this.selector+'[href="'+b+'"]',d=a(c).parents("li").addClass("active");d.parent(".dropdown-menu").length&&(d=d.closest("li.dropdown").addClass("active")),
-d.trigger("activate.bs.scrollspy")},b.prototype.clear=function(){a(this.selector).parentsUntil(this.options.target,".active").removeClass("active")};var d=a.fn.scrollspy;a.fn.scrollspy=c,a.fn.scrollspy.Constructor=b,a.fn.scrollspy.noConflict=function(){return a.fn.scrollspy=d,this},a(window).on("load.bs.scrollspy.data-api",function(){a('[data-spy="scroll"]').each(function(){var b=a(this);c.call(b,b.data())})})}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.tab");e||d.data("bs.tab",e=new c(this)),"string"==typeof b&&e[b]()})}var c=function(b){this.element=a(b)};c.VERSION="3.3.6",c.TRANSITION_DURATION=150,c.prototype.show=function(){var b=this.element,c=b.closest("ul:not(.dropdown-menu)"),d=b.data("target");if(d||(d=b.attr("href"),d=d&&d.replace(/.*(?=#[^\s]*$)/,"")),!b.parent("li").hasClass("active")){var e=c.find(".active:last a"),f=a.Event("hide.bs.tab",{relatedTarget:b[0]}),g=a.Event("show.bs.tab",{relatedTarget:e[0]});if(e.trigger(f),b.trigger(g),!g.isDefaultPrevented()&&!f.isDefaultPrevented()){var h=a(d);this.activate(b.closest("li"),c),this.activate(h,h.parent(),function(){e.trigger({type:"hidden.bs.tab",relatedTarget:b[0]}),b.trigger({type:"shown.bs.tab",relatedTarget:e[0]})})}}},c.prototype.activate=function(b,d,e){function f(){g.removeClass("active").find("> .dropdown-menu > .active").removeClass("active").end().find('[data-toggle="tab"]').attr("aria-expanded",!1),b.addClass("active").find('[data-toggle="tab"]').attr("aria-expanded",!0),h?(b[0].offsetWidth,b.addClass("in")):b.removeClass("fade"),b.parent(".dropdown-menu").length&&b.closest("li.dropdown").addClass("active").end().find('[data-toggle="tab"]').attr("aria-expanded",!0),e&&e()}var g=d.find("> .active"),h=e&&a.support.transition&&(g.length&&g.hasClass("fade")||!!d.find("> .fade").length);g.length&&h?g.one("bsTransitionEnd",f).emulateTransitionEnd(c.TRANSITION_DURATION):f(),g.removeClass("in")};var d=a.fn.tab;a.fn.tab=b,a.fn.tab.Constructor=c,a.fn.tab.noConflict=function(){return a.fn.tab=d,this};var e=function(c){c.preventDefault(),b.call(a(this),"show")};a(document).on("click.bs.tab.data-api",'[data-toggle="tab"]',e).on("click.bs.tab.data-api",'[data-toggle="pill"]',e)}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.affix"),f="object"==typeof b&&b;e||d.data("bs.affix",e=new c(this,f)),"string"==typeof b&&e[b]()})}var c=function(b,d){this.options=a.extend({},c.DEFAULTS,d),this.$target=a(this.options.target).on("scroll.bs.affix.data-api",a.proxy(this.checkPosition,this)).on("click.bs.affix.data-api",a.proxy(this.checkPositionWithEventLoop,this)),this.$element=a(b),this.affixed=null,this.unpin=null,this.pinnedOffset=null,this.checkPosition()};c.VERSION="3.3.6",c.RESET="affix affix-top affix-bottom",c.DEFAULTS={offset:0,target:window},c.prototype.getState=function(a,b,c,d){var e=this.$target.scrollTop(),f=this.$element.offset(),g=this.$target.height();if(null!=c&&"top"==this.affixed)return c>e?"top":!1;if("bottom"==this.affixed)return null!=c?e+this.unpin<=f.top?!1:"bottom":a-d>=e+g?!1:"bottom";var h=null==this.affixed,i=h?e:f.top,j=h?g:b;return null!=c&&c>=e?"top":null!=d&&i+j>=a-d?"bottom":!1},c.prototype.getPinnedOffset=function(){if(this.pinnedOffset)return this.pinnedOffset;this.$element.removeClass(c.RESET).addClass("affix");var a=this.$target.scrollTop(),b=this.$element.offset();return 
this.pinnedOffset=b.top-a},c.prototype.checkPositionWithEventLoop=function(){setTimeout(a.proxy(this.checkPosition,this),1)},c.prototype.checkPosition=function(){if(this.$element.is(":visible")){var b=this.$element.height(),d=this.options.offset,e=d.top,f=d.bottom,g=Math.max(a(document).height(),a(document.body).height());"object"!=typeof d&&(f=e=d),"function"==typeof e&&(e=d.top(this.$element)),"function"==typeof f&&(f=d.bottom(this.$element));var h=this.getState(g,b,e,f);if(this.affixed!=h){null!=this.unpin&&this.$element.css("top","");var i="affix"+(h?"-"+h:""),j=a.Event(i+".bs.affix");if(this.$element.trigger(j),j.isDefaultPrevented())return;this.affixed=h,this.unpin="bottom"==h?this.getPinnedOffset():null,this.$element.removeClass(c.RESET).addClass(i).trigger(i.replace("affix","affixed")+".bs.affix")}"bottom"==h&&this.$element.offset({top:g-b-f})}};var d=a.fn.affix;a.fn.affix=b,a.fn.affix.Constructor=c,a.fn.affix.noConflict=function(){return a.fn.affix=d,this},a(window).on("load",function(){a('[data-spy="affix"]').each(function(){var c=a(this),d=c.data();d.offset=d.offset||{},null!=d.offsetBottom&&(d.offset.bottom=d.offsetBottom),null!=d.offsetTop&&(d.offset.top=d.offsetTop),b.call(c,d)})})}(jQuery); \ No newline at end of file
diff --git a/lib/toaster/toastergui/static/js/jquery-3.7.1.min.js b/lib/toaster/toastergui/static/js/jquery-3.7.1.min.js
new file mode 100644
index 000000000..7f37b5d99
--- /dev/null
+++ b/lib/toaster/toastergui/static/js/jquery-3.7.1.min.js
@@ -0,0 +1,2 @@
+/*! jQuery v3.7.1 | (c) OpenJS Foundation and other contributors | jquery.org/license */
+!function(e,t){"use strict";"object"==typeof module&&"object"==typeof module.exports?module.exports=e.document?t(e,!0):function(e){if(!e.document)throw new Error("jQuery requires a window with a document");return t(e)}:t(e)}("undefined"!=typeof window?window:this,function(ie,e){"use strict";var oe=[],r=Object.getPrototypeOf,ae=oe.slice,g=oe.flat?function(e){return oe.flat.call(e)}:function(e){return oe.concat.apply([],e)},s=oe.push,se=oe.indexOf,n={},i=n.toString,ue=n.hasOwnProperty,o=ue.toString,a=o.call(Object),le={},v=function(e){return"function"==typeof e&&"number"!=typeof e.nodeType&&"function"!=typeof e.item},y=function(e){return null!=e&&e===e.window},C=ie.document,u={type:!0,src:!0,nonce:!0,noModule:!0};function m(e,t,n){var r,i,o=(n=n||C).createElement("script");if(o.text=e,t)for(r in u)(i=t[r]||t.getAttribute&&t.getAttribute(r))&&o.setAttribute(r,i);n.head.appendChild(o).parentNode.removeChild(o)}function x(e){return null==e?e+"":"object"==typeof e||"function"==typeof e?n[i.call(e)]||"object":typeof e}var t="3.7.1",l=/HTML$/i,ce=function(e,t){return new ce.fn.init(e,t)};function c(e){var t=!!e&&"length"in e&&e.length,n=x(e);return!v(e)&&!y(e)&&("array"===n||0===t||"number"==typeof t&&0<t&&t-1 in e)}function fe(e,t){return e.nodeName&&e.nodeName.toLowerCase()===t.toLowerCase()}ce.fn=ce.prototype={jquery:t,constructor:ce,length:0,toArray:function(){return ae.call(this)},get:function(e){return null==e?ae.call(this):e<0?this[e+this.length]:this[e]},pushStack:function(e){var t=ce.merge(this.constructor(),e);return t.prevObject=this,t},each:function(e){return ce.each(this,e)},map:function(n){return this.pushStack(ce.map(this,function(e,t){return n.call(e,t,e)}))},slice:function(){return this.pushStack(ae.apply(this,arguments))},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},even:function(){return this.pushStack(ce.grep(this,function(e,t){return(t+1)%2}))},odd:function(){return this.pushStack(ce.grep(this,function(e,t){return t%2}))},eq:function(e){var t=this.length,n=+e+(e<0?t:0);return this.pushStack(0<=n&&n<t?[this[n]]:[])},end:function(){return this.prevObject||this.constructor()},push:s,sort:oe.sort,splice:oe.splice},ce.extend=ce.fn.extend=function(){var e,t,n,r,i,o,a=arguments[0]||{},s=1,u=arguments.length,l=!1;for("boolean"==typeof a&&(l=a,a=arguments[s]||{},s++),"object"==typeof a||v(a)||(a={}),s===u&&(a=this,s--);s<u;s++)if(null!=(e=arguments[s]))for(t in e)r=e[t],"__proto__"!==t&&a!==r&&(l&&r&&(ce.isPlainObject(r)||(i=Array.isArray(r)))?(n=a[t],o=i&&!Array.isArray(n)?[]:i||ce.isPlainObject(n)?n:{},i=!1,a[t]=ce.extend(l,o,r)):void 0!==r&&(a[t]=r));return a},ce.extend({expando:"jQuery"+(t+Math.random()).replace(/\D/g,""),isReady:!0,error:function(e){throw new Error(e)},noop:function(){},isPlainObject:function(e){var t,n;return!(!e||"[object Object]"!==i.call(e))&&(!(t=r(e))||"function"==typeof(n=ue.call(t,"constructor")&&t.constructor)&&o.call(n)===a)},isEmptyObject:function(e){var t;for(t in e)return!1;return!0},globalEval:function(e,t,n){m(e,{nonce:t&&t.nonce},n)},each:function(e,t){var n,r=0;if(c(e)){for(n=e.length;r<n;r++)if(!1===t.call(e[r],r,e[r]))break}else for(r in e)if(!1===t.call(e[r],r,e[r]))break;return e},text:function(e){var t,n="",r=0,i=e.nodeType;if(!i)while(t=e[r++])n+=ce.text(t);return 1===i||11===i?e.textContent:9===i?e.documentElement.textContent:3===i||4===i?e.nodeValue:n},makeArray:function(e,t){var n=t||[];return null!=e&&(c(Object(e))?ce.merge(n,"string"==typeof e?[e]:e):s.call(n,e)),n},inArray:function(e,t,n){return 
null==t?-1:se.call(t,e,n)},isXMLDoc:function(e){var t=e&&e.namespaceURI,n=e&&(e.ownerDocument||e).documentElement;return!l.test(t||n&&n.nodeName||"HTML")},merge:function(e,t){for(var n=+t.length,r=0,i=e.length;r<n;r++)e[i++]=t[r];return e.length=i,e},grep:function(e,t,n){for(var r=[],i=0,o=e.length,a=!n;i<o;i++)!t(e[i],i)!==a&&r.push(e[i]);return r},map:function(e,t,n){var r,i,o=0,a=[];if(c(e))for(r=e.length;o<r;o++)null!=(i=t(e[o],o,n))&&a.push(i);else for(o in e)null!=(i=t(e[o],o,n))&&a.push(i);return g(a)},guid:1,support:le}),"function"==typeof Symbol&&(ce.fn[Symbol.iterator]=oe[Symbol.iterator]),ce.each("Boolean Number String Function Array Date RegExp Object Error Symbol".split(" "),function(e,t){n["[object "+t+"]"]=t.toLowerCase()});var pe=oe.pop,de=oe.sort,he=oe.splice,ge="[\\x20\\t\\r\\n\\f]",ve=new RegExp("^"+ge+"+|((?:^|[^\\\\])(?:\\\\.)*)"+ge+"+$","g");ce.contains=function(e,t){var n=t&&t.parentNode;return e===n||!(!n||1!==n.nodeType||!(e.contains?e.contains(n):e.compareDocumentPosition&&16&e.compareDocumentPosition(n)))};var f=/([\0-\x1f\x7f]|^-?\d)|^-$|[^\x80-\uFFFF\w-]/g;function p(e,t){return t?"\0"===e?"\ufffd":e.slice(0,-1)+"\\"+e.charCodeAt(e.length-1).toString(16)+" ":"\\"+e}ce.escapeSelector=function(e){return(e+"").replace(f,p)};var ye=C,me=s;!function(){var e,b,w,o,a,T,r,C,d,i,k=me,S=ce.expando,E=0,n=0,s=W(),c=W(),u=W(),h=W(),l=function(e,t){return e===t&&(a=!0),0},f="checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|ismap|loop|multiple|open|readonly|required|scoped",t="(?:\\\\[\\da-fA-F]{1,6}"+ge+"?|\\\\[^\\r\\n\\f]|[\\w-]|[^\0-\\x7f])+",p="\\["+ge+"*("+t+")(?:"+ge+"*([*^$|!~]?=)"+ge+"*(?:'((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\"|("+t+"))|)"+ge+"*\\]",g=":("+t+")(?:\\((('((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\")|((?:\\\\.|[^\\\\()[\\]]|"+p+")*)|.*)\\)|)",v=new RegExp(ge+"+","g"),y=new RegExp("^"+ge+"*,"+ge+"*"),m=new RegExp("^"+ge+"*([>+~]|"+ge+")"+ge+"*"),x=new RegExp(ge+"|>"),j=new RegExp(g),A=new RegExp("^"+t+"$"),D={ID:new RegExp("^#("+t+")"),CLASS:new RegExp("^\\.("+t+")"),TAG:new RegExp("^("+t+"|[*])"),ATTR:new RegExp("^"+p),PSEUDO:new RegExp("^"+g),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+ge+"*(even|odd|(([+-]|)(\\d*)n|)"+ge+"*(?:([+-]|)"+ge+"*(\\d+)|))"+ge+"*\\)|)","i"),bool:new RegExp("^(?:"+f+")$","i"),needsContext:new RegExp("^"+ge+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+ge+"*((?:-\\d)?\\d*)"+ge+"*\\)|)(?=[^-]|$)","i")},N=/^(?:input|select|textarea|button)$/i,q=/^h\d$/i,L=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,H=/[+~]/,O=new RegExp("\\\\[\\da-fA-F]{1,6}"+ge+"?|\\\\([^\\r\\n\\f])","g"),P=function(e,t){var n="0x"+e.slice(1)-65536;return t||(n<0?String.fromCharCode(n+65536):String.fromCharCode(n>>10|55296,1023&n|56320))},M=function(){V()},R=J(function(e){return!0===e.disabled&&fe(e,"fieldset")},{dir:"parentNode",next:"legend"});try{k.apply(oe=ae.call(ye.childNodes),ye.childNodes),oe[ye.childNodes.length].nodeType}catch(e){k={apply:function(e,t){me.apply(e,ae.call(t))},call:function(e){me.apply(e,ae.call(arguments,1))}}}function I(t,e,n,r){var i,o,a,s,u,l,c,f=e&&e.ownerDocument,p=e?e.nodeType:9;if(n=n||[],"string"!=typeof t||!t||1!==p&&9!==p&&11!==p)return n;if(!r&&(V(e),e=e||T,C)){if(11!==p&&(u=L.exec(t)))if(i=u[1]){if(9===p){if(!(a=e.getElementById(i)))return n;if(a.id===i)return k.call(n,a),n}else if(f&&(a=f.getElementById(i))&&I.contains(e,a)&&a.id===i)return k.call(n,a),n}else{if(u[2])return k.apply(n,e.getElementsByTagName(t)),n;if((i=u[3])&&e.getElementsByClassName)return 
n,r=i.xhr();if(r.open(i.type,i.url,i.async,i.username,i.password),i.xhrFields)for(n in i.xhrFields)r[n]=i.xhrFields[n];for(n in i.mimeType&&r.overrideMimeType&&r.overrideMimeType(i.mimeType),i.crossDomain||e["X-Requested-With"]||(e["X-Requested-With"]="XMLHttpRequest"),e)r.setRequestHeader(n,e[n]);o=function(e){return function(){o&&(o=a=r.onload=r.onerror=r.onabort=r.ontimeout=r.onreadystatechange=null,"abort"===e?r.abort():"error"===e?"number"!=typeof r.status?t(0,"error"):t(r.status,r.statusText):t(Yt[r.status]||r.status,r.statusText,"text"!==(r.responseType||"text")||"string"!=typeof r.responseText?{binary:r.response}:{text:r.responseText},r.getAllResponseHeaders()))}},r.onload=o(),a=r.onerror=r.ontimeout=o("error"),void 0!==r.onabort?r.onabort=a:r.onreadystatechange=function(){4===r.readyState&&ie.setTimeout(function(){o&&a()})},o=o("abort");try{r.send(i.hasContent&&i.data||null)}catch(e){if(o)throw e}},abort:function(){o&&o()}}}),ce.ajaxPrefilter(function(e){e.crossDomain&&(e.contents.script=!1)}),ce.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/\b(?:java|ecma)script\b/},converters:{"text script":function(e){return ce.globalEval(e),e}}}),ce.ajaxPrefilter("script",function(e){void 0===e.cache&&(e.cache=!1),e.crossDomain&&(e.type="GET")}),ce.ajaxTransport("script",function(n){var r,i;if(n.crossDomain||n.scriptAttrs)return{send:function(e,t){r=ce("<script>").attr(n.scriptAttrs||{}).prop({charset:n.scriptCharset,src:n.url}).on("load error",i=function(e){r.remove(),i=null,e&&t("error"===e.type?404:200,e.type)}),C.head.appendChild(r[0])},abort:function(){i&&i()}}});var Jt,Kt=[],Zt=/(=)\?(?=&|$)|\?\?/;ce.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var e=Kt.pop()||ce.expando+"_"+jt.guid++;return this[e]=!0,e}}),ce.ajaxPrefilter("json jsonp",function(e,t,n){var r,i,o,a=!1!==e.jsonp&&(Zt.test(e.url)?"url":"string"==typeof e.data&&0===(e.contentType||"").indexOf("application/x-www-form-urlencoded")&&Zt.test(e.data)&&"data");if(a||"jsonp"===e.dataTypes[0])return r=e.jsonpCallback=v(e.jsonpCallback)?e.jsonpCallback():e.jsonpCallback,a?e[a]=e[a].replace(Zt,"$1"+r):!1!==e.jsonp&&(e.url+=(At.test(e.url)?"&":"?")+e.jsonp+"="+r),e.converters["script json"]=function(){return o||ce.error(r+" was not called"),o[0]},e.dataTypes[0]="json",i=ie[r],ie[r]=function(){o=arguments},n.always(function(){void 0===i?ce(ie).removeProp(r):ie[r]=i,e[r]&&(e.jsonpCallback=t.jsonpCallback,Kt.push(r)),o&&v(i)&&i(o[0]),o=i=void 0}),"script"}),le.createHTMLDocument=((Jt=C.implementation.createHTMLDocument("").body).innerHTML="<form></form><form></form>",2===Jt.childNodes.length),ce.parseHTML=function(e,t,n){return"string"!=typeof e?[]:("boolean"==typeof t&&(n=t,t=!1),t||(le.createHTMLDocument?((r=(t=C.implementation.createHTMLDocument("")).createElement("base")).href=C.location.href,t.head.appendChild(r)):t=C),o=!n&&[],(i=w.exec(e))?[t.createElement(i[1])]:(i=Ae([e],t,o),o&&o.length&&ce(o).remove(),ce.merge([],i.childNodes)));var r,i,o},ce.fn.load=function(e,t,n){var r,i,o,a=this,s=e.indexOf(" ");return-1<s&&(r=Tt(e.slice(s)),e=e.slice(0,s)),v(t)?(n=t,t=void 0):t&&"object"==typeof t&&(i="POST"),0<a.length&&ce.ajax({url:e,type:i||"GET",dataType:"html",data:t}).done(function(e){o=arguments,a.html(r?ce("<div>").append(ce.parseHTML(e)).find(r):e)}).always(n&&function(e,t){a.each(function(){n.apply(this,o||[e.responseText,t,e])})}),this},ce.expr.pseudos.animated=function(t){return ce.grep(ce.timers,function(e){return 
t===e.elem}).length},ce.offset={setOffset:function(e,t,n){var r,i,o,a,s,u,l=ce.css(e,"position"),c=ce(e),f={};"static"===l&&(e.style.position="relative"),s=c.offset(),o=ce.css(e,"top"),u=ce.css(e,"left"),("absolute"===l||"fixed"===l)&&-1<(o+u).indexOf("auto")?(a=(r=c.position()).top,i=r.left):(a=parseFloat(o)||0,i=parseFloat(u)||0),v(t)&&(t=t.call(e,n,ce.extend({},s))),null!=t.top&&(f.top=t.top-s.top+a),null!=t.left&&(f.left=t.left-s.left+i),"using"in t?t.using.call(e,f):c.css(f)}},ce.fn.extend({offset:function(t){if(arguments.length)return void 0===t?this:this.each(function(e){ce.offset.setOffset(this,t,e)});var e,n,r=this[0];return r?r.getClientRects().length?(e=r.getBoundingClientRect(),n=r.ownerDocument.defaultView,{top:e.top+n.pageYOffset,left:e.left+n.pageXOffset}):{top:0,left:0}:void 0},position:function(){if(this[0]){var e,t,n,r=this[0],i={top:0,left:0};if("fixed"===ce.css(r,"position"))t=r.getBoundingClientRect();else{t=this.offset(),n=r.ownerDocument,e=r.offsetParent||n.documentElement;while(e&&(e===n.body||e===n.documentElement)&&"static"===ce.css(e,"position"))e=e.parentNode;e&&e!==r&&1===e.nodeType&&((i=ce(e).offset()).top+=ce.css(e,"borderTopWidth",!0),i.left+=ce.css(e,"borderLeftWidth",!0))}return{top:t.top-i.top-ce.css(r,"marginTop",!0),left:t.left-i.left-ce.css(r,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){var e=this.offsetParent;while(e&&"static"===ce.css(e,"position"))e=e.offsetParent;return e||J})}}),ce.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(t,i){var o="pageYOffset"===i;ce.fn[t]=function(e){return M(this,function(e,t,n){var r;if(y(e)?r=e:9===e.nodeType&&(r=e.defaultView),void 0===n)return r?r[i]:e[t];r?r.scrollTo(o?r.pageXOffset:n,o?n:r.pageYOffset):e[t]=n},t,e,arguments.length)}}),ce.each(["top","left"],function(e,n){ce.cssHooks[n]=Ye(le.pixelPosition,function(e,t){if(t)return t=Ge(e,n),_e.test(t)?ce(e).position()[n]+"px":t})}),ce.each({Height:"height",Width:"width"},function(a,s){ce.each({padding:"inner"+a,content:s,"":"outer"+a},function(r,o){ce.fn[o]=function(e,t){var n=arguments.length&&(r||"boolean"!=typeof e),i=r||(!0===e||!0===t?"margin":"border");return M(this,function(e,t,n){var r;return y(e)?0===o.indexOf("outer")?e["inner"+a]:e.document.documentElement["client"+a]:9===e.nodeType?(r=e.documentElement,Math.max(e.body["scroll"+a],r["scroll"+a],e.body["offset"+a],r["offset"+a],r["client"+a])):void 0===n?ce.css(e,t,i):ce.style(e,t,n,i)},s,n?e:void 0,n)}})}),ce.each(["ajaxStart","ajaxStop","ajaxComplete","ajaxError","ajaxSuccess","ajaxSend"],function(e,t){ce.fn[t]=function(e){return this.on(t,e)}}),ce.fn.extend({bind:function(e,t,n){return this.on(e,null,t,n)},unbind:function(e,t){return this.off(e,null,t)},delegate:function(e,t,n,r){return this.on(t,e,n,r)},undelegate:function(e,t,n){return 1===arguments.length?this.off(e,"**"):this.off(t,e||"**",n)},hover:function(e,t){return this.on("mouseenter",e).on("mouseleave",t||e)}}),ce.each("blur focus focusin focusout resize scroll click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup contextmenu".split(" "),function(e,n){ce.fn[n]=function(e,t){return 0<arguments.length?this.on(n,null,e,t):this.trigger(n)}});var en=/^[\s\uFEFF\xA0]+|([^\s\uFEFF\xA0])[\s\uFEFF\xA0]+$/g;ce.proxy=function(e,t){var n,r,i;if("string"==typeof t&&(n=e[t],t=e,e=n),v(e))return r=ae.call(arguments,2),(i=function(){return 
e.apply(t||this,r.concat(ae.call(arguments)))}).guid=e.guid=e.guid||ce.guid++,i},ce.holdReady=function(e){e?ce.readyWait++:ce.ready(!0)},ce.isArray=Array.isArray,ce.parseJSON=JSON.parse,ce.nodeName=fe,ce.isFunction=v,ce.isWindow=y,ce.camelCase=F,ce.type=x,ce.now=Date.now,ce.isNumeric=function(e){var t=ce.type(e);return("number"===t||"string"===t)&&!isNaN(e-parseFloat(e))},ce.trim=function(e){return null==e?"":(e+"").replace(en,"$1")},"function"==typeof define&&define.amd&&define("jquery",[],function(){return ce});var tn=ie.jQuery,nn=ie.$;return ce.noConflict=function(e){return ie.$===ce&&(ie.$=nn),e&&ie.jQuery===ce&&(ie.jQuery=tn),ce},"undefined"==typeof e&&(ie.jQuery=ie.$=ce),ce});
diff --git a/lib/toaster/toastergui/static/js/jquery-3.7.1.min.map b/lib/toaster/toastergui/static/js/jquery-3.7.1.min.map
new file mode 100644
index 000000000..db38af589
--- /dev/null
+++ b/lib/toaster/toastergui/static/js/jquery-3.7.1.min.map
@@ -0,0 +1 @@
+{"version":3,"sources":["jquery-3.7.1.js"],"names":["global","factory","module","exports","document","w","Error","window","this","noGlobal","arr","getProto","Object","getPrototypeOf","slice","flat","array","call","concat","apply","push","indexOf","class2type","toString","hasOwn","hasOwnProperty","fnToString","ObjectFunctionString","support","isFunction","obj","nodeType","item","isWindow","preservedScriptAttributes","type","src","nonce","noModule","DOMEval","code","node","doc","i","val","script","createElement","text","getAttribute","setAttribute","head","appendChild","parentNode","removeChild","toType","version","rhtmlSuffix","jQuery","selector","context","fn","init","isArrayLike","length","nodeName","elem","name","toLowerCase","prototype","jquery","constructor","toArray","get","num","pushStack","elems","ret","merge","prevObject","each","callback","map","arguments","first","eq","last","even","grep","_elem","odd","len","j","end","sort","splice","extend","options","copy","copyIsArray","clone","target","deep","isPlainObject","Array","isArray","undefined","expando","Math","random","replace","isReady","error","msg","noop","proto","Ctor","isEmptyObject","globalEval","textContent","documentElement","nodeValue","makeArray","results","inArray","isXMLDoc","namespace","namespaceURI","docElem","ownerDocument","test","second","invert","matches","callbackExpect","arg","value","guid","Symbol","iterator","split","_i","pop","whitespace","rtrimCSS","RegExp","contains","a","b","bup","compareDocumentPosition","rcssescape","fcssescape","ch","asCodePoint","charCodeAt","escapeSelector","sel","preferredDoc","pushNative","Expr","outermostContext","sortInput","hasDuplicate","documentIsHTML","rbuggyQSA","dirruns","done","classCache","createCache","tokenCache","compilerCache","nonnativeSelectorCache","sortOrder","booleans","identifier","attributes","pseudos","rwhitespace","rcomma","rleadingCombinator","rdescend","rpseudo","ridentifier","matchExpr","ID","CLASS","TAG","ATTR","PSEUDO","CHILD","bool","needsContext","rinputs","rheader","rquickExpr","rsibling","runescape","funescape","escape","nonHex","high","String","fromCharCode","unloadHandler","setDocument","inDisabledFieldset","addCombinator","disabled","dir","next","childNodes","e","els","find","seed","m","nid","match","groups","newSelector","newContext","exec","getElementById","id","getElementsByTagName","getElementsByClassName","testContext","scope","tokenize","toSelector","join","querySelectorAll","qsaError","removeAttribute","select","keys","cache","key","cacheLength","shift","markFunction","assert","el","createInputPseudo","createButtonPseudo","createDisabledPseudo","isDisabled","createPositionalPseudo","argument","matchIndexes","subWindow","webkitMatchesSelector","msMatchesSelector","defaultView","top","addEventListener","getById","getElementsByName","disconnectedMatch","cssHas","querySelector","filter","attrId","getAttributeNode","tag","className","input","innerHTML","compare","sortDetached","expr","elements","matchesSelector","attr","attrHandle","uniqueSort","duplicates","sortStable","createPseudo","relative",">"," 
","+","~","preFilter","excess","unquoted","nodeNameSelector","expectedNodeName","pattern","operator","check","result","what","_argument","simple","forward","ofType","_context","xml","outerCache","nodeIndex","start","parent","useCache","diff","firstChild","lastChild","pseudo","args","setFilters","idx","matched","not","matcher","compile","unmatched","has","lang","elemLang","hash","location","root","focus","activeElement","err","safeActiveElement","hasFocus","href","tabIndex","enabled","checked","selected","selectedIndex","empty","nextSibling","header","button","_matchIndexes","lt","gt","nth","radio","checkbox","file","password","image","submit","reset","parseOnly","tokens","soFar","preFilters","cached","combinator","base","skip","checkNonElements","doneName","oldCache","newCache","elementMatcher","matchers","condense","newUnmatched","mapped","setMatcher","postFilter","postFinder","postSelector","temp","matcherOut","preMap","postMap","preexisting","contexts","multipleContexts","matcherIn","matcherFromTokens","checkContext","leadingRelative","implicitRelative","matchContext","matchAnyContext","elementMatchers","setMatchers","bySet","byElement","superMatcher","outermost","matchedCount","setMatched","contextBackup","dirrunsUnique","token","compiled","filters","unique","getText","isXML","selectors","until","truncate","is","siblings","n","rneedsContext","rsingleTag","winnow","qualifier","self","rootjQuery","parseHTML","ready","rparentsprev","guaranteedUnique","children","contents","prev","sibling","cur","targets","l","closest","index","prevAll","add","addBack","parents","parentsUntil","nextAll","nextUntil","prevUntil","contentDocument","content","reverse","rnothtmlwhite","Identity","v","Thrower","ex","adoptValue","resolve","reject","noValue","method","promise","fail","then","Callbacks","object","_","flag","firing","memory","fired","locked","list","queue","firingIndex","fire","once","stopOnFalse","remove","disable","lock","fireWith","Deferred","func","tuples","state","always","deferred","catch","pipe","fns","newDefer","tuple","returned","progress","notify","onFulfilled","onRejected","onProgress","maxDepth","depth","handler","special","that","mightThrow","TypeError","notifyWith","resolveWith","process","exceptionHook","rejectWith","getErrorHook","getStackHook","setTimeout","stateString","when","singleValue","remaining","resolveContexts","resolveValues","primary","updateFunc","rerrorNames","asyncError","console","warn","message","stack","readyException","readyList","completed","removeEventListener","readyWait","wait","readyState","doScroll","access","chainable","emptyGet","raw","bulk","_key","rmsPrefix","rdashAlpha","fcamelCase","_all","letter","toUpperCase","camelCase","string","acceptData","owner","Data","uid","defineProperty","configurable","set","data","prop","hasData","dataPriv","dataUser","rbrace","rmultiDash","dataAttr","JSON","parse","removeData","_data","_removeData","attrs","dequeue","startLength","hooks","_queueHooks","unshift","stop","setter","clearQueue","tmp","count","defer","pnum","source","rcssNum","cssExpand","isAttached","composed","getRootNode","isHiddenWithinTree","style","display","css","adjustCSS","valueParts","tween","adjusted","scale","maxIterations","currentValue","initial","unit","cssNumber","initialInUnit","defaultDisplayMap","showHide","show","values","body","hide","toggle","div","rcheckableType","rtagName","rscriptType","createDocumentFragment","checkClone","cloneNode","noCloneChecked","defaultValue","option","wrapMap","thead","col","tr","td","_default","getAll","setGlobal
Eval","refElements","tbody","tfoot","colgroup","caption","th","optgroup","rhtml","buildFragment","scripts","selection","ignored","wrap","attached","fragment","nodes","htmlPrefilter","createTextNode","rtypenamespace","returnTrue","returnFalse","on","types","one","origFn","event","off","leverageNative","isSetup","saved","isTrigger","delegateType","stopPropagation","stopImmediatePropagation","preventDefault","trigger","isImmediatePropagationStopped","handleObjIn","eventHandle","events","t","handleObj","handlers","namespaces","origType","elemData","create","handle","triggered","dispatch","bindType","delegateCount","setup","mappedTypes","origCount","teardown","removeEvent","nativeEvent","handlerQueue","fix","delegateTarget","preDispatch","isPropagationStopped","currentTarget","rnamespace","postDispatch","matchedHandlers","matchedSelectors","addProp","hook","Event","enumerable","originalEvent","writable","load","noBubble","click","beforeunload","returnValue","props","isDefaultPrevented","defaultPrevented","relatedTarget","timeStamp","Date","now","isSimulated","altKey","bubbles","cancelable","changedTouches","ctrlKey","detail","eventPhase","metaKey","pageX","pageY","shiftKey","view","char","charCode","keyCode","buttons","clientX","clientY","offsetX","offsetY","pointerId","pointerType","screenX","screenY","targetTouches","toElement","touches","which","blur","focusMappedHandler","documentMode","simulate","attaches","dataHolder","mouseenter","mouseleave","pointerenter","pointerleave","orig","related","rnoInnerhtml","rchecked","rcleanScript","manipulationTarget","disableScript","restoreScript","cloneCopyEvent","dest","udataOld","udataCur","domManip","collection","hasScripts","iNoClone","valueIsFunction","html","_evalUrl","keepData","cleanData","dataAndEvents","deepDataAndEvents","srcElements","destElements","inPage","detach","append","prepend","insertBefore","before","after","replaceWith","replaceChild","appendTo","prependTo","insertAfter","replaceAll","original","insert","rnumnonpx","rcustomProp","getStyles","opener","getComputedStyle","swap","old","rboxStyle","curCSS","computed","width","minWidth","maxWidth","isCustomProp","getPropertyValue","pixelBoxStyles","addGetHookIf","conditionFn","hookFn","computeStyleTests","container","cssText","divStyle","pixelPositionVal","reliableMarginLeftVal","roundPixelMeasures","marginLeft","right","pixelBoxStylesVal","boxSizingReliableVal","position","scrollboxSizeVal","offsetWidth","measure","round","parseFloat","reliableTrDimensionsVal","backgroundClip","clearCloneStyle","boxSizingReliable","pixelPosition","reliableMarginLeft","scrollboxSize","reliableTrDimensions","table","trChild","trStyle","height","parseInt","borderTopWidth","borderBottomWidth","offsetHeight","cssPrefixes","emptyStyle","vendorProps","finalPropName","final","cssProps","capName","vendorPropName","rdisplayswap","cssShow","visibility","cssNormalTransform","letterSpacing","fontWeight","setPositiveNumber","subtract","max","boxModelAdjustment","dimension","box","isBorderBox","styles","computedVal","extra","delta","marginDelta","ceil","getWidthOrHeight","valueIsBorderBox","offsetProp","getClientRects","Tween","easing","cssHooks","opacity","animationIterationCount","aspectRatio","borderImageSlice","columnCount","flexGrow","flexShrink","gridArea","gridColumn","gridColumnEnd","gridColumnStart","gridRow","gridRowEnd","gridRowStart","lineHeight","order","orphans","widows","zIndex","zoom","fillOpacity","floodOpacity","stopOpacity","strokeMiterlimit","strokeOpacity","origName","setProperty","isFinite","getBo
undingClientRect","scrollboxSizeBuggy","left","margin","padding","border","prefix","suffix","expand","expanded","parts","propHooks","run","percent","eased","duration","pos","step","fx","scrollTop","scrollLeft","linear","p","swing","cos","PI","fxNow","inProgress","opt","rfxtypes","rrun","schedule","hidden","requestAnimationFrame","interval","tick","createFxNow","genFx","includeWidth","createTween","animation","Animation","tweeners","properties","stopped","prefilters","currentTime","startTime","tweens","opts","specialEasing","originalProperties","originalOptions","gotoEnd","propFilter","bind","complete","timer","anim","*","tweener","oldfire","propTween","restoreDisplay","isBox","dataShow","unqueued","overflow","overflowX","overflowY","prefilter","speed","speeds","fadeTo","to","animate","optall","doAnimation","finish","stopQueue","timers","cssFn","slideDown","slideUp","slideToggle","fadeIn","fadeOut","fadeToggle","slow","fast","delay","time","timeout","clearTimeout","checkOn","optSelected","radioValue","boolHook","removeAttr","nType","attrHooks","attrNames","getter","lowercaseName","rfocusable","rclickable","stripAndCollapse","getClass","classesToArray","removeProp","propFix","tabindex","for","class","addClass","classNames","curValue","finalValue","removeClass","toggleClass","stateVal","isValidValue","hasClass","rreturn","valHooks","optionSet","rquery","parseXML","parserErrorElem","DOMParser","parseFromString","rfocusMorph","stopPropagationCallback","onlyHandlers","bubbleType","ontype","lastElement","eventPath","parentWindow","triggerHandler","rbracket","rCRLF","rsubmitterTypes","rsubmittable","buildParams","traditional","param","s","valueOrFunction","encodeURIComponent","serialize","serializeArray","r20","rhash","rantiCache","rheaders","rnoContent","rprotocol","transports","allTypes","originAnchor","addToPrefiltersOrTransports","structure","dataTypeExpression","dataType","dataTypes","inspectPrefiltersOrTransports","jqXHR","inspected","seekingTransport","inspect","prefilterOrFactory","dataTypeOrTransport","ajaxExtend","flatOptions","ajaxSettings","active","lastModified","etag","url","isLocal","protocol","processData","async","contentType","accepts","json","responseFields","converters","* text","text html","text json","text xml","ajaxSetup","settings","ajaxPrefilter","ajaxTransport","ajax","transport","cacheURL","responseHeadersString","responseHeaders","timeoutTimer","urlAnchor","fireGlobals","uncached","callbackContext","globalEventContext","completeDeferred","statusCode","requestHeaders","requestHeadersNames","strAbort","getResponseHeader","getAllResponseHeaders","setRequestHeader","overrideMimeType","mimeType","status","abort","statusText","finalText","crossDomain","host","hasContent","ifModified","headers","beforeSend","success","send","nativeStatusText","responses","isSuccess","response","modified","ct","finalDataType","firstDataType","ajaxHandleResponses","conv2","current","conv","dataFilter","throws","ajaxConvert","getJSON","getScript","text 
script","wrapAll","firstElementChild","wrapInner","htmlIsFunction","unwrap","visible","xhr","XMLHttpRequest","xhrSuccessStatus","0","1223","xhrSupported","cors","errorCallback","open","username","xhrFields","onload","onerror","onabort","ontimeout","onreadystatechange","responseType","responseText","binary","scriptAttrs","charset","scriptCharset","evt","oldCallbacks","rjsonp","jsonp","jsonpCallback","originalSettings","callbackName","overwritten","responseContainer","jsonProp","createHTMLDocument","implementation","keepScripts","parsed","params","animated","offset","setOffset","curPosition","curLeft","curCSSTop","curTop","curOffset","curCSSLeft","curElem","using","rect","win","pageYOffset","pageXOffset","offsetParent","parentOffset","scrollTo","Height","Width","","defaultExtra","funcName","unbind","delegate","undelegate","hover","fnOver","fnOut","rtrim","proxy","holdReady","hold","parseJSON","isNumeric","isNaN","trim","define","amd","_jQuery","_$","$","noConflict"],"mappings":";CAUA,SAAYA,EAAQC,GAEnB,aAEuB,iBAAXC,QAAiD,iBAAnBA,OAAOC,QAShDD,OAAOC,QAAUH,EAAOI,SACvBH,EAASD,GAAQ,GACjB,SAAUK,GACT,IAAMA,EAAED,SACP,MAAM,IAAIE,MAAO,4CAElB,OAAOL,EAASI,IAGlBJ,EAASD,GAtBX,CA0BuB,oBAAXO,OAAyBA,OAASC,KAAM,SAAUD,GAAQE,GAMtE,aAEA,IAAIC,GAAM,GAENC,EAAWC,OAAOC,eAElBC,GAAQJ,GAAII,MAEZC,EAAOL,GAAIK,KAAO,SAAUC,GAC/B,OAAON,GAAIK,KAAKE,KAAMD,IACnB,SAAUA,GACb,OAAON,GAAIQ,OAAOC,MAAO,GAAIH,IAI1BI,EAAOV,GAAIU,KAEXC,GAAUX,GAAIW,QAEdC,EAAa,GAEbC,EAAWD,EAAWC,SAEtBC,GAASF,EAAWG,eAEpBC,EAAaF,GAAOD,SAEpBI,EAAuBD,EAAWT,KAAML,QAExCgB,GAAU,GAEVC,EAAa,SAAqBC,GASpC,MAAsB,mBAARA,GAA8C,iBAAjBA,EAAIC,UAC1B,mBAAbD,EAAIE,MAIVC,EAAW,SAAmBH,GAChC,OAAc,MAAPA,GAAeA,IAAQA,EAAIvB,QAIhCH,EAAWG,GAAOH,SAIjB8B,EAA4B,CAC/BC,MAAM,EACNC,KAAK,EACLC,OAAO,EACPC,UAAU,GAGX,SAASC,EAASC,EAAMC,EAAMC,GAG7B,IAAIC,EAAGC,EACNC,GAHDH,EAAMA,GAAOtC,GAGC0C,cAAe,UAG7B,GADAD,EAAOE,KAAOP,EACTC,EACJ,IAAME,KAAKT,GAYVU,EAAMH,EAAME,IAAOF,EAAKO,cAAgBP,EAAKO,aAAcL,KAE1DE,EAAOI,aAAcN,EAAGC,GAI3BF,EAAIQ,KAAKC,YAAaN,GAASO,WAAWC,YAAaR,GAIzD,SAASS,EAAQxB,GAChB,OAAY,MAAPA,EACGA,EAAM,GAIQ,iBAARA,GAAmC,mBAARA,EACxCR,EAAYC,EAASN,KAAMa,KAAW,gBAC/BA,EAQT,IAAIyB,EAAU,QAEbC,EAAc,SAGdC,GAAS,SAAUC,EAAUC,GAI5B,OAAO,IAAIF,GAAOG,GAAGC,KAAMH,EAAUC,IAmYvC,SAASG,EAAahC,GAMrB,IAAIiC,IAAWjC,GAAO,WAAYA,GAAOA,EAAIiC,OAC5C5B,EAAOmB,EAAQxB,GAEhB,OAAKD,EAAYC,KAASG,EAAUH,KAIpB,UAATK,GAA+B,IAAX4B,GACR,iBAAXA,GAAgC,EAATA,GAAgBA,EAAS,KAAOjC,GAIhE,SAASkC,GAAUC,EAAMC,GAExB,OAAOD,EAAKD,UAAYC,EAAKD,SAASG,gBAAkBD,EAAKC,cApZ9DV,GAAOG,GAAKH,GAAOW,UAAY,CAG9BC,OAAQd,EAERe,YAAab,GAGbM,OAAQ,EAERQ,QAAS,WACR,OAAOzD,GAAMG,KAAMT,OAKpBgE,IAAK,SAAUC,GAGd,OAAY,MAAPA,EACG3D,GAAMG,KAAMT,MAIbiE,EAAM,EAAIjE,KAAMiE,EAAMjE,KAAKuD,QAAWvD,KAAMiE,IAKpDC,UAAW,SAAUC,GAGpB,IAAIC,EAAMnB,GAAOoB,MAAOrE,KAAK8D,cAAeK,GAM5C,OAHAC,EAAIE,WAAatE,KAGVoE,GAIRG,KAAM,SAAUC,GACf,OAAOvB,GAAOsB,KAAMvE,KAAMwE,IAG3BC,IAAK,SAAUD,GACd,OAAOxE,KAAKkE,UAAWjB,GAAOwB,IAAKzE,KAAM,SAAUyD,EAAMtB,GACxD,OAAOqC,EAAS/D,KAAMgD,EAAMtB,EAAGsB,OAIjCnD,MAAO,WACN,OAAON,KAAKkE,UAAW5D,GAAMK,MAAOX,KAAM0E,aAG3CC,MAAO,WACN,OAAO3E,KAAK4E,GAAI,IAGjBC,KAAM,WACL,OAAO7E,KAAK4E,IAAK,IAGlBE,KAAM,WACL,OAAO9E,KAAKkE,UAAWjB,GAAO8B,KAAM/E,KAAM,SAAUgF,EAAO7C,GAC1D,OAASA,EAAI,GAAM,MAIrB8C,IAAK,WACJ,OAAOjF,KAAKkE,UAAWjB,GAAO8B,KAAM/E,KAAM,SAAUgF,EAAO7C,GAC1D,OAAOA,EAAI,MAIbyC,GAAI,SAAUzC,GACb,IAAI+C,EAAMlF,KAAKuD,OACd4B,GAAKhD,GAAMA,EAAI,EAAI+C,EAAM,GAC1B,OAAOlF,KAAKkE,UAAgB,GAALiB,GAAUA,EAAID,EAAM,CAAElF,KAAMmF,IAAQ,KAG5DC,IAAK,WACJ,OAAOpF,KAAKsE,YAActE,KAAK8D,eAKhClD,KAAMA,EACNyE,KAAMnF,GAAImF,KACVC,OAAQpF,GAAIoF,QAGbrC,GAAOsC,OAAStC,GAAOG,GAAGmC,OAAS,WAClC,IAAIC,EAAS9B,EAAM9B,EAAK6D,
EAAMC,EAAaC,EAC1CC,EAASlB,UAAW,IAAO,GAC3BvC,EAAI,EACJoB,EAASmB,UAAUnB,OACnBsC,GAAO,EAsBR,IAnBuB,kBAAXD,IACXC,EAAOD,EAGPA,EAASlB,UAAWvC,IAAO,GAC3BA,KAIsB,iBAAXyD,GAAwBvE,EAAYuE,KAC/CA,EAAS,IAILzD,IAAMoB,IACVqC,EAAS5F,KACTmC,KAGOA,EAAIoB,EAAQpB,IAGnB,GAAqC,OAA9BqD,EAAUd,UAAWvC,IAG3B,IAAMuB,KAAQ8B,EACbC,EAAOD,EAAS9B,GAIF,cAATA,GAAwBkC,IAAWH,IAKnCI,GAAQJ,IAAUxC,GAAO6C,cAAeL,KAC1CC,EAAcK,MAAMC,QAASP,MAC/B7D,EAAMgE,EAAQlC,GAIbiC,EADID,IAAgBK,MAAMC,QAASpE,GAC3B,GACI8D,GAAgBzC,GAAO6C,cAAelE,GAG1CA,EAFA,GAIT8D,GAAc,EAGdE,EAAQlC,GAAST,GAAOsC,OAAQM,EAAMF,EAAOF,SAGzBQ,IAATR,IACXG,EAAQlC,GAAS+B,IAOrB,OAAOG,GAGR3C,GAAOsC,OAAQ,CAGdW,QAAS,UAAanD,EAAUoD,KAAKC,UAAWC,QAAS,MAAO,IAGhEC,SAAS,EAETC,MAAO,SAAUC,GAChB,MAAM,IAAI1G,MAAO0G,IAGlBC,KAAM,aAENX,cAAe,SAAUxE,GACxB,IAAIoF,EAAOC,EAIX,SAAMrF,GAAgC,oBAAzBP,EAASN,KAAMa,QAI5BoF,EAAQvG,EAAUmB,KASK,mBADvBqF,EAAO3F,GAAOP,KAAMiG,EAAO,gBAAmBA,EAAM5C,cACf5C,EAAWT,KAAMkG,KAAWxF,IAGlEyF,cAAe,SAAUtF,GACxB,IAAIoC,EAEJ,IAAMA,KAAQpC,EACb,OAAO,EAER,OAAO,GAKRuF,WAAY,SAAU7E,EAAMwD,EAAStD,GACpCH,EAASC,EAAM,CAAEH,MAAO2D,GAAWA,EAAQ3D,OAASK,IAGrDqC,KAAM,SAAUjD,EAAKkD,GACpB,IAAIjB,EAAQpB,EAAI,EAEhB,GAAKmB,EAAahC,IAEjB,IADAiC,EAASjC,EAAIiC,OACLpB,EAAIoB,EAAQpB,IACnB,IAAgD,IAA3CqC,EAAS/D,KAAMa,EAAKa,GAAKA,EAAGb,EAAKa,IACrC,WAIF,IAAMA,KAAKb,EACV,IAAgD,IAA3CkD,EAAS/D,KAAMa,EAAKa,GAAKA,EAAGb,EAAKa,IACrC,MAKH,OAAOb,GAKRiB,KAAM,SAAUkB,GACf,IAAIxB,EACHmC,EAAM,GACNjC,EAAI,EACJZ,EAAWkC,EAAKlC,SAEjB,IAAMA,EAGL,MAAUU,EAAOwB,EAAMtB,KAGtBiC,GAAOnB,GAAOV,KAAMN,GAGtB,OAAkB,IAAbV,GAA+B,KAAbA,EACfkC,EAAKqD,YAEK,IAAbvF,EACGkC,EAAKsD,gBAAgBD,YAEX,IAAbvF,GAA+B,IAAbA,EACfkC,EAAKuD,UAKN5C,GAIR6C,UAAW,SAAU/G,EAAKgH,GACzB,IAAI9C,EAAM8C,GAAW,GAarB,OAXY,MAAPhH,IACCoD,EAAalD,OAAQF,IACzB+C,GAAOoB,MAAOD,EACE,iBAARlE,EACN,CAAEA,GAAQA,GAGZU,EAAKH,KAAM2D,EAAKlE,IAIXkE,GAGR+C,QAAS,SAAU1D,EAAMvD,EAAKiC,GAC7B,OAAc,MAAPjC,GAAe,EAAIW,GAAQJ,KAAMP,EAAKuD,EAAMtB,IAGpDiF,SAAU,SAAU3D,GACnB,IAAI4D,EAAY5D,GAAQA,EAAK6D,aAC5BC,EAAU9D,IAAUA,EAAK+D,eAAiB/D,GAAOsD,gBAIlD,OAAQ/D,EAAYyE,KAAMJ,GAAaE,GAAWA,EAAQ/D,UAAY,SAKvEa,MAAO,SAAUM,EAAO+C,GAKvB,IAJA,IAAIxC,GAAOwC,EAAOnE,OACjB4B,EAAI,EACJhD,EAAIwC,EAAMpB,OAEH4B,EAAID,EAAKC,IAChBR,EAAOxC,KAAQuF,EAAQvC,GAKxB,OAFAR,EAAMpB,OAASpB,EAERwC,GAGRI,KAAM,SAAUZ,EAAOK,EAAUmD,GAShC,IARA,IACCC,EAAU,GACVzF,EAAI,EACJoB,EAASY,EAAMZ,OACfsE,GAAkBF,EAIXxF,EAAIoB,EAAQpB,KACAqC,EAAUL,EAAOhC,GAAKA,KAChB0F,GACxBD,EAAQhH,KAAMuD,EAAOhC,IAIvB,OAAOyF,GAIRnD,IAAK,SAAUN,EAAOK,EAAUsD,GAC/B,IAAIvE,EAAQwE,EACX5F,EAAI,EACJiC,EAAM,GAGP,GAAKd,EAAaa,GAEjB,IADAZ,EAASY,EAAMZ,OACPpB,EAAIoB,EAAQpB,IAGL,OAFd4F,EAAQvD,EAAUL,EAAOhC,GAAKA,EAAG2F,KAGhC1D,EAAIxD,KAAMmH,QAMZ,IAAM5F,KAAKgC,EAGI,OAFd4D,EAAQvD,EAAUL,EAAOhC,GAAKA,EAAG2F,KAGhC1D,EAAIxD,KAAMmH,GAMb,OAAOxH,EAAM6D,IAId4D,KAAM,EAIN5G,QAASA,KAGa,mBAAX6G,SACXhF,GAAOG,GAAI6E,OAAOC,UAAahI,GAAK+H,OAAOC,WAI5CjF,GAAOsB,KAAM,uEAAuE4D,MAAO,KAC1F,SAAUC,EAAI1E,GACb5C,EAAY,WAAa4C,EAAO,KAAQA,EAAKC,gBA0B/C,IAAI0E,GAAMnI,GAAImI,IAGVhD,GAAOnF,GAAImF,KAGXC,GAASpF,GAAIoF,OAGbgD,GAAa,sBAGbC,GAAW,IAAIC,OAClB,IAAMF,GAAa,8BAAgCA,GAAa,KAChE,KAODrF,GAAOwF,SAAW,SAAUC,EAAGC,GAC9B,IAAIC,EAAMD,GAAKA,EAAE/F,WAEjB,OAAO8F,IAAME,MAAWA,GAAwB,IAAjBA,EAAIrH,YAIlCmH,EAAED,SACDC,EAAED,SAAUG,GACZF,EAAEG,yBAA8D,GAAnCH,EAAEG,wBAAyBD,MAS3D,IAAIE,EAAa,+CAEjB,SAASC,EAAYC,EAAIC,GACxB,OAAKA,EAGQ,OAAPD,EACG,SAIDA,EAAG1I,MAAO,GAAI,GAAM,KAAO0I,EAAGE,WAAYF,EAAGzF,OAAS,GAAIxC,SAAU,IAAO,IAI5E,KAAOiI,EAGf/F,GAAOkG,eAAiB,SAAUC,GACjC,OAASA,EAAM,IAAK/C,QAASyC,EAAYC,IAM1C,IAAIM,GAAezJ,EAClB0J,GAAa1I,GAEd,WAEA,IAAIuB,EACHoH,EACAC,EACAC,EACAC,EAIA9J,EACAmH,EACA4C,EACAC,EACAhC,EAPAhH,EAAO0I,GAUPpD,EAAUjD,GAAOiD,QACjB2D,EAAU,EACVC,E
AAO,EACPC,EAAaC,IACbC,EAAaD,IACbE,EAAgBF,IAChBG,EAAyBH,IACzBI,EAAY,SAAU1B,EAAGC,GAIxB,OAHKD,IAAMC,IACVe,GAAe,GAET,GAGRW,EAAW,6HAMXC,EAAa,0BAA4BhC,GACxC,0CAGDiC,EAAa,MAAQjC,GAAa,KAAOgC,EAAa,OAAShC,GAG9D,gBAAkBA,GAGlB,2DAA6DgC,EAAa,OAC1EhC,GAAa,OAEdkC,EAAU,KAAOF,EAAa,wFAOAC,EAAa,eAO3CE,EAAc,IAAIjC,OAAQF,GAAa,IAAK,KAE5CoC,EAAS,IAAIlC,OAAQ,IAAMF,GAAa,KAAOA,GAAa,KAC5DqC,EAAqB,IAAInC,OAAQ,IAAMF,GAAa,WAAaA,GAAa,IAC7EA,GAAa,KACdsC,EAAW,IAAIpC,OAAQF,GAAa,MAEpCuC,EAAU,IAAIrC,OAAQgC,GACtBM,EAAc,IAAItC,OAAQ,IAAM8B,EAAa,KAE7CS,EAAY,CACXC,GAAI,IAAIxC,OAAQ,MAAQ8B,EAAa,KACrCW,MAAO,IAAIzC,OAAQ,QAAU8B,EAAa,KAC1CY,IAAK,IAAI1C,OAAQ,KAAO8B,EAAa,SACrCa,KAAM,IAAI3C,OAAQ,IAAM+B,GACxBa,OAAQ,IAAI5C,OAAQ,IAAMgC,GAC1Ba,MAAO,IAAI7C,OACV,yDACCF,GAAa,+BAAiCA,GAAa,cAC3DA,GAAa,aAAeA,GAAa,SAAU,KACrDgD,KAAM,IAAI9C,OAAQ,OAAS6B,EAAW,KAAM,KAI5CkB,aAAc,IAAI/C,OAAQ,IAAMF,GAC/B,mDAAqDA,GACrD,mBAAqBA,GAAa,mBAAoB,MAGxDkD,EAAU,sCACVC,EAAU,SAGVC,EAAa,mCAEbC,EAAW,OAIXC,EAAY,IAAIpD,OAAQ,uBAAyBF,GAChD,uBAAwB,KACzBuD,EAAY,SAAUC,EAAQC,GAC7B,IAAIC,EAAO,KAAOF,EAAOxL,MAAO,GAAM,MAEtC,OAAKyL,IAUEC,EAAO,EACbC,OAAOC,aAAcF,EAAO,OAC5BC,OAAOC,aAAcF,GAAQ,GAAK,MAAe,KAAPA,EAAe,SAO3DG,EAAgB,WACfC,KAGDC,EAAqBC,EACpB,SAAU7I,GACT,OAAyB,IAAlBA,EAAK8I,UAAqB/I,GAAUC,EAAM,aAElD,CAAE+I,IAAK,aAAcC,KAAM,WAa7B,IACC7L,EAAKD,MACFT,GAAMI,GAAMG,KAAM4I,GAAaqD,YACjCrD,GAAaqD,YAMdxM,GAAKmJ,GAAaqD,WAAWnJ,QAAShC,SACrC,MAAQoL,GACT/L,EAAO,CACND,MAAO,SAAUiF,EAAQgH,GACxBtD,GAAW3I,MAAOiF,EAAQtF,GAAMG,KAAMmM,KAEvCnM,KAAM,SAAUmF,GACf0D,GAAW3I,MAAOiF,EAAQtF,GAAMG,KAAMiE,UAAW,MAKpD,SAASmI,EAAM3J,EAAUC,EAAS+D,EAAS4F,GAC1C,IAAIC,EAAG5K,EAAGsB,EAAMuJ,EAAKC,EAAOC,EAAQC,EACnCC,EAAajK,GAAWA,EAAQqE,cAGhCjG,EAAW4B,EAAUA,EAAQ5B,SAAW,EAKzC,GAHA2F,EAAUA,GAAW,GAGI,iBAAbhE,IAA0BA,GACxB,IAAb3B,GAA+B,IAAbA,GAA+B,KAAbA,EAEpC,OAAO2F,EAIR,IAAM4F,IACLV,EAAajJ,GACbA,EAAUA,GAAWvD,EAEhB+J,GAAiB,CAIrB,GAAkB,KAAbpI,IAAqB0L,EAAQvB,EAAW2B,KAAMnK,IAGlD,GAAO6J,EAAIE,EAAO,IAGjB,GAAkB,IAAb1L,EAAiB,CACrB,KAAOkC,EAAON,EAAQmK,eAAgBP,IASrC,OAAO7F,EALP,GAAKzD,EAAK8J,KAAOR,EAEhB,OADAnM,EAAKH,KAAMyG,EAASzD,GACbyD,OAWT,GAAKkG,IAAgB3J,EAAO2J,EAAWE,eAAgBP,KACtDF,EAAKpE,SAAUtF,EAASM,IACxBA,EAAK8J,KAAOR,EAGZ,OADAnM,EAAKH,KAAMyG,EAASzD,GACbyD,MAKH,CAAA,GAAK+F,EAAO,GAElB,OADArM,EAAKD,MAAOuG,EAAS/D,EAAQqK,qBAAsBtK,IAC5CgE,EAGD,IAAO6F,EAAIE,EAAO,KAAS9J,EAAQsK,uBAEzC,OADA7M,EAAKD,MAAOuG,EAAS/D,EAAQsK,uBAAwBV,IAC9C7F,EAKT,KAAMiD,EAAwBjH,EAAW,MACrC0G,GAAcA,EAAUnC,KAAMvE,IAAe,CAYhD,GAVAiK,EAAcjK,EACdkK,EAAajK,EASK,IAAb5B,IACFqJ,EAASnD,KAAMvE,IAAcyH,EAAmBlD,KAAMvE,IAAe,EAGvEkK,EAAazB,EAASlE,KAAMvE,IAAcwK,EAAavK,EAAQP,aAC9DO,IAQkBA,GAAY/B,GAAQuM,SAG/BX,EAAM7J,EAAQX,aAAc,OAClCwK,EAAM/J,GAAOkG,eAAgB6D,GAE7B7J,EAAQV,aAAc,KAAQuK,EAAM9G,IAMtC/D,GADA+K,EAASU,EAAU1K,IACRK,OACX,MAAQpB,IACP+K,EAAQ/K,IAAQ6K,EAAM,IAAMA,EAAM,UAAa,IAC9Ca,EAAYX,EAAQ/K,IAEtBgL,EAAcD,EAAOY,KAAM,KAG5B,IAIC,OAHAlN,EAAKD,MAAOuG,EACXkG,EAAWW,iBAAkBZ,IAEvBjG,EACN,MAAQ8G,GACT7D,EAAwBjH,GAAU,GACjC,QACI8J,IAAQ9G,GACZ/C,EAAQ8K,gBAAiB,QAQ9B,OAAOC,GAAQhL,EAASmD,QAASkC,GAAU,MAAQpF,EAAS+D,EAAS4F,GAStE,SAAS9C,IACR,IAAImE,EAAO,GAaX,OAXA,SAASC,EAAOC,EAAKtG,GASpB,OALKoG,EAAKvN,KAAMyN,EAAM,KAAQ9E,EAAK+E,oBAG3BF,EAAOD,EAAKI,SAEXH,EAAOC,EAAM,KAAQtG,GAShC,SAASyG,EAAcpL,GAEtB,OADAA,EAAI8C,IAAY,EACT9C,EAOR,SAASqL,EAAQrL,GAChB,IAAIsL,EAAK9O,EAAS0C,cAAe,YAEjC,IACC,QAASc,EAAIsL,GACZ,MAAQ/B,GACT,OAAO,EACN,QAGI+B,EAAG9L,YACP8L,EAAG9L,WAAWC,YAAa6L,GAI5BA,EAAK,MAQP,SAASC,EAAmBhN,GAC3B,OAAO,SAAU8B,GAChB,OAAOD,GAAUC,EAAM,UAAaA,EAAK9B,OAASA,GAQpD,SAASiN,EAAoBjN,GAC5B,OAAO,SAAU8B,GAChB,OAASD,GAAUC,EAAM,UAAaD,GAAUC,EAAM,YACrDA,EAAK9B,OAASA,GAQjB,SAASkN,EAAsBtC,GAG9B,OAAO,SAAU9I,GAKhB,MAAK,SAAUA,EAST
A,EAAKb,aAAgC,IAAlBa,EAAK8I,SAGvB,UAAW9I,EACV,UAAWA,EAAKb,WACba,EAAKb,WAAW2J,WAAaA,EAE7B9I,EAAK8I,WAAaA,EAMpB9I,EAAKqL,aAAevC,GAG1B9I,EAAKqL,cAAgBvC,GACpBF,EAAoB5I,KAAW8I,EAG3B9I,EAAK8I,WAAaA,EAKd,UAAW9I,GACfA,EAAK8I,WAAaA,GAY5B,SAASwC,EAAwB3L,GAChC,OAAOoL,EAAc,SAAUQ,GAE9B,OADAA,GAAYA,EACLR,EAAc,SAAU1B,EAAMlF,GACpC,IAAIzC,EACH8J,EAAe7L,EAAI,GAAI0J,EAAKvJ,OAAQyL,GACpC7M,EAAI8M,EAAa1L,OAGlB,MAAQpB,IACF2K,EAAQ3H,EAAI8J,EAAc9M,MAC9B2K,EAAM3H,KAASyC,EAASzC,GAAM2H,EAAM3H,SAYzC,SAASuI,EAAavK,GACrB,OAAOA,GAAmD,oBAAjCA,EAAQqK,sBAAwCrK,EAQ1E,SAASiJ,EAAanK,GACrB,IAAIiN,EACHhN,EAAMD,EAAOA,EAAKuF,eAAiBvF,EAAOoH,GAO3C,OAAKnH,GAAOtC,GAA6B,IAAjBsC,EAAIX,UAAmBW,EAAI6E,kBAMnDA,GADAnH,EAAWsC,GACgB6E,gBAC3B4C,GAAkB1G,GAAOmE,SAAUxH,GAInCgI,EAAUb,EAAgBa,SACzBb,EAAgBoI,uBAChBpI,EAAgBqI,kBAOZrI,EAAgBqI,mBAMpB/F,IAAgBzJ,IACdsP,EAAYtP,EAASyP,cAAiBH,EAAUI,MAAQJ,GAG1DA,EAAUK,iBAAkB,SAAUpD,GAOvC/K,GAAQoO,QAAUf,EAAQ,SAAUC,GAEnC,OADA3H,EAAgBpE,YAAa+L,GAAKnB,GAAKtK,GAAOiD,SACtCtG,EAAS6P,oBACf7P,EAAS6P,kBAAmBxM,GAAOiD,SAAU3C,SAMhDnC,GAAQsO,kBAAoBjB,EAAQ,SAAUC,GAC7C,OAAO9G,EAAQnH,KAAMiO,EAAI,OAK1BtN,GAAQuM,MAAQc,EAAQ,WACvB,OAAO7O,EAASmO,iBAAkB,YAYnC3M,GAAQuO,OAASlB,EAAQ,WACxB,IAEC,OADA7O,EAASgQ,cAAe,oBACjB,EACN,MAAQjD,GACT,OAAO,KAKJvL,GAAQoO,SACZjG,EAAKsG,OAAO7E,GAAK,SAAUuC,GAC1B,IAAIuC,EAASvC,EAAGlH,QAASuF,EAAWC,GACpC,OAAO,SAAUpI,GAChB,OAAOA,EAAKjB,aAAc,QAAWsN,IAGvCvG,EAAKsD,KAAK7B,GAAK,SAAUuC,EAAIpK,GAC5B,GAAuC,oBAA3BA,EAAQmK,gBAAkC3D,EAAiB,CACtE,IAAIlG,EAAON,EAAQmK,eAAgBC,GACnC,OAAO9J,EAAO,CAAEA,GAAS,OAI3B8F,EAAKsG,OAAO7E,GAAM,SAAUuC,GAC3B,IAAIuC,EAASvC,EAAGlH,QAASuF,EAAWC,GACpC,OAAO,SAAUpI,GAChB,IAAIxB,EAAwC,oBAA1BwB,EAAKsM,kBACtBtM,EAAKsM,iBAAkB,MACxB,OAAO9N,GAAQA,EAAK8F,QAAU+H,IAMhCvG,EAAKsD,KAAK7B,GAAK,SAAUuC,EAAIpK,GAC5B,GAAuC,oBAA3BA,EAAQmK,gBAAkC3D,EAAiB,CACtE,IAAI1H,EAAME,EAAGgC,EACZV,EAAON,EAAQmK,eAAgBC,GAEhC,GAAK9J,EAAO,CAIX,IADAxB,EAAOwB,EAAKsM,iBAAkB,QACjB9N,EAAK8F,QAAUwF,EAC3B,MAAO,CAAE9J,GAIVU,EAAQhB,EAAQsM,kBAAmBlC,GACnCpL,EAAI,EACJ,MAAUsB,EAAOU,EAAOhC,KAEvB,IADAF,EAAOwB,EAAKsM,iBAAkB,QACjB9N,EAAK8F,QAAUwF,EAC3B,MAAO,CAAE9J,GAKZ,MAAO,MAMV8F,EAAKsD,KAAK3B,IAAM,SAAU8E,EAAK7M,GAC9B,MAA6C,oBAAjCA,EAAQqK,qBACZrK,EAAQqK,qBAAsBwC,GAI9B7M,EAAQ4K,iBAAkBiC,IAKnCzG,EAAKsD,KAAK5B,MAAQ,SAAUgF,EAAW9M,GACtC,GAA+C,oBAAnCA,EAAQsK,wBAA0C9D,EAC7D,OAAOxG,EAAQsK,uBAAwBwC,IASzCrG,EAAY,GAIZ6E,EAAQ,SAAUC,GAEjB,IAAIwB,EAEJnJ,EAAgBpE,YAAa+L,GAAKyB,UACjC,UAAYjK,EAAU,iDACLA,EAAU,oEAKtBwI,EAAGX,iBAAkB,cAAexK,QACzCqG,EAAUhJ,KAAM,MAAQ0H,GAAa,aAAe+B,EAAW,KAI1DqE,EAAGX,iBAAkB,QAAU7H,EAAU,MAAO3C,QACrDqG,EAAUhJ,KAAM,MAMX8N,EAAGX,iBAAkB,KAAO7H,EAAU,MAAO3C,QAClDqG,EAAUhJ,KAAM,YAOX8N,EAAGX,iBAAkB,YAAaxK,QACvCqG,EAAUhJ,KAAM,aAKjBsP,EAAQtQ,EAAS0C,cAAe,UAC1BG,aAAc,OAAQ,UAC5BiM,EAAG/L,YAAauN,GAAQzN,aAAc,OAAQ,KAQ9CsE,EAAgBpE,YAAa+L,GAAKnC,UAAW,EACM,IAA9CmC,EAAGX,iBAAkB,aAAcxK,QACvCqG,EAAUhJ,KAAM,WAAY,cAQ7BsP,EAAQtQ,EAAS0C,cAAe,UAC1BG,aAAc,OAAQ,IAC5BiM,EAAG/L,YAAauN,GACVxB,EAAGX,iBAAkB,aAAcxK,QACxCqG,EAAUhJ,KAAM,MAAQ0H,GAAa,QAAUA,GAAa,KAC3DA,GAAa,kBAIVlH,GAAQuO,QAQb/F,EAAUhJ,KAAM,QAGjBgJ,EAAYA,EAAUrG,QAAU,IAAIiF,OAAQoB,EAAUkE,KAAM,MAM5D1D,EAAY,SAAU1B,EAAGC,GAGxB,GAAKD,IAAMC,EAEV,OADAe,GAAe,EACR,EAIR,IAAI0G,GAAW1H,EAAEG,yBAA2BF,EAAEE,wBAC9C,OAAKuH,IAgBU,GAPfA,GAAY1H,EAAElB,eAAiBkB,KAASC,EAAEnB,eAAiBmB,GAC1DD,EAAEG,wBAAyBF,GAG3B,KAIGvH,GAAQiP,cAAgB1H,EAAEE,wBAAyBH,KAAQ0H,EAOzD1H,IAAM9I,GAAY8I,EAAElB,eAAiB6B,IACzCwD,EAAKpE,SAAUY,GAAcX,IACrB,EAOJC,IAAM/I,GAAY+I,EAAEnB,eAAiB6B,IACzCwD,EAAKpE,SAAUY,GAAcV,GACtB,EAIDc,EACJ5I,GAAQJ,KAAMgJ,EAAWf,GAAM7H,GAAQJ,KAAMgJ,EAAWd,GAC1D,EAGe,EAAVyH,GAAe,EAAI,KAGpBxQ,EAqpBR,IAAMuC,KAlpBN0K,EAAKjF,QAAU,SAAU0I,EAAMC,GAC9B
,OAAO1D,EAAMyD,EAAM,KAAM,KAAMC,IAGhC1D,EAAK2D,gBAAkB,SAAU/M,EAAM6M,GAGtC,GAFAlE,EAAa3I,GAERkG,IACHQ,EAAwBmG,EAAO,QAC7B1G,IAAcA,EAAUnC,KAAM6I,IAEjC,IACC,IAAIlM,EAAMwD,EAAQnH,KAAMgD,EAAM6M,GAG9B,GAAKlM,GAAOhD,GAAQsO,mBAIlBjM,EAAK7D,UAAuC,KAA3B6D,EAAK7D,SAAS2B,SAChC,OAAO6C,EAEP,MAAQuI,GACTxC,EAAwBmG,GAAM,GAIhC,OAAuD,EAAhDzD,EAAMyD,EAAM1Q,EAAU,KAAM,CAAE6D,IAASF,QAG/CsJ,EAAKpE,SAAW,SAAUtF,EAASM,GAUlC,OAHON,EAAQqE,eAAiBrE,IAAavD,GAC5CwM,EAAajJ,GAEPF,GAAOwF,SAAUtF,EAASM,IAIlCoJ,EAAK4D,KAAO,SAAUhN,EAAMC,IAOpBD,EAAK+D,eAAiB/D,IAAU7D,GACtCwM,EAAa3I,GAGd,IAAIL,EAAKmG,EAAKmH,WAAYhN,EAAKC,eAG9BvB,EAAMgB,GAAMpC,GAAOP,KAAM8I,EAAKmH,WAAYhN,EAAKC,eAC9CP,EAAIK,EAAMC,GAAOiG,QACjB1D,EAEF,YAAaA,IAAR7D,EACGA,EAGDqB,EAAKjB,aAAckB,IAG3BmJ,EAAKtG,MAAQ,SAAUC,GACtB,MAAM,IAAI1G,MAAO,0CAA4C0G,IAO9DvD,GAAO0N,WAAa,SAAUzJ,GAC7B,IAAIzD,EACHmN,EAAa,GACbzL,EAAI,EACJhD,EAAI,EAWL,GAJAuH,GAAgBtI,GAAQyP,WACxBpH,GAAarI,GAAQyP,YAAcvQ,GAAMG,KAAMyG,EAAS,GACxD7B,GAAK5E,KAAMyG,EAASkD,GAEfV,EAAe,CACnB,MAAUjG,EAAOyD,EAAS/E,KACpBsB,IAASyD,EAAS/E,KACtBgD,EAAIyL,EAAWhQ,KAAMuB,IAGvB,MAAQgD,IACPG,GAAO7E,KAAMyG,EAAS0J,EAAYzL,GAAK,GAQzC,OAFAsE,EAAY,KAELvC,GAGRjE,GAAOG,GAAGuN,WAAa,WACtB,OAAO3Q,KAAKkE,UAAWjB,GAAO0N,WAAYrQ,GAAMK,MAAOX,UAGxDuJ,EAAOtG,GAAOqN,KAAO,CAGpBhC,YAAa,GAEbwC,aAActC,EAEdvB,MAAOlC,EAEP2F,WAAY,GAEZ7D,KAAM,GAENkE,SAAU,CACTC,IAAK,CAAExE,IAAK,aAAc7H,OAAO,GACjCsM,IAAK,CAAEzE,IAAK,cACZ0E,IAAK,CAAE1E,IAAK,kBAAmB7H,OAAO,GACtCwM,IAAK,CAAE3E,IAAK,oBAGb4E,UAAW,CACVjG,KAAM,SAAU8B,GAWf,OAVAA,EAAO,GAAMA,EAAO,GAAI5G,QAASuF,EAAWC,GAG5CoB,EAAO,IAAQA,EAAO,IAAOA,EAAO,IAAOA,EAAO,IAAO,IACvD5G,QAASuF,EAAWC,GAEF,OAAfoB,EAAO,KACXA,EAAO,GAAM,IAAMA,EAAO,GAAM,KAG1BA,EAAM3M,MAAO,EAAG,IAGxB+K,MAAO,SAAU4B,GAkChB,OAtBAA,EAAO,GAAMA,EAAO,GAAItJ,cAEU,QAA7BsJ,EAAO,GAAI3M,MAAO,EAAG,IAGnB2M,EAAO,IACZJ,EAAKtG,MAAO0G,EAAO,IAKpBA,EAAO,KAASA,EAAO,GACtBA,EAAO,IAAQA,EAAO,IAAO,GAC7B,GAAqB,SAAfA,EAAO,IAAiC,QAAfA,EAAO,KAEvCA,EAAO,KAAWA,EAAO,GAAMA,EAAO,IAAwB,QAAfA,EAAO,KAG3CA,EAAO,IAClBJ,EAAKtG,MAAO0G,EAAO,IAGbA,GAGR7B,OAAQ,SAAU6B,GACjB,IAAIoE,EACHC,GAAYrE,EAAO,IAAOA,EAAO,GAElC,OAAKlC,EAAUM,MAAM5D,KAAMwF,EAAO,IAC1B,MAIHA,EAAO,GACXA,EAAO,GAAMA,EAAO,IAAOA,EAAO,IAAO,GAG9BqE,GAAYzG,EAAQpD,KAAM6J,KAGnCD,EAASzD,EAAU0D,GAAU,MAG7BD,EAASC,EAASzQ,QAAS,IAAKyQ,EAAS/N,OAAS8N,GAAWC,EAAS/N,UAGxE0J,EAAO,GAAMA,EAAO,GAAI3M,MAAO,EAAG+Q,GAClCpE,EAAO,GAAMqE,EAAShR,MAAO,EAAG+Q,IAI1BpE,EAAM3M,MAAO,EAAG,MAIzBuP,OAAQ,CAEP3E,IAAK,SAAUqG,GACd,IAAIC,EAAmBD,EAAiBlL,QAASuF,EAAWC,GAAYlI,cACxE,MAA4B,MAArB4N,EACN,WACC,OAAO,GAER,SAAU9N,GACT,OAAOD,GAAUC,EAAM+N,KAI1BvG,MAAO,SAAUgF,GAChB,IAAIwB,EAAU1H,EAAYkG,EAAY,KAEtC,OAAOwB,IACJA,EAAU,IAAIjJ,OAAQ,MAAQF,GAAa,IAAM2H,EAClD,IAAM3H,GAAa,SACpByB,EAAYkG,EAAW,SAAUxM,GAChC,OAAOgO,EAAQhK,KACY,iBAAnBhE,EAAKwM,WAA0BxM,EAAKwM,WACb,oBAAtBxM,EAAKjB,cACXiB,EAAKjB,aAAc,UACpB,OAKL2I,KAAM,SAAUzH,EAAMgO,EAAUC,GAC/B,OAAO,SAAUlO,GAChB,IAAImO,EAAS/E,EAAK4D,KAAMhN,EAAMC,GAE9B,OAAe,MAAVkO,EACgB,OAAbF,GAEFA,IAINE,GAAU,GAEQ,MAAbF,EACGE,IAAWD,EAED,OAAbD,EACGE,IAAWD,EAED,OAAbD,EACGC,GAAqC,IAA5BC,EAAO/Q,QAAS8Q,GAEf,OAAbD,EACGC,IAAoC,EAA3BC,EAAO/Q,QAAS8Q,GAEf,OAAbD,EACGC,GAASC,EAAOtR,OAAQqR,EAAMpO,UAAaoO,EAEjC,OAAbD,GAEkB,GADb,IAAME,EAAOvL,QAASoE,EAAa,KAAQ,KAClD5J,QAAS8Q,GAEM,OAAbD,IACGE,IAAWD,GAASC,EAAOtR,MAAO,EAAGqR,EAAMpO,OAAS,KAAQoO,EAAQ,QAO9EtG,MAAO,SAAU1J,EAAMkQ,EAAMC,EAAWnN,EAAOE,GAC9C,IAAIkN,EAAgC,QAAvBpQ,EAAKrB,MAAO,EAAG,GAC3B0R,EAA+B,SAArBrQ,EAAKrB,OAAQ,GACvB2R,EAAkB,YAATJ,EAEV,OAAiB,IAAVlN,GAAwB,IAATE,EAGrB,SAAUpB,GACT,QAASA,EAAKb,YAGf,SAAUa,EAAMyO,EAAUC,GACzB,IAAI/D,EAAOgE,EAAYnQ,EAAMoQ,EAAWC,EACvC9F,EAAMuF,IAAWC,EAAU,cAAgB,kBAC3CO,EAAS9O,EAAKb,WACdc,EAAOuO,GAAUxO,EAAKD,SAAS
G,cAC/B6O,GAAYL,IAAQF,EACpBQ,GAAO,EAER,GAAKF,EAAS,CAGb,GAAKR,EAAS,CACb,MAAQvF,EAAM,CACbvK,EAAOwB,EACP,MAAUxB,EAAOA,EAAMuK,GACtB,GAAKyF,EACJzO,GAAUvB,EAAMyB,GACE,IAAlBzB,EAAKV,SAEL,OAAO,EAKT+Q,EAAQ9F,EAAe,SAAT7K,IAAoB2Q,GAAS,cAE5C,OAAO,EAMR,GAHAA,EAAQ,CAAEN,EAAUO,EAAOG,WAAaH,EAAOI,WAG1CX,GAAWQ,EAAW,CAM1BC,GADAJ,GADAjE,GADAgE,EAAaG,EAAQrM,KAAeqM,EAAQrM,GAAY,KACpCvE,IAAU,IACX,KAAQkI,GAAWuE,EAAO,KACzBA,EAAO,GAC3BnM,EAAOoQ,GAAaE,EAAO7F,WAAY2F,GAEvC,MAAUpQ,IAASoQ,GAAapQ,GAAQA,EAAMuK,KAG3CiG,EAAOJ,EAAY,IAAOC,EAAMjK,MAGlC,GAAuB,IAAlBpG,EAAKV,YAAoBkR,GAAQxQ,IAASwB,EAAO,CACrD2O,EAAYzQ,GAAS,CAAEkI,EAASwI,EAAWI,GAC3C,YAgBF,GATKD,IAIJC,EADAJ,GADAjE,GADAgE,EAAa3O,EAAMyC,KAAezC,EAAMyC,GAAY,KAChCvE,IAAU,IACX,KAAQkI,GAAWuE,EAAO,KAMhC,IAATqE,EAGJ,MAAUxQ,IAASoQ,GAAapQ,GAAQA,EAAMuK,KAC3CiG,EAAOJ,EAAY,IAAOC,EAAMjK,MAElC,IAAO4J,EACNzO,GAAUvB,EAAMyB,GACE,IAAlBzB,EAAKV,aACHkR,IAGGD,KACJJ,EAAanQ,EAAMiE,KAChBjE,EAAMiE,GAAY,KACTvE,GAAS,CAAEkI,EAAS4I,IAG5BxQ,IAASwB,GACb,MASL,OADAgP,GAAQ5N,KACQF,GAAW8N,EAAO9N,GAAU,GAAqB,GAAhB8N,EAAO9N,KAK5DyG,OAAQ,SAAUwH,EAAQ5D,GAMzB,IAAI6D,EACHzP,EAAKmG,EAAKiB,QAASoI,IAAYrJ,EAAKuJ,WAAYF,EAAOjP,gBACtDkJ,EAAKtG,MAAO,uBAAyBqM,GAKvC,OAAKxP,EAAI8C,GACD9C,EAAI4L,GAIK,EAAZ5L,EAAGG,QACPsP,EAAO,CAAED,EAAQA,EAAQ,GAAI5D,GACtBzF,EAAKuJ,WAAW7R,eAAgB2R,EAAOjP,eAC7C6K,EAAc,SAAU1B,EAAMlF,GAC7B,IAAImL,EACHC,EAAU5P,EAAI0J,EAAMkC,GACpB7M,EAAI6Q,EAAQzP,OACb,MAAQpB,IAEP2K,EADAiG,EAAMlS,GAAQJ,KAAMqM,EAAMkG,EAAS7Q,OAClByF,EAASmL,GAAQC,EAAS7Q,MAG7C,SAAUsB,GACT,OAAOL,EAAIK,EAAM,EAAGoP,KAIhBzP,IAIToH,QAAS,CAGRyI,IAAKzE,EAAc,SAAUtL,GAK5B,IAAIgN,EAAQ,GACXhJ,EAAU,GACVgM,EAAUC,GAASjQ,EAASmD,QAASkC,GAAU,OAEhD,OAAO2K,EAAShN,GACfsI,EAAc,SAAU1B,EAAMlF,EAASsK,EAAUC,GAChD,IAAI1O,EACH2P,EAAYF,EAASpG,EAAM,KAAMqF,EAAK,IACtChQ,EAAI2K,EAAKvJ,OAGV,MAAQpB,KACAsB,EAAO2P,EAAWjR,MACxB2K,EAAM3K,KAASyF,EAASzF,GAAMsB,MAIjC,SAAUA,EAAMyO,EAAUC,GAOzB,OANAjC,EAAO,GAAMzM,EACbyP,EAAShD,EAAO,KAAMiC,EAAKjL,GAI3BgJ,EAAO,GAAM,MACLhJ,EAAQmB,SAInBgL,IAAK7E,EAAc,SAAUtL,GAC5B,OAAO,SAAUO,GAChB,OAAuC,EAAhCoJ,EAAM3J,EAAUO,GAAOF,UAIhCkF,SAAU+F,EAAc,SAAUjM,GAEjC,OADAA,EAAOA,EAAK8D,QAASuF,EAAWC,GACzB,SAAUpI,GAChB,OAAsE,GAA7DA,EAAKqD,aAAe7D,GAAOV,KAAMkB,IAAS5C,QAAS0B,MAW9D+Q,KAAM9E,EAAc,SAAU8E,GAO7B,OAJMxI,EAAYrD,KAAM6L,GAAQ,KAC/BzG,EAAKtG,MAAO,qBAAuB+M,GAEpCA,EAAOA,EAAKjN,QAASuF,EAAWC,GAAYlI,cACrC,SAAUF,GAChB,IAAI8P,EACJ,GACC,GAAOA,EAAW5J,EACjBlG,EAAK6P,KACL7P,EAAKjB,aAAc,aAAgBiB,EAAKjB,aAAc,QAGtD,OADA+Q,EAAWA,EAAS5P,iBACA2P,GAA2C,IAAnCC,EAAS1S,QAASyS,EAAO,YAE3C7P,EAAOA,EAAKb,aAAkC,IAAlBa,EAAKlC,UAC7C,OAAO,KAKTqE,OAAQ,SAAUnC,GACjB,IAAI+P,EAAOzT,GAAO0T,UAAY1T,GAAO0T,SAASD,KAC9C,OAAOA,GAAQA,EAAKlT,MAAO,KAAQmD,EAAK8J,IAGzCmG,KAAM,SAAUjQ,GACf,OAAOA,IAASsD,GAGjB4M,MAAO,SAAUlQ,GAChB,OAAOA,IA5oCV,WACC,IACC,OAAO7D,EAASgU,cACf,MAAQC,KAyoCQC,IACflU,EAASmU,eACLtQ,EAAK9B,MAAQ8B,EAAKuQ,OAASvQ,EAAKwQ,WAItCC,QAASrF,GAAsB,GAC/BtC,SAAUsC,GAAsB,GAEhCsF,QAAS,SAAU1Q,GAIlB,OAASD,GAAUC,EAAM,YAAeA,EAAK0Q,SAC1C3Q,GAAUC,EAAM,aAAgBA,EAAK2Q,UAGzCA,SAAU,SAAU3Q,GAWnB,OALKA,EAAKb,YAETa,EAAKb,WAAWyR,eAGQ,IAAlB5Q,EAAK2Q,UAIbE,MAAO,SAAU7Q,GAMhB,IAAMA,EAAOA,EAAKiP,WAAYjP,EAAMA,EAAOA,EAAK8Q,YAC/C,GAAK9Q,EAAKlC,SAAW,EACpB,OAAO,EAGT,OAAO,GAGRgR,OAAQ,SAAU9O,GACjB,OAAQ8F,EAAKiB,QAAQ8J,MAAO7Q,IAI7B+Q,OAAQ,SAAU/Q,GACjB,OAAOgI,EAAQhE,KAAMhE,EAAKD,WAG3B0M,MAAO,SAAUzM,GAChB,OAAO+H,EAAQ/D,KAAMhE,EAAKD,WAG3BiR,OAAQ,SAAUhR,GACjB,OAAOD,GAAUC,EAAM,UAA2B,WAAdA,EAAK9B,MACxC6B,GAAUC,EAAM,WAGlBlB,KAAM,SAAUkB,GACf,IAAIgN,EACJ,OAAOjN,GAAUC,EAAM,UAA2B,SAAdA,EAAK9B,OAKI,OAAxC8O,EAAOhN,EAAKjB,aAAc,UACN,SAAvBiO,EAAK9M,gBAIRgB,MAAOoK,EAAwB,WAC9B,MAAO,CAAE,KAGVlK,KAAMkK,EAAwB,SAAU2F,EAAenR,GACtD,MAAO,CAAEA,EAAS,
KAGnBqB,GAAImK,EAAwB,SAAU2F,EAAenR,EAAQyL,GAC5D,MAAO,CAAEA,EAAW,EAAIA,EAAWzL,EAASyL,KAG7ClK,KAAMiK,EAAwB,SAAUE,EAAc1L,GAErD,IADA,IAAIpB,EAAI,EACAA,EAAIoB,EAAQpB,GAAK,EACxB8M,EAAarO,KAAMuB,GAEpB,OAAO8M,IAGRhK,IAAK8J,EAAwB,SAAUE,EAAc1L,GAEpD,IADA,IAAIpB,EAAI,EACAA,EAAIoB,EAAQpB,GAAK,EACxB8M,EAAarO,KAAMuB,GAEpB,OAAO8M,IAGR0F,GAAI5F,EAAwB,SAAUE,EAAc1L,EAAQyL,GAC3D,IAAI7M,EAUJ,IAPCA,EADI6M,EAAW,EACXA,EAAWzL,EACOA,EAAXyL,EACPzL,EAEAyL,EAGU,KAAL7M,GACT8M,EAAarO,KAAMuB,GAEpB,OAAO8M,IAGR2F,GAAI7F,EAAwB,SAAUE,EAAc1L,EAAQyL,GAE3D,IADA,IAAI7M,EAAI6M,EAAW,EAAIA,EAAWzL,EAASyL,IACjC7M,EAAIoB,GACb0L,EAAarO,KAAMuB,GAEpB,OAAO8M,OAKLzE,QAAQqK,IAAMtL,EAAKiB,QAAQ5F,GAGrB,CAAEkQ,OAAO,EAAMC,UAAU,EAAMC,MAAM,EAAMC,UAAU,EAAMC,OAAO,GAC5E3L,EAAKiB,QAASrI,GAAMwM,EAAmBxM,GAExC,IAAMA,IAAK,CAAEgT,QAAQ,EAAMC,OAAO,GACjC7L,EAAKiB,QAASrI,GAAMyM,EAAoBzM,GAIzC,SAAS2Q,KAIT,SAASlF,EAAU1K,EAAUmS,GAC5B,IAAIrC,EAAS/F,EAAOqI,EAAQ3T,EAC3B4T,EAAOrI,EAAQsI,EACfC,EAASxL,EAAY/G,EAAW,KAEjC,GAAKuS,EACJ,OAAOJ,EAAY,EAAII,EAAOnV,MAAO,GAGtCiV,EAAQrS,EACRgK,EAAS,GACTsI,EAAajM,EAAK6H,UAElB,MAAQmE,EAAQ,CA2Bf,IAAM5T,KAxBAqR,KAAa/F,EAAQvC,EAAO2C,KAAMkI,MAClCtI,IAGJsI,EAAQA,EAAMjV,MAAO2M,EAAO,GAAI1J,SAAYgS,GAE7CrI,EAAOtM,KAAQ0U,EAAS,KAGzBtC,GAAU,GAGH/F,EAAQtC,EAAmB0C,KAAMkI,MACvCvC,EAAU/F,EAAMsB,QAChB+G,EAAO1U,KAAM,CACZmH,MAAOiL,EAGPrR,KAAMsL,EAAO,GAAI5G,QAASkC,GAAU,OAErCgN,EAAQA,EAAMjV,MAAO0S,EAAQzP,SAIhBgG,EAAKsG,SACX5C,EAAQlC,EAAWpJ,GAAO0L,KAAMkI,KAAgBC,EAAY7T,MAChEsL,EAAQuI,EAAY7T,GAAQsL,MAC9B+F,EAAU/F,EAAMsB,QAChB+G,EAAO1U,KAAM,CACZmH,MAAOiL,EACPrR,KAAMA,EACNiG,QAASqF,IAEVsI,EAAQA,EAAMjV,MAAO0S,EAAQzP,SAI/B,IAAMyP,EACL,MAOF,OAAKqC,EACGE,EAAMhS,OAGPgS,EACN1I,EAAKtG,MAAOrD,GAGZ+G,EAAY/G,EAAUgK,GAAS5M,MAAO,GAGxC,SAASuN,EAAYyH,GAIpB,IAHA,IAAInT,EAAI,EACP+C,EAAMoQ,EAAO/R,OACbL,EAAW,GACJf,EAAI+C,EAAK/C,IAChBe,GAAYoS,EAAQnT,GAAI4F,MAEzB,OAAO7E,EAGR,SAASoJ,EAAe4G,EAASwC,EAAYC,GAC5C,IAAInJ,EAAMkJ,EAAWlJ,IACpBoJ,EAAOF,EAAWjJ,KAClB4B,EAAMuH,GAAQpJ,EACdqJ,EAAmBF,GAAgB,eAARtH,EAC3ByH,EAAWhM,IAEZ,OAAO4L,EAAW/Q,MAGjB,SAAUlB,EAAMN,EAASgP,GACxB,MAAU1O,EAAOA,EAAM+I,GACtB,GAAuB,IAAlB/I,EAAKlC,UAAkBsU,EAC3B,OAAO3C,EAASzP,EAAMN,EAASgP,GAGjC,OAAO,GAIR,SAAU1O,EAAMN,EAASgP,GACxB,IAAI4D,EAAU3D,EACb4D,EAAW,CAAEnM,EAASiM,GAGvB,GAAK3D,GACJ,MAAU1O,EAAOA,EAAM+I,GACtB,IAAuB,IAAlB/I,EAAKlC,UAAkBsU,IACtB3C,EAASzP,EAAMN,EAASgP,GAC5B,OAAO,OAKV,MAAU1O,EAAOA,EAAM+I,GACtB,GAAuB,IAAlB/I,EAAKlC,UAAkBsU,EAG3B,GAFAzD,EAAa3O,EAAMyC,KAAezC,EAAMyC,GAAY,IAE/C0P,GAAQpS,GAAUC,EAAMmS,GAC5BnS,EAAOA,EAAM+I,IAAS/I,MAChB,CAAA,IAAOsS,EAAW3D,EAAY/D,KACpC0H,EAAU,KAAQlM,GAAWkM,EAAU,KAAQD,EAG/C,OAASE,EAAU,GAAMD,EAAU,GAOnC,IAHA3D,EAAY/D,GAAQ2H,GAGH,GAAM9C,EAASzP,EAAMN,EAASgP,GAC9C,OAAO,EAMZ,OAAO,GAIV,SAAS8D,EAAgBC,GACxB,OAAyB,EAAlBA,EAAS3S,OACf,SAAUE,EAAMN,EAASgP,GACxB,IAAIhQ,EAAI+T,EAAS3S,OACjB,MAAQpB,IACP,IAAM+T,EAAU/T,GAAKsB,EAAMN,EAASgP,GACnC,OAAO,EAGT,OAAO,GAER+D,EAAU,GAYZ,SAASC,EAAU/C,EAAW3O,EAAKoL,EAAQ1M,EAASgP,GAOnD,IANA,IAAI1O,EACH2S,EAAe,GACfjU,EAAI,EACJ+C,EAAMkO,EAAU7P,OAChB8S,EAAgB,MAAP5R,EAEFtC,EAAI+C,EAAK/C,KACTsB,EAAO2P,EAAWjR,MAClB0N,IAAUA,EAAQpM,EAAMN,EAASgP,KACtCiE,EAAaxV,KAAM6C,GACd4S,GACJ5R,EAAI7D,KAAMuB,KAMd,OAAOiU,EAGR,SAASE,GAAYlF,EAAWlO,EAAUgQ,EAASqD,EAAYC,EAAYC,GAO1E,OANKF,IAAeA,EAAYrQ,KAC/BqQ,EAAaD,GAAYC,IAErBC,IAAeA,EAAYtQ,KAC/BsQ,EAAaF,GAAYE,EAAYC,IAE/BjI,EAAc,SAAU1B,EAAM5F,EAAS/D,EAASgP,GACtD,IAAIuE,EAAMvU,EAAGsB,EAAMkT,EAClBC,EAAS,GACTC,EAAU,GACVC,EAAc5P,EAAQ3D,OAGtBY,EAAQ2I,GA5CX,SAA2B5J,EAAU6T,EAAU7P,GAG9C,IAFA,IAAI/E,EAAI,EACP+C,EAAM6R,EAASxT,OACRpB,EAAI+C,EAAK/C,IAChB0K,EAAM3J,EAAU6T,EAAU5U,GAAK+E,GAEhC,OAAOA,EAuCJ8P,CAAkB9T,GAAY,IAC7BC,EAAQ5B,SAAW,CAAE4B,GAAYA,EAAS,IAG
5C8T,GAAY7F,IAAetE,GAAS5J,EAEnCiB,EADAgS,EAAUhS,EAAOyS,EAAQxF,EAAWjO,EAASgP,GAsB/C,GAnBKe,EAaJA,EAAS+D,EATTN,EAAaH,IAAgB1J,EAAOsE,EAAY0F,GAAeP,GAG9D,GAGArP,EAG+B/D,EAASgP,GAEzCwE,EAAaM,EAITV,EAAa,CACjBG,EAAOP,EAAUQ,EAAYE,GAC7BN,EAAYG,EAAM,GAAIvT,EAASgP,GAG/BhQ,EAAIuU,EAAKnT,OACT,MAAQpB,KACAsB,EAAOiT,EAAMvU,MACnBwU,EAAYE,EAAS1U,MAAW8U,EAAWJ,EAAS1U,IAAQsB,IAK/D,GAAKqJ,GACJ,GAAK0J,GAAcpF,EAAY,CAC9B,GAAKoF,EAAa,CAGjBE,EAAO,GACPvU,EAAIwU,EAAWpT,OACf,MAAQpB,KACAsB,EAAOkT,EAAYxU,KAGzBuU,EAAK9V,KAAQqW,EAAW9U,GAAMsB,GAGhC+S,EAAY,KAAQG,EAAa,GAAMD,EAAMvE,GAI9ChQ,EAAIwU,EAAWpT,OACf,MAAQpB,KACAsB,EAAOkT,EAAYxU,MAC2C,GAAlEuU,EAAOF,EAAa3V,GAAQJ,KAAMqM,EAAMrJ,GAASmT,EAAQzU,MAE3D2K,EAAM4J,KAAYxP,EAASwP,GAASjT,UAOvCkT,EAAaR,EACZQ,IAAezP,EACdyP,EAAWrR,OAAQwR,EAAaH,EAAWpT,QAC3CoT,GAEGH,EACJA,EAAY,KAAMtP,EAASyP,EAAYxE,GAEvCvR,EAAKD,MAAOuG,EAASyP,KAMzB,SAASO,GAAmB5B,GA+B3B,IA9BA,IAAI6B,EAAcjE,EAAS/N,EAC1BD,EAAMoQ,EAAO/R,OACb6T,EAAkB7N,EAAKwH,SAAUuE,EAAQ,GAAI3T,MAC7C0V,EAAmBD,GAAmB7N,EAAKwH,SAAU,KACrD5O,EAAIiV,EAAkB,EAAI,EAG1BE,EAAehL,EAAe,SAAU7I,GACvC,OAAOA,IAAS0T,GACdE,GAAkB,GACrBE,EAAkBjL,EAAe,SAAU7I,GAC1C,OAA6C,EAAtC5C,GAAQJ,KAAM0W,EAAc1T,IACjC4T,GAAkB,GACrBnB,EAAW,CAAE,SAAUzS,EAAMN,EAASgP,GAMrC,IAAI/N,GAASgT,IAAqBjF,GAAOhP,GAAWqG,MACjD2N,EAAehU,GAAU5B,SAC1B+V,EAAc7T,EAAMN,EAASgP,GAC7BoF,EAAiB9T,EAAMN,EAASgP,IAKlC,OADAgF,EAAe,KACR/S,IAGDjC,EAAI+C,EAAK/C,IAChB,GAAO+Q,EAAU3J,EAAKwH,SAAUuE,EAAQnT,GAAIR,MAC3CuU,EAAW,CAAE5J,EAAe2J,EAAgBC,GAAYhD,QAClD,CAIN,IAHAA,EAAU3J,EAAKsG,OAAQyF,EAAQnT,GAAIR,MAAOhB,MAAO,KAAM2U,EAAQnT,GAAIyF,UAGrD1B,GAAY,CAIzB,IADAf,IAAMhD,EACEgD,EAAID,EAAKC,IAChB,GAAKoE,EAAKwH,SAAUuE,EAAQnQ,GAAIxD,MAC/B,MAGF,OAAO2U,GACF,EAAJnU,GAAS8T,EAAgBC,GACrB,EAAJ/T,GAAS0L,EAGRyH,EAAOhV,MAAO,EAAG6B,EAAI,GACnBzB,OAAQ,CAAEqH,MAAgC,MAAzBuN,EAAQnT,EAAI,GAAIR,KAAe,IAAM,MACvD0E,QAASkC,GAAU,MACrB2K,EACA/Q,EAAIgD,GAAK+R,GAAmB5B,EAAOhV,MAAO6B,EAAGgD,IAC7CA,EAAID,GAAOgS,GAAqB5B,EAASA,EAAOhV,MAAO6E,IACvDA,EAAID,GAAO2I,EAAYyH,IAGzBY,EAAStV,KAAMsS,GAIjB,OAAO+C,EAAgBC,GAiIxB,SAAS/C,GAASjQ,EAAU+J,GAC3B,IAAI9K,EA/H8BqV,EAAiBC,EAC/CC,EACHC,EACAC,EA6HAH,EAAc,GACdD,EAAkB,GAClB/B,EAASvL,EAAehH,EAAW,KAEpC,IAAMuS,EAAS,CAGRxI,IACLA,EAAQW,EAAU1K,IAEnBf,EAAI8K,EAAM1J,OACV,MAAQpB,KACPsT,EAASyB,GAAmBjK,EAAO9K,KACtB+D,GACZuR,EAAY7W,KAAM6U,GAElB+B,EAAgB5W,KAAM6U,IAKxBA,EAASvL,EAAehH,GArJSsU,EAsJNA,EArJxBE,EAA6B,GADkBD,EAsJNA,GArJrBlU,OACvBoU,EAAqC,EAAzBH,EAAgBjU,OAC5BqU,EAAe,SAAU9K,EAAM3J,EAASgP,EAAKjL,EAAS2Q,GACrD,IAAIpU,EAAM0B,EAAG+N,EACZ4E,EAAe,EACf3V,EAAI,IACJiR,EAAYtG,GAAQ,GACpBiL,EAAa,GACbC,EAAgBxO,EAGhBrF,EAAQ2I,GAAQ6K,GAAapO,EAAKsD,KAAK3B,IAAK,IAAK2M,GAGjDI,EAAkBpO,GAA4B,MAAjBmO,EAAwB,EAAI7R,KAAKC,UAAY,GAC1ElB,EAAMf,EAAMZ,OAeb,IAbKsU,IAMJrO,EAAmBrG,GAAWvD,GAAYuD,GAAW0U,GAO9C1V,IAAM+C,GAAgC,OAAvBzB,EAAOU,EAAOhC,IAAeA,IAAM,CACzD,GAAKwV,GAAalU,EAAO,CACxB0B,EAAI,EAMEhC,GAAWM,EAAK+D,eAAiB5H,IACtCwM,EAAa3I,GACb0O,GAAOxI,GAER,MAAUuJ,EAAUsE,EAAiBrS,KACpC,GAAK+N,EAASzP,EAAMN,GAAWvD,EAAUuS,GAAQ,CAChDvR,EAAKH,KAAMyG,EAASzD,GACpB,MAGGoU,IACJhO,EAAUoO,GAKPP,KAGGjU,GAAQyP,GAAWzP,IACzBqU,IAIIhL,GACJsG,EAAUxS,KAAM6C,IAgBnB,GATAqU,GAAgB3V,EASXuV,GAASvV,IAAM2V,EAAe,CAClC3S,EAAI,EACJ,MAAU+N,EAAUuE,EAAatS,KAChC+N,EAASE,EAAW2E,EAAY5U,EAASgP,GAG1C,GAAKrF,EAAO,CAGX,GAAoB,EAAfgL,EACJ,MAAQ3V,IACCiR,EAAWjR,IAAO4V,EAAY5V,KACrC4V,EAAY5V,GAAMkG,GAAI5H,KAAMyG,IAM/B6Q,EAAa5B,EAAU4B,GAIxBnX,EAAKD,MAAOuG,EAAS6Q,GAGhBF,IAAc/K,GAA4B,EAApBiL,EAAWxU,QACG,EAAtCuU,EAAeL,EAAYlU,QAE7BN,GAAO0N,WAAYzJ,GAUrB,OALK2Q,IACJhO,EAAUoO,EACVzO,EAAmBwO,GAGb5E,GAGFsE,EACNlJ,EAAcoJ,GACdA,KA8BO1U,SAAWA,EAEnB,OAAOuS,EAYR,SAASvH,GAAQhL,EAAUC,EAAS+D,EAAS4F,GAC5C,IAAI3K,EAAGmT,EAAQ4C,EA
mBACE,WACFtB,GAAOk7B,QAASn+B,KAAK2D,eAAkB3D,OA4BxCiD,GAAOG,GAAGmC,OAAQ,CACjBg5B,SAAU,SAAUx2B,GACnB,IAAIy2B,EAAY5kB,EAAK6kB,EAAUxuB,EAAW9N,EAAGu8B,EAE7C,OAAKr9B,EAAY0G,GACT/H,KAAKuE,KAAM,SAAUY,GAC3BlC,GAAQjD,MAAOu+B,SAAUx2B,EAAMtH,KAAMT,KAAMmF,EAAG64B,GAAUh+B,WAI1Dw+B,EAAaP,GAAgBl2B,IAEbxE,OACRvD,KAAKuE,KAAM,WAIjB,GAHAk6B,EAAWT,GAAUh+B,MACrB4Z,EAAwB,IAAlB5Z,KAAKuB,UAAoB,IAAMw8B,GAAkBU,GAAa,IAEzD,CACV,IAAMt8B,EAAI,EAAGA,EAAIq8B,EAAWj7B,OAAQpB,IACnC8N,EAAYuuB,EAAYr8B,GACnByX,EAAI/Y,QAAS,IAAMoP,EAAY,KAAQ,IAC3C2J,GAAO3J,EAAY,KAKrByuB,EAAaX,GAAkBnkB,GAC1B6kB,IAAaC,GACjB1+B,KAAKyC,aAAc,QAASi8B,MAMzB1+B,MAGR2+B,YAAa,SAAU52B,GACtB,IAAIy2B,EAAY5kB,EAAK6kB,EAAUxuB,EAAW9N,EAAGu8B,EAE7C,OAAKr9B,EAAY0G,GACT/H,KAAKuE,KAAM,SAAUY,GAC3BlC,GAAQjD,MAAO2+B,YAAa52B,EAAMtH,KAAMT,KAAMmF,EAAG64B,GAAUh+B,UAIvD0E,UAAUnB,QAIhBi7B,EAAaP,GAAgBl2B,IAEbxE,OACRvD,KAAKuE,KAAM,WAMjB,GALAk6B,EAAWT,GAAUh+B,MAGrB4Z,EAAwB,IAAlB5Z,KAAKuB,UAAoB,IAAMw8B,GAAkBU,GAAa,IAEzD,CACV,IAAMt8B,EAAI,EAAGA,EAAIq8B,EAAWj7B,OAAQpB,IAAM,CACzC8N,EAAYuuB,EAAYr8B,GAGxB,OAAgD,EAAxCyX,EAAI/Y,QAAS,IAAMoP,EAAY,KACtC2J,EAAMA,EAAIvT,QAAS,IAAM4J,EAAY,IAAK,KAK5CyuB,EAAaX,GAAkBnkB,GAC1B6kB,IAAaC,GACjB1+B,KAAKyC,aAAc,QAASi8B,MAMzB1+B,KA/BCA,KAAKyQ,KAAM,QAAS,KAkC7BmuB,YAAa,SAAU72B,EAAO82B,GAC7B,IAAIL,EAAYvuB,EAAW9N,EAAG+W,EAC7BvX,SAAcoG,EACd+2B,EAAwB,WAATn9B,GAAqBoE,MAAMC,QAAS+B,GAEpD,OAAK1G,EAAY0G,GACT/H,KAAKuE,KAAM,SAAUpC,GAC3Bc,GAAQjD,MAAO4+B,YACd72B,EAAMtH,KAAMT,KAAMmC,EAAG67B,GAAUh+B,MAAQ6+B,GACvCA,KAKsB,kBAAbA,GAA0BC,EAC9BD,EAAW7+B,KAAKu+B,SAAUx2B,GAAU/H,KAAK2+B,YAAa52B,IAG9Dy2B,EAAaP,GAAgBl2B,GAEtB/H,KAAKuE,KAAM,WACjB,GAAKu6B,EAKJ,IAFA5lB,EAAOjW,GAAQjD,MAETmC,EAAI,EAAGA,EAAIq8B,EAAWj7B,OAAQpB,IACnC8N,EAAYuuB,EAAYr8B,GAGnB+W,EAAK6lB,SAAU9uB,GACnBiJ,EAAKylB,YAAa1uB,GAElBiJ,EAAKqlB,SAAUtuB,aAKIhK,IAAV8B,GAAgC,YAATpG,KAClCsO,EAAY+tB,GAAUh+B,QAIrByhB,EAASJ,IAAKrhB,KAAM,gBAAiBiQ,GAOjCjQ,KAAKyC,cACTzC,KAAKyC,aAAc,QAClBwN,IAAuB,IAAVlI,EACZ,GACA0Z,EAASzd,IAAKhE,KAAM,kBAAqB,SAO/C++B,SAAU,SAAU77B,GACnB,IAAI+M,EAAWxM,EACdtB,EAAI,EAEL8N,EAAY,IAAM/M,EAAW,IAC7B,MAAUO,EAAOzD,KAAMmC,KACtB,GAAuB,IAAlBsB,EAAKlC,WACoE,GAA3E,IAAMw8B,GAAkBC,GAAUv6B,IAAW,KAAM5C,QAASoP,GAC9D,OAAO,EAIT,OAAO,KAOT,IAAI+uB,GAAU,MAEd/7B,GAAOG,GAAGmC,OAAQ,CACjBnD,IAAK,SAAU2F,GACd,IAAIua,EAAOle,EAAKuqB,EACflrB,EAAOzD,KAAM,GAEd,OAAM0E,UAAUnB,QA0BhBorB,EAAkBttB,EAAY0G,GAEvB/H,KAAKuE,KAAM,SAAUpC,GAC3B,IAAIC,EAEmB,IAAlBpC,KAAKuB,WAWE,OANXa,EADIusB,EACE5mB,EAAMtH,KAAMT,KAAMmC,EAAGc,GAAQjD,MAAOoC,OAEpC2F,GAKN3F,EAAM,GAEoB,iBAARA,EAClBA,GAAO,GAEI2D,MAAMC,QAAS5D,KAC1BA,EAAMa,GAAOwB,IAAKrC,EAAK,SAAU2F,GAChC,OAAgB,MAATA,EAAgB,GAAKA,EAAQ,OAItCua,EAAQrf,GAAOg8B,SAAUj/B,KAAK2B,OAAUsB,GAAOg8B,SAAUj/B,KAAKwD,SAASG,iBAGrD,QAAS2e,QAA+Crc,IAApCqc,EAAMjB,IAAKrhB,KAAMoC,EAAK,WAC3DpC,KAAK+H,MAAQ3F,OAzDTqB,GACJ6e,EAAQrf,GAAOg8B,SAAUx7B,EAAK9B,OAC7BsB,GAAOg8B,SAAUx7B,EAAKD,SAASG,iBAG/B,QAAS2e,QACgCrc,KAAvC7B,EAAMke,EAAMte,IAAKP,EAAM,UAElBW,EAMY,iBAHpBA,EAAMX,EAAKsE,OAIH3D,EAAIiC,QAAS24B,GAAS,IAIhB,MAAP56B,EAAc,GAAKA,OAG3B,KAyCHnB,GAAOsC,OAAQ,CACd05B,SAAU,CACT5Z,OAAQ,CACPrhB,IAAK,SAAUP,GAEd,IAAIrB,EAAMa,GAAO4J,KAAK4D,KAAMhN,EAAM,SAClC,OAAc,MAAPrB,EACNA,EAMA27B,GAAkB96B,GAAOV,KAAMkB,MAGlCyK,OAAQ,CACPlK,IAAK,SAAUP,GACd,IAAIsE,EAAOsd,EAAQljB,EAClBqD,EAAU/B,EAAK+B,QACfwU,EAAQvW,EAAK4Q,cACbgT,EAAoB,eAAd5jB,EAAK9B,KACX6iB,EAAS6C,EAAM,KAAO,GACtBkN,EAAMlN,EAAMrN,EAAQ,EAAIxU,EAAQjC,OAUjC,IAPCpB,EADI6X,EAAQ,EACRua,EAGAlN,EAAMrN,EAAQ,EAIX7X,EAAIoyB,EAAKpyB,IAKhB,KAJAkjB,EAAS7f,EAASrD,IAIJiS,UAAYjS,IAAM6X,KAG7BqL,EAAO9Y,YACL8Y,EAAOziB,WAAW2J,WACnB/I,GAAU6hB,EAAOziB,WAAY,aAAiB,CAMjD,GAHAmF,EAAQ9E,GAAQoiB,GAASjjB,MAGpBilB,EACJ,OAAOtf,EAI
Ryc,EAAO5jB,KAAMmH,GAIf,OAAOyc,GAGRnD,IAAK,SAAU5d,EAAMsE,GACpB,IAAIm3B,EAAW7Z,EACd7f,EAAU/B,EAAK+B,QACfgf,EAASvhB,GAAOgE,UAAWc,GAC3B5F,EAAIqD,EAAQjC,OAEb,MAAQpB,MACPkjB,EAAS7f,EAASrD,IAINiS,UACuD,EAAlEnR,GAAOkE,QAASlE,GAAOg8B,SAAS5Z,OAAOrhB,IAAKqhB,GAAUb,MAEtD0a,GAAY,GAUd,OAHMA,IACLz7B,EAAK4Q,eAAiB,GAEhBmQ,OAOXvhB,GAAOsB,KAAM,CAAE,QAAS,YAAc,WACrCtB,GAAOg8B,SAAUj/B,MAAS,CACzBqhB,IAAK,SAAU5d,EAAMsE,GACpB,GAAKhC,MAAMC,QAAS+B,GACnB,OAAStE,EAAK0Q,SAA2D,EAAjDlR,GAAOkE,QAASlE,GAAQQ,GAAOrB,MAAO2F,KAI3D3G,GAAQ+7B,UACbl6B,GAAOg8B,SAAUj/B,MAAOgE,IAAM,SAAUP,GACvC,OAAwC,OAAjCA,EAAKjB,aAAc,SAAqB,KAAOiB,EAAKsE,UAS9D,IAAI0L,GAAW1T,GAAO0T,SAElB5R,GAAQ,CAAEmG,KAAMkjB,KAAKC,OAErBgU,GAAS,KAKbl8B,GAAOm8B,SAAW,SAAU9d,GAC3B,IAAInP,EAAKktB,EACT,IAAM/d,GAAwB,iBAATA,EACpB,OAAO,KAKR,IACCnP,GAAM,IAAMpS,GAAOu/B,WAAcC,gBAAiBje,EAAM,YACvD,MAAQ3U,IAYV,OAVA0yB,EAAkBltB,GAAOA,EAAI3E,qBAAsB,eAAiB,GAC9D2E,IAAOktB,GACZp8B,GAAOsD,MAAO,iBACb84B,EACCp8B,GAAOwB,IAAK46B,EAAgB3yB,WAAY,SAAUgC,GACjD,OAAOA,EAAG5H,cACPgH,KAAM,MACVwT,IAGInP,GAIR,IAAIqtB,GAAc,kCACjBC,GAA0B,SAAU9yB,GACnCA,EAAEmb,mBAGJ7kB,GAAOsC,OAAQtC,GAAOskB,MAAO,CAE5BU,QAAS,SAAUV,EAAOjG,EAAM7d,EAAMi8B,GAErC,IAAIv9B,EAAGyX,EAAKgJ,EAAK+c,EAAYC,EAAQ/W,EAAQ9K,EAAS8hB,EACrDC,EAAY,CAAEr8B,GAAQ7D,GACtB+B,EAAOX,GAAOP,KAAM8mB,EAAO,QAAWA,EAAM5lB,KAAO4lB,EACnDkB,EAAaznB,GAAOP,KAAM8mB,EAAO,aAAgBA,EAAMlgB,UAAUc,MAAO,KAAQ,GAKjF,GAHAyR,EAAMimB,EAAcjd,EAAMnf,EAAOA,GAAQ7D,EAGlB,IAAlB6D,EAAKlC,UAAoC,IAAlBkC,EAAKlC,WAK5Bi+B,GAAY/3B,KAAM9F,EAAOsB,GAAOskB,MAAMuB,cAIf,EAAvBnnB,EAAKd,QAAS,OAIlBc,GADA8mB,EAAa9mB,EAAKwG,MAAO,MACPoG,QAClBka,EAAWpjB,QAEZu6B,EAASj+B,EAAKd,QAAS,KAAQ,GAAK,KAAOc,GAG3C4lB,EAAQA,EAAOtkB,GAAOiD,SACrBqhB,EACA,IAAItkB,GAAOmnB,MAAOzoB,EAAuB,iBAAV4lB,GAAsBA,IAGhDK,UAAY8X,EAAe,EAAI,EACrCnY,EAAMlgB,UAAYohB,EAAW3a,KAAM,KACnCyZ,EAAMuC,WAAavC,EAAMlgB,UACxB,IAAImB,OAAQ,UAAYigB,EAAW3a,KAAM,iBAAoB,WAC7D,KAGDyZ,EAAM3V,YAAS3L,EACTshB,EAAM3hB,SACX2hB,EAAM3hB,OAASnC,GAIhB6d,EAAe,MAARA,EACN,CAAEiG,GACFtkB,GAAOgE,UAAWqa,EAAM,CAAEiG,IAG3BxJ,EAAU9a,GAAOskB,MAAMxJ,QAASpc,IAAU,GACpC+9B,IAAgB3hB,EAAQkK,UAAmD,IAAxClK,EAAQkK,QAAQtnB,MAAO8C,EAAM6d,IAAtE,CAMA,IAAMoe,IAAiB3hB,EAAQ0M,WAAahpB,EAAUgC,GAAS,CAM9D,IAJAk8B,EAAa5hB,EAAQ8J,cAAgBlmB,EAC/B69B,GAAY/3B,KAAMk4B,EAAah+B,KACpCiY,EAAMA,EAAIhX,YAEHgX,EAAKA,EAAMA,EAAIhX,WACtBk9B,EAAUl/B,KAAMgZ,GAChBgJ,EAAMhJ,EAIFgJ,KAAUnf,EAAK+D,eAAiB5H,IACpCkgC,EAAUl/B,KAAMgiB,EAAIvT,aAAeuT,EAAImd,cAAgBhgC,IAKzDoC,EAAI,EACJ,OAAUyX,EAAMkmB,EAAW39B,QAAYolB,EAAMqC,uBAC5CiW,EAAcjmB,EACd2N,EAAM5lB,KAAW,EAAJQ,EACZw9B,EACA5hB,EAAQiL,UAAYrnB,GAGrBknB,GAAWpH,EAASzd,IAAK4V,EAAK,WAAcxZ,OAAOwoB,OAAQ,OAAUrB,EAAM5lB,OAC1E8f,EAASzd,IAAK4V,EAAK,YAEnBiP,EAAOloB,MAAOiZ,EAAK0H,IAIpBuH,EAAS+W,GAAUhmB,EAAKgmB,KACT/W,EAAOloB,OAASogB,EAAYnH,KAC1C2N,EAAM3V,OAASiX,EAAOloB,MAAOiZ,EAAK0H,IACZ,IAAjBiG,EAAM3V,QACV2V,EAAMS,kBA8CT,OA1CAT,EAAM5lB,KAAOA,EAGP+9B,GAAiBnY,EAAMuD,sBAEpB/M,EAAQ4H,WACqC,IAApD5H,EAAQ4H,SAAShlB,MAAOm/B,EAAUz3B,MAAOiZ,KACzCP,EAAYtd,IAIPm8B,GAAUv+B,EAAYoC,EAAM9B,MAAaF,EAAUgC,MAGvDmf,EAAMnf,EAAMm8B,MAGXn8B,EAAMm8B,GAAW,MAIlB38B,GAAOskB,MAAMuB,UAAYnnB,EAEpB4lB,EAAMqC,wBACViW,EAAYtwB,iBAAkB5N,EAAM89B,IAGrCh8B,EAAM9B,KAED4lB,EAAMqC,wBACViW,EAAYjgB,oBAAqBje,EAAM89B,IAGxCx8B,GAAOskB,MAAMuB,eAAY7iB,EAEpB2c,IACJnf,EAAMm8B,GAAWhd,IAMd2E,EAAM3V,SAKdwb,SAAU,SAAUzrB,EAAM8B,EAAM8jB,GAC/B,IAAI5a,EAAI1J,GAAOsC,OACd,IAAItC,GAAOmnB,MACX7C,EACA,CACC5lB,KAAMA,EACNypB,aAAa,IAIfnoB,GAAOskB,MAAMU,QAAStb,EAAG,KAAMlJ,MAKjCR,GAAOG,GAAGmC,OAAQ,CAEjB0iB,QAAS,SAAUtmB,EAAM2f,GACxB,OAAOthB,KAAKuE,KAAM,WACjBtB,GAAOskB,MAAMU,QAAStmB,EAAM2f,EAAMthB,SAGpCggC,eAAgB,SAAUr+B,EAAM2f,GAC/B,IAAI7d,EAAOzD,KAAM,GACj
B,GAAKyD,EACJ,OAAOR,GAAOskB,MAAMU,QAAStmB,EAAM2f,EAAM7d,GAAM,MAMlD,IACCw8B,GAAW,QACXC,GAAQ,SACRC,GAAkB,wCAClBC,GAAe,qCAEhB,SAASC,GAAa1I,EAAQr2B,EAAKg/B,EAAapmB,GAC/C,IAAIxW,EAEJ,GAAKqC,MAAMC,QAAS1E,GAGnB2B,GAAOsB,KAAMjD,EAAK,SAAUa,EAAG2Y,GACzBwlB,GAAeL,GAASx4B,KAAMkwB,GAGlCzd,EAAKyd,EAAQ7c,GAKbulB,GACC1I,EAAS,KAAqB,iBAAN7c,GAAuB,MAALA,EAAY3Y,EAAI,IAAO,IACjE2Y,EACAwlB,EACApmB,UAKG,GAAMomB,GAAiC,WAAlBx9B,EAAQxB,GAUnC4Y,EAAKyd,EAAQr2B,QAPb,IAAMoC,KAAQpC,EACb++B,GAAa1I,EAAS,IAAMj0B,EAAO,IAAKpC,EAAKoC,GAAQ48B,EAAapmB,GAYrEjX,GAAOs9B,MAAQ,SAAU73B,EAAG43B,GAC3B,IAAI3I,EACH6I,EAAI,GACJtmB,EAAM,SAAU7L,EAAKoyB,GAGpB,IAAI14B,EAAQ1G,EAAYo/B,GACvBA,IACAA,EAEDD,EAAGA,EAAEj9B,QAAWm9B,mBAAoBryB,GAAQ,IAC3CqyB,mBAA6B,MAAT34B,EAAgB,GAAKA,IAG5C,GAAU,MAALW,EACJ,MAAO,GAIR,GAAK3C,MAAMC,QAAS0C,IAASA,EAAE7E,SAAWZ,GAAO6C,cAAe4C,GAG/DzF,GAAOsB,KAAMmE,EAAG,WACfwR,EAAKla,KAAK0D,KAAM1D,KAAK+H,cAOtB,IAAM4vB,KAAUjvB,EACf23B,GAAa1I,EAAQjvB,EAAGivB,GAAU2I,EAAapmB,GAKjD,OAAOsmB,EAAE1yB,KAAM,MAGhB7K,GAAOG,GAAGmC,OAAQ,CACjBo7B,UAAW,WACV,OAAO19B,GAAOs9B,MAAOvgC,KAAK4gC,mBAE3BA,eAAgB,WACf,OAAO5gC,KAAKyE,IAAK,WAGhB,IAAI8L,EAAWtN,GAAOse,KAAMvhB,KAAM,YAClC,OAAOuQ,EAAWtN,GAAOgE,UAAWsJ,GAAavQ,OAC9C6P,OAAQ,WACX,IAAIlO,EAAO3B,KAAK2B,KAGhB,OAAO3B,KAAK0D,OAAST,GAAQjD,MAAO2Y,GAAI,cACvCynB,GAAa34B,KAAMzH,KAAKwD,YAAe28B,GAAgB14B,KAAM9F,KAC3D3B,KAAKmU,UAAY0Q,GAAepd,KAAM9F,MACtC8C,IAAK,SAAU2D,EAAI3E,GACtB,IAAIrB,EAAMa,GAAQjD,MAAOoC,MAEzB,OAAY,MAAPA,EACG,KAGH2D,MAAMC,QAAS5D,GACZa,GAAOwB,IAAKrC,EAAK,SAAUA,GACjC,MAAO,CAAEsB,KAAMD,EAAKC,KAAMqE,MAAO3F,EAAIiE,QAAS65B,GAAO,WAIhD,CAAEx8B,KAAMD,EAAKC,KAAMqE,MAAO3F,EAAIiE,QAAS65B,GAAO,WAClDl8B,SAKN,IACC68B,GAAM,OACNC,GAAQ,OACRC,GAAa,gBACbC,GAAW,6BAIXC,GAAa,iBACbC,GAAY,QAWZhH,GAAa,GAObiH,GAAa,GAGbC,GAAW,KAAK1gC,OAAQ,KAGxB2gC,GAAezhC,EAAS0C,cAAe,KAKxC,SAASg/B,GAA6BC,GAGrC,OAAO,SAAUC,EAAoB5kB,GAED,iBAAvB4kB,IACX5kB,EAAO4kB,EACPA,EAAqB,KAGtB,IAAIC,EACHt/B,EAAI,EACJu/B,EAAYF,EAAmB79B,cAAcsJ,MAAO2N,IAAmB,GAExE,GAAKvZ,EAAYub,GAGhB,MAAU6kB,EAAWC,EAAWv/B,KAGR,MAAlBs/B,EAAU,IACdA,EAAWA,EAASnhC,MAAO,IAAO,KAChCihC,EAAWE,GAAaF,EAAWE,IAAc,IAAKjf,QAAS5F,KAI/D2kB,EAAWE,GAAaF,EAAWE,IAAc,IAAK7gC,KAAMgc,IAQnE,SAAS+kB,GAA+BJ,EAAW/7B,EAASi1B,EAAiBmH,GAE5E,IAAIC,EAAY,GACfC,EAAqBP,IAAcJ,GAEpC,SAASY,EAASN,GACjB,IAAIrtB,EAcJ,OAbAytB,EAAWJ,IAAa,EACxBx+B,GAAOsB,KAAMg9B,EAAWE,IAAc,GAAI,SAAU9lB,EAAGqmB,GACtD,IAAIC,EAAsBD,EAAoBx8B,EAASi1B,EAAiBmH,GACxE,MAAoC,iBAAxBK,GACVH,GAAqBD,EAAWI,GAKtBH,IACD1tB,EAAW6tB,QADf,GAHNz8B,EAAQk8B,UAAUlf,QAASyf,GAC3BF,EAASE,IACF,KAKF7tB,EAGR,OAAO2tB,EAASv8B,EAAQk8B,UAAW,MAAUG,EAAW,MAASE,EAAS,KAM3E,SAASG,GAAYt8B,EAAQhE,GAC5B,IAAIyM,EAAKxI,EACRs8B,EAAcl/B,GAAOm/B,aAAaD,aAAe,GAElD,IAAM9zB,KAAOzM,OACQqE,IAAfrE,EAAKyM,MACP8zB,EAAa9zB,GAAQzI,EAAWC,IAAUA,EAAO,KAAUwI,GAAQzM,EAAKyM,IAO5E,OAJKxI,GACJ5C,GAAOsC,QAAQ,EAAMK,EAAQC,GAGvBD,EA/ERy7B,GAAartB,KAAOP,GAASO,KAgP7B/Q,GAAOsC,OAAQ,CAGd88B,OAAQ,EAGRC,aAAc,GACdC,KAAM,GAENH,aAAc,CACbI,IAAK/uB,GAASO,KACdrS,KAAM,MACN8gC,QAxRgB,4DAwRQh7B,KAAMgM,GAASivB,UACvCljC,QAAQ,EACRmjC,aAAa,EACbC,OAAO,EACPC,YAAa,mDAcbC,QAAS,CACR9H,IAAKoG,GACL7+B,KAAM,aACNqsB,KAAM,YACNzc,IAAK,4BACL4wB,KAAM,qCAGPtpB,SAAU,CACTtH,IAAK,UACLyc,KAAM,SACNmU,KAAM,YAGPC,eAAgB,CACf7wB,IAAK,cACL5P,KAAM,eACNwgC,KAAM,gBAKPE,WAAY,CAGXC,SAAUj3B,OAGVk3B,aAAa,EAGbC,YAAathB,KAAKC,MAGlBshB,WAAYpgC,GAAOm8B,UAOpB+C,YAAa,CACZK,KAAK,EACLr/B,SAAS,IAOXmgC,UAAW,SAAU19B,EAAQ29B,GAC5B,OAAOA,EAGNrB,GAAYA,GAAYt8B,EAAQ3C,GAAOm/B,cAAgBmB,GAGvDrB,GAAYj/B,GAAOm/B,aAAcx8B,IAGnC49B,cAAelC,GAA6BpH,IAC5CuJ,cAAenC,GAA6BH,IAG5CuC,KAAM,SAAUlB,EAAKh9B,GAGA,iBAARg9B,IACXh9B,EAAUg9B,EACVA,OAAMv8B,GAIPT,EAAUA,GAAW,GAErB,IAAIm+B,EAGHC,EAGAC
,EACAC,EAGAC,EAGAC,EAGArkB,EAGAskB,EAGA9hC,EAGA+hC,EAGA1D,EAAIv9B,GAAOqgC,UAAW,GAAI99B,GAG1B2+B,EAAkB3D,EAAEr9B,SAAWq9B,EAG/B4D,EAAqB5D,EAAEr9B,UACpBghC,EAAgB5iC,UAAY4iC,EAAgBtgC,QAC9CZ,GAAQkhC,GACRlhC,GAAOskB,MAGRvK,EAAW/Z,GAAO0Z,WAClB0nB,EAAmBphC,GAAOwY,UAAW,eAGrC6oB,EAAa9D,EAAE8D,YAAc,GAG7BC,EAAiB,GACjBC,EAAsB,GAGtBC,EAAW,WAGX7C,EAAQ,CACP7hB,WAAY,EAGZ2kB,kBAAmB,SAAUr2B,GAC5B,IAAIpB,EACJ,GAAK0S,EAAY,CAChB,IAAMmkB,EAAkB,CACvBA,EAAkB,GAClB,MAAU72B,EAAQ+zB,GAAS3zB,KAAMw2B,GAChCC,EAAiB72B,EAAO,GAAItJ,cAAgB,MACzCmgC,EAAiB72B,EAAO,GAAItJ,cAAgB,MAAS,IACrDjD,OAAQuM,EAAO,IAGpBA,EAAQ62B,EAAiBz1B,EAAI1K,cAAgB,KAE9C,OAAgB,MAATsJ,EAAgB,KAAOA,EAAMa,KAAM,OAI3C62B,sBAAuB,WACtB,OAAOhlB,EAAYkkB,EAAwB,MAI5Ce,iBAAkB,SAAUlhC,EAAMqE,GAMjC,OALkB,MAAb4X,IACJjc,EAAO8gC,EAAqB9gC,EAAKC,eAChC6gC,EAAqB9gC,EAAKC,gBAAmBD,EAC9C6gC,EAAgB7gC,GAASqE,GAEnB/H,MAIR6kC,iBAAkB,SAAUljC,GAI3B,OAHkB,MAAbge,IACJ6gB,EAAEsE,SAAWnjC,GAEP3B,MAIRskC,WAAY,SAAU7/B,GACrB,IAAIzC,EACJ,GAAKyC,EACJ,GAAKkb,EAGJiiB,EAAM7kB,OAAQtY,EAAKm9B,EAAMmD,cAIzB,IAAM/iC,KAAQyC,EACb6/B,EAAYtiC,GAAS,CAAEsiC,EAAYtiC,GAAQyC,EAAKzC,IAInD,OAAOhC,MAIRglC,MAAO,SAAUC,GAChB,IAAIC,EAAYD,GAAcR,EAK9B,OAJKd,GACJA,EAAUqB,MAAOE,GAElBp7B,EAAM,EAAGo7B,GACFllC,OAoBV,GAfAgd,EAAS1B,QAASsmB,GAKlBpB,EAAEgC,MAAUA,GAAOhC,EAAEgC,KAAO/uB,GAASO,MAAS,IAC5C3N,QAAS66B,GAAWztB,GAASivB,SAAW,MAG1ClC,EAAE7+B,KAAO6D,EAAQ6V,QAAU7V,EAAQ7D,MAAQ6+B,EAAEnlB,QAAUmlB,EAAE7+B,KAGzD6+B,EAAEkB,WAAclB,EAAEiB,UAAY,KAAM99B,cAAcsJ,MAAO2N,IAAmB,CAAE,IAGxD,MAAjB4lB,EAAE2E,YAAsB,CAC5BnB,EAAYpkC,EAAS0C,cAAe,KAKpC,IACC0hC,EAAUhwB,KAAOwsB,EAAEgC,IAInBwB,EAAUhwB,KAAOgwB,EAAUhwB,KAC3BwsB,EAAE2E,YAAc9D,GAAaqB,SAAW,KAAOrB,GAAa+D,MAC3DpB,EAAUtB,SAAW,KAAOsB,EAAUoB,KACtC,MAAQz4B,GAIT6zB,EAAE2E,aAAc,GAalB,GARK3E,EAAElf,MAAQkf,EAAEmC,aAAiC,iBAAXnC,EAAElf,OACxCkf,EAAElf,KAAOre,GAAOs9B,MAAOC,EAAElf,KAAMkf,EAAEF,cAIlCqB,GAA+BzH,GAAYsG,EAAGh7B,EAASo8B,GAGlDjiB,EACJ,OAAOiiB,EA8ER,IAAMz/B,KAzEN8hC,EAAchhC,GAAOskB,OAASiZ,EAAEhhC,SAGQ,GAApByD,GAAOo/B,UAC1Bp/B,GAAOskB,MAAMU,QAAS,aAIvBuY,EAAE7+B,KAAO6+B,EAAE7+B,KAAKif,cAGhB4f,EAAE6E,YAAcpE,GAAWx5B,KAAM+4B,EAAE7+B,MAKnCiiC,EAAWpD,EAAEgC,IAAIn8B,QAASy6B,GAAO,IAG3BN,EAAE6E,WAwBI7E,EAAElf,MAAQkf,EAAEmC,aACoD,KAAzEnC,EAAEqC,aAAe,IAAKhiC,QAAS,uCACjC2/B,EAAElf,KAAOkf,EAAElf,KAAKjb,QAASw6B,GAAK,OAvB9BqD,EAAW1D,EAAEgC,IAAIliC,MAAOsjC,EAASrgC,QAG5Bi9B,EAAElf,OAAUkf,EAAEmC,aAAiC,iBAAXnC,EAAElf,QAC1CsiB,IAAczE,GAAO13B,KAAMm8B,GAAa,IAAM,KAAQpD,EAAElf,YAGjDkf,EAAElf,OAIO,IAAZkf,EAAEpyB,QACNw1B,EAAWA,EAASv9B,QAAS06B,GAAY,MACzCmD,GAAa/E,GAAO13B,KAAMm8B,GAAa,IAAM,KAAQ,KAAS/hC,GAAMmG,OACnEk8B,GAIF1D,EAAEgC,IAAMoB,EAAWM,GASf1D,EAAE8E,aACDriC,GAAOq/B,aAAcsB,IACzBhC,EAAMgD,iBAAkB,oBAAqB3hC,GAAOq/B,aAAcsB,IAE9D3gC,GAAOs/B,KAAMqB,IACjBhC,EAAMgD,iBAAkB,gBAAiB3hC,GAAOs/B,KAAMqB,MAKnDpD,EAAElf,MAAQkf,EAAE6E,aAAgC,IAAlB7E,EAAEqC,aAAyBr9B,EAAQq9B,cACjEjB,EAAMgD,iBAAkB,eAAgBpE,EAAEqC,aAI3CjB,EAAMgD,iBACL,SACApE,EAAEkB,UAAW,IAAOlB,EAAEsC,QAAStC,EAAEkB,UAAW,IAC3ClB,EAAEsC,QAAStC,EAAEkB,UAAW,KACA,MAArBlB,EAAEkB,UAAW,GAAc,KAAON,GAAW,WAAa,IAC7DZ,EAAEsC,QAAS,MAIFtC,EAAE+E,QACZ3D,EAAMgD,iBAAkBziC,EAAGq+B,EAAE+E,QAASpjC,IAIvC,GAAKq+B,EAAEgF,cAC+C,IAAnDhF,EAAEgF,WAAW/kC,KAAM0jC,EAAiBvC,EAAOpB,IAAiB7gB,GAG9D,OAAOiiB,EAAMoD,QAed,GAXAP,EAAW,QAGXJ,EAAiBnqB,IAAKsmB,EAAE3F,UACxB+G,EAAM93B,KAAM02B,EAAEiF,SACd7D,EAAMrmB,KAAMilB,EAAEj6B,OAGdo9B,EAAYhC,GAA+BR,GAAYX,EAAGh7B,EAASo8B,GAK5D,CASN,GARAA,EAAM7hB,WAAa,EAGdkkB,GACJG,EAAmBnc,QAAS,WAAY,CAAE2Z,EAAOpB,IAI7C7gB,EACJ,OAAOiiB,EAIHpB,EAAEoC,OAAqB,EAAZpC,EAAEvD,UACjB8G,EAAehkC,GAAO2e,WAAY,WACjCkjB,EAAMoD,MAAO,YACXxE,EAAEvD,UAGN,IACCtd,GAAY,EACZgkB,EAAU+B,KAAMnB,EAAgBz6B,GAC/B,MA
AQ6C,GAGT,GAAKgT,EACJ,MAAMhT,EAIP7C,GAAO,EAAG6C,SAhCX7C,GAAO,EAAG,gBAqCX,SAASA,EAAMi7B,EAAQY,EAAkBC,EAAWL,GACnD,IAAIM,EAAWJ,EAASl/B,EAAOu/B,EAAUC,EACxCd,EAAaU,EAGThmB,IAILA,GAAY,EAGPokB,GACJhkC,GAAOm9B,aAAc6G,GAKtBJ,OAAY19B,EAGZ49B,EAAwB0B,GAAW,GAGnC3D,EAAM7hB,WAAsB,EAATglB,EAAa,EAAI,EAGpCc,EAAsB,KAAVd,GAAiBA,EAAS,KAAkB,MAAXA,EAGxCa,IACJE,EA7lBJ,SAA8BtF,EAAGoB,EAAOgE,GAEvC,IAAII,EAAIrkC,EAAMskC,EAAeC,EAC5BzsB,EAAW+mB,EAAE/mB,SACbioB,EAAYlB,EAAEkB,UAGf,MAA2B,MAAnBA,EAAW,GAClBA,EAAUnzB,aACEtI,IAAP+/B,IACJA,EAAKxF,EAAEsE,UAAYlD,EAAM8C,kBAAmB,iBAK9C,GAAKsB,EACJ,IAAMrkC,KAAQ8X,EACb,GAAKA,EAAU9X,IAAU8X,EAAU9X,GAAO8F,KAAMu+B,GAAO,CACtDtE,EAAUlf,QAAS7gB,GACnB,MAMH,GAAK+/B,EAAW,KAAOkE,EACtBK,EAAgBvE,EAAW,OACrB,CAGN,IAAM//B,KAAQikC,EAAY,CACzB,IAAMlE,EAAW,IAAOlB,EAAEyC,WAAYthC,EAAO,IAAM+/B,EAAW,IAAQ,CACrEuE,EAAgBtkC,EAChB,MAEKukC,IACLA,EAAgBvkC,GAKlBskC,EAAgBA,GAAiBC,EAMlC,GAAKD,EAIJ,OAHKA,IAAkBvE,EAAW,IACjCA,EAAUlf,QAASyjB,GAEbL,EAAWK,GA0iBLE,CAAqB3F,EAAGoB,EAAOgE,KAIrCC,IACsC,EAA3C5iC,GAAOkE,QAAS,SAAUq5B,EAAEkB,YAC5Bz+B,GAAOkE,QAAS,OAAQq5B,EAAEkB,WAAc,IACxClB,EAAEyC,WAAY,eAAkB,cAIjC6C,EA9iBH,SAAsBtF,EAAGsF,EAAUlE,EAAOiE,GACzC,IAAIO,EAAOC,EAASC,EAAM1jB,EAAKlJ,EAC9BupB,EAAa,GAGbvB,EAAYlB,EAAEkB,UAAUphC,QAGzB,GAAKohC,EAAW,GACf,IAAM4E,KAAQ9F,EAAEyC,WACfA,EAAYqD,EAAK3iC,eAAkB68B,EAAEyC,WAAYqD,GAInDD,EAAU3E,EAAUnzB,QAGpB,MAAQ83B,EAcP,GAZK7F,EAAEwC,eAAgBqD,KACtBzE,EAAOpB,EAAEwC,eAAgBqD,IAAcP,IAIlCpsB,GAAQmsB,GAAarF,EAAE+F,aAC5BT,EAAWtF,EAAE+F,WAAYT,EAAUtF,EAAEiB,WAGtC/nB,EAAO2sB,EACPA,EAAU3E,EAAUnzB,QAKnB,GAAiB,MAAZ83B,EAEJA,EAAU3sB,OAGJ,GAAc,MAATA,GAAgBA,IAAS2sB,EAAU,CAM9C,KAHAC,EAAOrD,EAAYvpB,EAAO,IAAM2sB,IAAapD,EAAY,KAAOoD,IAI/D,IAAMD,KAASnD,EAId,IADArgB,EAAMwjB,EAAMj+B,MAAO,MACT,KAAQk+B,IAGjBC,EAAOrD,EAAYvpB,EAAO,IAAMkJ,EAAK,KACpCqgB,EAAY,KAAOrgB,EAAK,KACb,EAGG,IAAT0jB,EACJA,EAAOrD,EAAYmD,IAGgB,IAAxBnD,EAAYmD,KACvBC,EAAUzjB,EAAK,GACf8e,EAAUlf,QAASI,EAAK,KAEzB,MAOJ,IAAc,IAAT0jB,EAGJ,GAAKA,GAAQ9F,EAAEgG,UACdV,EAAWQ,EAAMR,QAEjB,IACCA,EAAWQ,EAAMR,GAChB,MAAQn5B,GACT,MAAO,CACNmQ,MAAO,cACPvW,MAAO+/B,EAAO35B,EAAI,sBAAwB+M,EAAO,OAAS2sB,IASjE,MAAO,CAAEvpB,MAAO,UAAWwE,KAAMwkB,GAidpBW,CAAajG,EAAGsF,EAAUlE,EAAOiE,GAGvCA,GAGCrF,EAAE8E,cACNS,EAAWnE,EAAM8C,kBAAmB,oBAEnCzhC,GAAOq/B,aAAcsB,GAAamC,IAEnCA,EAAWnE,EAAM8C,kBAAmB,WAEnCzhC,GAAOs/B,KAAMqB,GAAamC,IAKZ,MAAXhB,GAA6B,SAAXvE,EAAE7+B,KACxBsjC,EAAa,YAGS,MAAXF,EACXE,EAAa,eAIbA,EAAaa,EAAShpB,MACtB2oB,EAAUK,EAASxkB,KAEnBukB,IADAt/B,EAAQu/B,EAASv/B,UAMlBA,EAAQ0+B,GACHF,GAAWE,IACfA,EAAa,QACRF,EAAS,IACbA,EAAS,KAMZnD,EAAMmD,OAASA,EACfnD,EAAMqD,YAAeU,GAAoBV,GAAe,GAGnDY,EACJ7oB,EAASoB,YAAa+lB,EAAiB,CAAEsB,EAASR,EAAYrD,IAE9D5kB,EAASuB,WAAY4lB,EAAiB,CAAEvC,EAAOqD,EAAY1+B,IAI5Dq7B,EAAM0C,WAAYA,GAClBA,OAAar+B,EAERg+B,GACJG,EAAmBnc,QAAS4d,EAAY,cAAgB,YACvD,CAAEjE,EAAOpB,EAAGqF,EAAYJ,EAAUl/B,IAIpC89B,EAAiB3nB,SAAUynB,EAAiB,CAAEvC,EAAOqD,IAEhDhB,IACJG,EAAmBnc,QAAS,eAAgB,CAAE2Z,EAAOpB,MAG3Cv9B,GAAOo/B,QAChBp/B,GAAOskB,MAAMU,QAAS,cAKzB,OAAO2Z,GAGR8E,QAAS,SAAUlE,EAAKlhB,EAAM9c,GAC7B,OAAOvB,GAAOe,IAAKw+B,EAAKlhB,EAAM9c,EAAU,SAGzCmiC,UAAW,SAAUnE,EAAKh+B,GACzB,OAAOvB,GAAOe,IAAKw+B,OAAKv8B,EAAWzB,EAAU,aAI/CvB,GAAOsB,KAAM,CAAE,MAAO,QAAU,SAAU6D,EAAIiT,GAC7CpY,GAAQoY,GAAW,SAAUmnB,EAAKlhB,EAAM9c,EAAU7C,GAUjD,OAPKN,EAAYigB,KAChB3f,EAAOA,GAAQ6C,EACfA,EAAW8c,EACXA,OAAOrb,GAIDhD,GAAOygC,KAAMzgC,GAAOsC,OAAQ,CAClCi9B,IAAKA,EACL7gC,KAAM0Z,EACNomB,SAAU9/B,EACV2f,KAAMA,EACNmkB,QAASjhC,GACPvB,GAAO6C,cAAe08B,IAASA,OAIpCv/B,GAAOugC,cAAe,SAAUhD,GAC/B,IAAIr+B,EACJ,IAAMA,KAAKq+B,EAAE+E,QACa,iBAApBpjC,EAAEwB,gBACN68B,EAAEqC,YAAcrC,EAAE+E,QAASpjC,IAAO,MAMrCc,GAAO4rB,SAAW,SAAU2T,EAAKh9B,EAAStD,GACzC,OAAOe,GAAOygC
,KAAM,CACnBlB,IAAKA,EAGL7gC,KAAM,MACN8/B,SAAU,SACVrzB,OAAO,EACPw0B,OAAO,EACPpjC,QAAQ,EAKRyjC,WAAY,CACX2D,cAAe,cAEhBL,WAAY,SAAUT,GACrB7iC,GAAO4D,WAAYi/B,EAAUtgC,EAAStD,OAMzCe,GAAOG,GAAGmC,OAAQ,CACjBshC,QAAS,SAAUjY,GAClB,IAAIlI,EAyBJ,OAvBK1mB,KAAM,KACLqB,EAAYutB,KAChBA,EAAOA,EAAKnuB,KAAMT,KAAM,KAIzB0mB,EAAOzjB,GAAQ2rB,EAAM5uB,KAAM,GAAIwH,eAAgB5C,GAAI,GAAIe,OAAO,GAEzD3F,KAAM,GAAI4C,YACd8jB,EAAK8I,aAAcxvB,KAAM,IAG1B0mB,EAAKjiB,IAAK,WACT,IAAIhB,EAAOzD,KAEX,MAAQyD,EAAKqjC,kBACZrjC,EAAOA,EAAKqjC,kBAGb,OAAOrjC,IACJ6rB,OAAQtvB,OAGNA,MAGR+mC,UAAW,SAAUnY,GACpB,OAAKvtB,EAAYutB,GACT5uB,KAAKuE,KAAM,SAAUpC,GAC3Bc,GAAQjD,MAAO+mC,UAAWnY,EAAKnuB,KAAMT,KAAMmC,MAItCnC,KAAKuE,KAAM,WACjB,IAAI2U,EAAOjW,GAAQjD,MAClByZ,EAAWP,EAAKO,WAEZA,EAASlW,OACbkW,EAASotB,QAASjY,GAGlB1V,EAAKoW,OAAQV,MAKhBlI,KAAM,SAAUkI,GACf,IAAIoY,EAAiB3lC,EAAYutB,GAEjC,OAAO5uB,KAAKuE,KAAM,SAAUpC,GAC3Bc,GAAQjD,MAAO6mC,QAASG,EAAiBpY,EAAKnuB,KAAMT,KAAMmC,GAAMysB,MAIlEqY,OAAQ,SAAU/jC,GAIjB,OAHAlD,KAAKuS,OAAQrP,GAAW+P,IAAK,QAAS1O,KAAM,WAC3CtB,GAAQjD,MAAO2vB,YAAa3vB,KAAK0M,cAE3B1M,QAKTiD,GAAOqN,KAAK9F,QAAQ6uB,OAAS,SAAU51B,GACtC,OAAQR,GAAOqN,KAAK9F,QAAQ08B,QAASzjC,IAEtCR,GAAOqN,KAAK9F,QAAQ08B,QAAU,SAAUzjC,GACvC,SAAWA,EAAK0uB,aAAe1uB,EAAK6vB,cAAgB7vB,EAAK4xB,iBAAiB9xB,SAM3EN,GAAOm/B,aAAa+E,IAAM,WACzB,IACC,OAAO,IAAIpnC,GAAOqnC,eACjB,MAAQz6B,MAGX,IAAI06B,GAAmB,CAGrBC,EAAG,IAIHC,KAAM,KAEPC,GAAevkC,GAAOm/B,aAAa+E,MAEpC/lC,GAAQqmC,OAASD,IAAkB,oBAAqBA,GACxDpmC,GAAQsiC,KAAO8D,KAAiBA,GAEhCvkC,GAAOwgC,cAAe,SAAUj+B,GAC/B,IAAIhB,EAAUkjC,EAGd,GAAKtmC,GAAQqmC,MAAQD,KAAiBhiC,EAAQ2/B,YAC7C,MAAO,CACNO,KAAM,SAAUH,EAAS1K,GACxB,IAAI14B,EACHglC,EAAM3hC,EAAQ2hC,MAWf,GATAA,EAAIQ,KACHniC,EAAQ7D,KACR6D,EAAQg9B,IACRh9B,EAAQo9B,MACRp9B,EAAQoiC,SACRpiC,EAAQyP,UAIJzP,EAAQqiC,UACZ,IAAM1lC,KAAKqD,EAAQqiC,UAClBV,EAAKhlC,GAAMqD,EAAQqiC,UAAW1lC,GAmBhC,IAAMA,KAdDqD,EAAQs/B,UAAYqC,EAAItC,kBAC5BsC,EAAItC,iBAAkBr/B,EAAQs/B,UAQzBt/B,EAAQ2/B,aAAgBI,EAAS,sBACtCA,EAAS,oBAAuB,kBAItBA,EACV4B,EAAIvC,iBAAkBziC,EAAGojC,EAASpjC,IAInCqC,EAAW,SAAU7C,GACpB,OAAO,WACD6C,IACJA,EAAWkjC,EAAgBP,EAAIW,OAC9BX,EAAIY,QAAUZ,EAAIa,QAAUb,EAAIc,UAC/Bd,EAAIe,mBAAqB,KAEb,UAATvmC,EACJwlC,EAAInC,QACgB,UAATrjC,EAKgB,iBAAfwlC,EAAIpC,OACflK,EAAU,EAAG,SAEbA,EAGCsM,EAAIpC,OACJoC,EAAIlC,YAINpK,EACCwM,GAAkBF,EAAIpC,SAAYoC,EAAIpC,OACtCoC,EAAIlC,WAK+B,UAAjCkC,EAAIgB,cAAgB,SACM,iBAArBhB,EAAIiB,aACV,CAAEC,OAAQlB,EAAIrB,UACd,CAAEvjC,KAAM4kC,EAAIiB,cACbjB,EAAIxC,4BAQTwC,EAAIW,OAAStjC,IACbkjC,EAAgBP,EAAIY,QAAUZ,EAAIc,UAAYzjC,EAAU,cAKnCyB,IAAhBkhC,EAAIa,QACRb,EAAIa,QAAUN,EAEdP,EAAIe,mBAAqB,WAGA,IAAnBf,EAAIpnB,YAMRhgB,GAAO2e,WAAY,WACbla,GACJkjC,OAQLljC,EAAWA,EAAU,SAErB,IAGC2iC,EAAIzB,KAAMlgC,EAAQ6/B,YAAc7/B,EAAQ8b,MAAQ,MAC/C,MAAQ3U,GAGT,GAAKnI,EACJ,MAAMmI,IAKTq4B,MAAO,WACDxgC,GACJA,QAWLvB,GAAOugC,cAAe,SAAUhD,GAC1BA,EAAE2E,cACN3E,EAAE/mB,SAASpX,QAAS,KAKtBY,GAAOqgC,UAAW,CACjBR,QAAS,CACRzgC,OAAQ,6FAGToX,SAAU,CACTpX,OAAQ,2BAET4gC,WAAY,CACX2D,cAAe,SAAUrkC,GAExB,OADAU,GAAO4D,WAAYtE,GACZA,MAMVU,GAAOugC,cAAe,SAAU,SAAUhD,QACxBv6B,IAAZu6B,EAAEpyB,QACNoyB,EAAEpyB,OAAQ,GAENoyB,EAAE2E,cACN3E,EAAE7+B,KAAO,SAKXsB,GAAOwgC,cAAe,SAAU,SAAUjD,GAIxC,IAAIn+B,EAAQmC,EADb,GAAKg8B,EAAE2E,aAAe3E,EAAE8H,YAEvB,MAAO,CACN5C,KAAM,SAAU/pB,EAAGkf,GAClBx4B,EAASY,GAAQ,YACfwN,KAAM+vB,EAAE8H,aAAe,IACvB/mB,KAAM,CAAEgnB,QAAS/H,EAAEgI,cAAe5mC,IAAK4+B,EAAEgC,MACzCrb,GAAI,aAAc3iB,EAAW,SAAUikC,GACvCpmC,EAAOka,SACP/X,EAAW,KACNikC,GACJ5N,EAAuB,UAAb4N,EAAI9mC,KAAmB,IAAM,IAAK8mC,EAAI9mC,QAKnD/B,EAAS8C,KAAKC,YAAaN,EAAQ,KAEpC2iC,MAAO,WACDxgC,GACJA,QAUL,IAqGKigB,GArGDikB,GAAe,GAClBC,GAAS,oBAGV1lC,GAAOqgC,UAAW,CACjBsF,MAAO,WACPC,cAAe,WACd,IAAIrkC,EAAWkkC,GAAargC,OAAWpF,GAAOiD,QAAU,IAA
QrE,GAAMmG,OAEtE,OADAhI,KAAMwE,IAAa,EACZA,KAKTvB,GAAOugC,cAAe,aAAc,SAAUhD,EAAGsI,EAAkBlH,GAElE,IAAImH,EAAcC,EAAaC,EAC9BC,GAAuB,IAAZ1I,EAAEoI,QAAqBD,GAAOlhC,KAAM+4B,EAAEgC,KAChD,MACkB,iBAAXhC,EAAElf,MAE6C,KADnDkf,EAAEqC,aAAe,IACjBhiC,QAAS,sCACX8nC,GAAOlhC,KAAM+4B,EAAElf,OAAU,QAI5B,GAAK4nB,GAAiC,UAArB1I,EAAEkB,UAAW,GA8D7B,OA3DAqH,EAAevI,EAAEqI,cAAgBxnC,EAAYm/B,EAAEqI,eAC9CrI,EAAEqI,gBACFrI,EAAEqI,cAGEK,EACJ1I,EAAG0I,GAAa1I,EAAG0I,GAAW7iC,QAASsiC,GAAQ,KAAOI,IAC/B,IAAZvI,EAAEoI,QACbpI,EAAEgC,MAASrD,GAAO13B,KAAM+4B,EAAEgC,KAAQ,IAAM,KAAQhC,EAAEoI,MAAQ,IAAMG,GAIjEvI,EAAEyC,WAAY,eAAkB,WAI/B,OAHMgG,GACLhmC,GAAOsD,MAAOwiC,EAAe,mBAEvBE,EAAmB,IAI3BzI,EAAEkB,UAAW,GAAM,OAGnBsH,EAAcjpC,GAAQgpC,GACtBhpC,GAAQgpC,GAAiB,WACxBE,EAAoBvkC,WAIrBk9B,EAAM7kB,OAAQ,gBAGQ9W,IAAhB+iC,EACJ/lC,GAAQlD,IAASm+B,WAAY6K,GAI7BhpC,GAAQgpC,GAAiBC,EAIrBxI,EAAGuI,KAGPvI,EAAEqI,cAAgBC,EAAiBD,cAGnCH,GAAa9nC,KAAMmoC,IAIfE,GAAqB5nC,EAAY2nC,IACrCA,EAAaC,EAAmB,IAGjCA,EAAoBD,OAAc/iC,IAI5B,WAYT7E,GAAQ+nC,qBACH1kB,GAAO7kB,EAASwpC,eAAeD,mBAAoB,IAAK1kB,MACvDtU,UAAY,6BACiB,IAA3BsU,GAAK/X,WAAWnJ,QAQxBN,GAAOmW,UAAY,SAAUkI,EAAMne,EAASkmC,GAC3C,MAAqB,iBAAT/nB,EACJ,IAEgB,kBAAZne,IACXkmC,EAAclmC,EACdA,GAAU,GAKLA,IAIA/B,GAAQ+nC,qBAMZxzB,GALAxS,EAAUvD,EAASwpC,eAAeD,mBAAoB,KAKvC7mC,cAAe,SACzB0R,KAAOpU,EAAS6T,SAASO,KAC9B7Q,EAAQT,KAAKC,YAAagT,IAE1BxS,EAAUvD,GAKZ2mB,GAAW8iB,GAAe,IAD1BC,EAASvwB,EAAW1L,KAAMiU,IAKlB,CAAEne,EAAQb,cAAegnC,EAAQ,MAGzCA,EAAShjB,GAAe,CAAEhF,GAAQne,EAASojB,GAEtCA,GAAWA,EAAQhjB,QACvBN,GAAQsjB,GAAUhK,SAGZtZ,GAAOoB,MAAO,GAAIilC,EAAO58B,cAlChC,IAAIiJ,EAAM2zB,EAAQ/iB,GAyCnBtjB,GAAOG,GAAGonB,KAAO,SAAUgY,EAAK+G,EAAQ/kC,GACvC,IAAItB,EAAUvB,EAAMmkC,EACnB5sB,EAAOlZ,KACPwnB,EAAMgb,EAAI3hC,QAAS,KAsDpB,OApDY,EAAP2mB,IACJtkB,EAAW66B,GAAkByE,EAAIliC,MAAOknB,IACxCgb,EAAMA,EAAIliC,MAAO,EAAGknB,IAIhBnmB,EAAYkoC,IAGhB/kC,EAAW+kC,EACXA,OAAStjC,GAGEsjC,GAA4B,iBAAXA,IAC5B5nC,EAAO,QAIW,EAAduX,EAAK3V,QACTN,GAAOygC,KAAM,CACZlB,IAAKA,EAKL7gC,KAAMA,GAAQ,MACd8/B,SAAU,OACVngB,KAAMioB,IACHz/B,KAAM,SAAUs+B,GAGnBtC,EAAWphC,UAEXwU,EAAK0V,KAAM1rB,EAIVD,GAAQ,SAAUqsB,OAAQrsB,GAAOmW,UAAWgvB,IAAiBv7B,KAAM3J,GAGnEklC,KAKErrB,OAAQvY,GAAY,SAAUo9B,EAAOmD,GACxC7rB,EAAK3U,KAAM,WACVC,EAAS7D,MAAOX,KAAM8lC,GAAY,CAAElE,EAAMwG,aAAcrD,EAAQnD,QAK5D5hC,MAMRiD,GAAOqN,KAAK9F,QAAQg/B,SAAW,SAAU/lC,GACxC,OAAOR,GAAO8B,KAAM9B,GAAOo5B,OAAQ,SAAUj5B,GAC5C,OAAOK,IAASL,EAAGK,OAChBF,QAMLN,GAAOwmC,OAAS,CACfC,UAAW,SAAUjmC,EAAM+B,EAASrD,GACnC,IAAIwnC,EAAaC,EAASC,EAAWC,EAAQC,EAAWC,EACvD/X,EAAWhvB,GAAOwgB,IAAKhgB,EAAM,YAC7BwmC,EAAUhnC,GAAQQ,GAClBonB,EAAQ,GAGS,WAAboH,IACJxuB,EAAK8f,MAAM0O,SAAW,YAGvB8X,EAAYE,EAAQR,SACpBI,EAAY5mC,GAAOwgB,IAAKhgB,EAAM,OAC9BumC,EAAa/mC,GAAOwgB,IAAKhgB,EAAM,SACI,aAAbwuB,GAAwC,UAAbA,KACA,GAA9C4X,EAAYG,GAAanpC,QAAS,SAMpCipC,GADAH,EAAcM,EAAQhY,YACD3iB,IACrBs6B,EAAUD,EAAYpS,OAGtBuS,EAASxX,WAAYuX,IAAe,EACpCD,EAAUtX,WAAY0X,IAAgB,GAGlC3oC,EAAYmE,KAGhBA,EAAUA,EAAQ/E,KAAMgD,EAAMtB,EAAGc,GAAOsC,OAAQ,GAAIwkC,KAGjC,MAAfvkC,EAAQ8J,MACZub,EAAMvb,IAAQ9J,EAAQ8J,IAAMy6B,EAAUz6B,IAAQw6B,GAE1B,MAAhBtkC,EAAQ+xB,OACZ1M,EAAM0M,KAAS/xB,EAAQ+xB,KAAOwS,EAAUxS,KAASqS,GAG7C,UAAWpkC,EACfA,EAAQ0kC,MAAMzpC,KAAMgD,EAAMonB,GAG1Bof,EAAQxmB,IAAKoH,KAKhB5nB,GAAOG,GAAGmC,OAAQ,CAGjBkkC,OAAQ,SAAUjkC,GAGjB,GAAKd,UAAUnB,OACd,YAAmB0C,IAAZT,EACNxF,KACAA,KAAKuE,KAAM,SAAUpC,GACpBc,GAAOwmC,OAAOC,UAAW1pC,KAAMwF,EAASrD,KAI3C,IAAIgoC,EAAMC,EACT3mC,EAAOzD,KAAM,GAEd,OAAMyD,EAQAA,EAAK4xB,iBAAiB9xB,QAK5B4mC,EAAO1mC,EAAK4zB,wBACZ+S,EAAM3mC,EAAK+D,cAAc6H,YAClB,CACNC,IAAK66B,EAAK76B,IAAM86B,EAAIC,YACpB9S,KAAM4S,EAAK5S,KAAO6S,EAAIE,cARf,CAAEh7B,IAAK,EAAGioB,KAAM,QATxB,GAuBDtF,SAAU,WACT,GAAMjyB,KAAM,GAAZ,CAIA,IAAIuqC,EAAcd,EAAQ
vnC,EACzBuB,EAAOzD,KAAM,GACbwqC,EAAe,CAAEl7B,IAAK,EAAGioB,KAAM,GAGhC,GAAwC,UAAnCt0B,GAAOwgB,IAAKhgB,EAAM,YAGtBgmC,EAAShmC,EAAK4zB,4BAER,CACNoS,EAASzpC,KAAKypC,SAIdvnC,EAAMuB,EAAK+D,cACX+iC,EAAe9mC,EAAK8mC,cAAgBroC,EAAI6E,gBACxC,MAAQwjC,IACLA,IAAiBroC,EAAIuiB,MAAQ8lB,IAAiBroC,EAAI6E,kBACT,WAA3C9D,GAAOwgB,IAAK8mB,EAAc,YAE1BA,EAAeA,EAAa3nC,WAExB2nC,GAAgBA,IAAiB9mC,GAAkC,IAA1B8mC,EAAahpC,YAG1DipC,EAAevnC,GAAQsnC,GAAed,UACzBn6B,KAAOrM,GAAOwgB,IAAK8mB,EAAc,kBAAkB,GAChEC,EAAajT,MAAQt0B,GAAOwgB,IAAK8mB,EAAc,mBAAmB,IAKpE,MAAO,CACNj7B,IAAKm6B,EAAOn6B,IAAMk7B,EAAal7B,IAAMrM,GAAOwgB,IAAKhgB,EAAM,aAAa,GACpE8zB,KAAMkS,EAAOlS,KAAOiT,EAAajT,KAAOt0B,GAAOwgB,IAAKhgB,EAAM,cAAc,MAc1E8mC,aAAc,WACb,OAAOvqC,KAAKyE,IAAK,WAChB,IAAI8lC,EAAevqC,KAAKuqC,aAExB,MAAQA,GAA2D,WAA3CtnC,GAAOwgB,IAAK8mB,EAAc,YACjDA,EAAeA,EAAaA,aAG7B,OAAOA,GAAgBxjC,OAM1B9D,GAAOsB,KAAM,CAAEk0B,WAAY,cAAeD,UAAW,eAAiB,SAAUnd,EAAQkG,GACvF,IAAIjS,EAAM,gBAAkBiS,EAE5Bte,GAAOG,GAAIiY,GAAW,SAAUjZ,GAC/B,OAAO6d,EAAQjgB,KAAM,SAAUyD,EAAM4X,EAAQjZ,GAG5C,IAAIgoC,EAOJ,GANK3oC,EAAUgC,GACd2mC,EAAM3mC,EACuB,IAAlBA,EAAKlC,WAChB6oC,EAAM3mC,EAAK4L,kBAGCpJ,IAAR7D,EACJ,OAAOgoC,EAAMA,EAAK7oB,GAAS9d,EAAM4X,GAG7B+uB,EACJA,EAAIK,SACFn7B,EAAY86B,EAAIE,YAAVloC,EACPkN,EAAMlN,EAAMgoC,EAAIC,aAIjB5mC,EAAM4X,GAAWjZ,GAEhBiZ,EAAQjZ,EAAKsC,UAAUnB,WAU5BN,GAAOsB,KAAM,CAAE,MAAO,QAAU,SAAU6D,EAAImZ,GAC7Cte,GAAOuyB,SAAUjU,GAAS4P,GAAc/vB,GAAQuxB,cAC/C,SAAUlvB,EAAMmtB,GACf,GAAKA,EAIJ,OAHAA,EAAWD,GAAQltB,EAAM8d,GAGlB4O,GAAU1oB,KAAMmpB,GACtB3tB,GAAQQ,GAAOwuB,WAAY1Q,GAAS,KACpCqP,MAQL3tB,GAAOsB,KAAM,CAAEmmC,OAAQ,SAAUC,MAAO,SAAW,SAAUjnC,EAAM/B,GAClEsB,GAAOsB,KAAM,CACZkzB,QAAS,QAAU/zB,EACnBgX,QAAS/Y,EACTipC,GAAI,QAAUlnC,GACZ,SAAUmnC,EAAcC,GAG1B7nC,GAAOG,GAAI0nC,GAAa,SAAUtT,EAAQzvB,GACzC,IAAImY,EAAYxb,UAAUnB,SAAYsnC,GAAkC,kBAAXrT,GAC5D1C,EAAQ+V,KAA6B,IAAXrT,IAA6B,IAAVzvB,EAAiB,SAAW,UAE1E,OAAOkY,EAAQjgB,KAAM,SAAUyD,EAAM9B,EAAMoG,GAC1C,IAAI7F,EAEJ,OAAKT,EAAUgC,GAGyB,IAAhCqnC,EAASjqC,QAAS,SACxB4C,EAAM,QAAUC,GAChBD,EAAK7D,SAASmH,gBAAiB,SAAWrD,GAIrB,IAAlBD,EAAKlC,UACTW,EAAMuB,EAAKsD,gBAIJZ,KAAKouB,IACX9wB,EAAKghB,KAAM,SAAW/gB,GAAQxB,EAAK,SAAWwB,GAC9CD,EAAKghB,KAAM,SAAW/gB,GAAQxB,EAAK,SAAWwB,GAC9CxB,EAAK,SAAWwB,UAIDuC,IAAV8B,EAGN9E,GAAOwgB,IAAKhgB,EAAM9B,EAAMmzB,GAGxB7xB,GAAOsgB,MAAO9f,EAAM9B,EAAMoG,EAAO+sB,IAChCnzB,EAAMue,EAAYsX,OAASvxB,EAAWia,QAM5Cjd,GAAOsB,KAAM,CACZ,YACA,WACA,eACA,YACA,cACA,YACE,SAAU6D,EAAIzG,GAChBsB,GAAOG,GAAIzB,GAAS,SAAUyB,GAC7B,OAAOpD,KAAKmnB,GAAIxlB,EAAMyB,MAOxBH,GAAOG,GAAGmC,OAAQ,CAEjBq1B,KAAM,SAAUxT,EAAO9F,EAAMle,GAC5B,OAAOpD,KAAKmnB,GAAIC,EAAO,KAAM9F,EAAMle,IAEpC2nC,OAAQ,SAAU3jB,EAAOhkB,GACxB,OAAOpD,KAAKwnB,IAAKJ,EAAO,KAAMhkB,IAG/B4nC,SAAU,SAAU9nC,EAAUkkB,EAAO9F,EAAMle,GAC1C,OAAOpD,KAAKmnB,GAAIC,EAAOlkB,EAAUoe,EAAMle,IAExC6nC,WAAY,SAAU/nC,EAAUkkB,EAAOhkB,GAGtC,OAA4B,IAArBsB,UAAUnB,OAChBvD,KAAKwnB,IAAKtkB,EAAU,MACpBlD,KAAKwnB,IAAKJ,EAAOlkB,GAAY,KAAME,IAGrC8nC,MAAO,SAAUC,EAAQC,GACxB,OAAOprC,KACLmnB,GAAI,aAAcgkB,GAClBhkB,GAAI,aAAcikB,GAASD,MAI/BloC,GAAOsB,KACN,wLAE4D4D,MAAO,KACnE,SAAUC,EAAI1E,GAGbT,GAAOG,GAAIM,GAAS,SAAU4d,EAAMle,GACnC,OAA0B,EAAnBsB,UAAUnB,OAChBvD,KAAKmnB,GAAIzjB,EAAM,KAAM4d,EAAMle,GAC3BpD,KAAKioB,QAASvkB,MAYlB,IAAI2nC,GAAQ,sDAMZpoC,GAAOqoC,MAAQ,SAAUloC,EAAID,GAC5B,IAAIyf,EAAK/P,EAAMy4B,EAUf,GARwB,iBAAZnoC,IACXyf,EAAMxf,EAAID,GACVA,EAAUC,EACVA,EAAKwf,GAKAvhB,EAAY+B,GAalB,OARAyP,EAAOvS,GAAMG,KAAMiE,UAAW,IAC9B4mC,EAAQ,WACP,OAAOloC,EAAGzC,MAAOwC,GAAWnD,KAAM6S,EAAKnS,OAAQJ,GAAMG,KAAMiE,eAItDsD,KAAO5E,EAAG4E,KAAO5E,EAAG4E,MAAQ/E,GAAO+E,OAElCsjC,GAGRroC,GAAOsoC,UAAY,SAAUC,GACvBA,EACJvoC,GAAO4c,YAEP5c,GAAOoW,OAAO,IAGhBpW,GAAO+C,QAAUD,MAAMC,QACvB/C,GAAOwoC,UAAY3pB,KAAKC,
MACxB9e,GAAOO,SAAWA,GAClBP,GAAO5B,WAAaA,EACpB4B,GAAOxB,SAAWA,EAClBwB,GAAO4d,UAAYA,EACnB5d,GAAOtB,KAAOmB,EAEdG,GAAOkoB,IAAMD,KAAKC,IAElBloB,GAAOyoC,UAAY,SAAUpqC,GAK5B,IAAIK,EAAOsB,GAAOtB,KAAML,GACxB,OAAkB,WAATK,GAA8B,WAATA,KAK5BgqC,MAAOrqC,EAAMgxB,WAAYhxB,KAG5B2B,GAAO2oC,KAAO,SAAUrpC,GACvB,OAAe,MAARA,EACN,IACEA,EAAO,IAAK8D,QAASglC,GAAO,OAkBT,mBAAXQ,QAAyBA,OAAOC,KAC3CD,OAAQ,SAAU,GAAI,WACrB,OAAO5oC,KAOT,IAGC8oC,GAAUhsC,GAAOkD,OAGjB+oC,GAAKjsC,GAAOksC,EAwBb,OAtBAhpC,GAAOipC,WAAa,SAAUrmC,GAS7B,OARK9F,GAAOksC,IAAMhpC,KACjBlD,GAAOksC,EAAID,IAGPnmC,GAAQ9F,GAAOkD,SAAWA,KAC9BlD,GAAOkD,OAAS8oC,IAGV9oC,IAMiB,oBAAbhD,IACXF,GAAOkD,OAASlD,GAAOksC,EAAIhpC,IAMrBA","file":"jquery-3.7.1.min.js"} \ No newline at end of file
diff --git a/lib/toaster/toastergui/static/js/jquery.dataTables-1.13.8.min.js b/lib/toaster/toastergui/static/js/jquery.dataTables-1.13.8.min.js
new file mode 100644
index 000000000..b6d9aa8c7
--- /dev/null
+++ b/lib/toaster/toastergui/static/js/jquery.dataTables-1.13.8.min.js
@@ -0,0 +1,4 @@
+/*! DataTables 1.13.8
+ * ©2008-2023 SpryMedia Ltd - datatables.net/license
+ */
+!function(n){"use strict";var a;"function"==typeof define&&define.amd?define(["jquery"],function(t){return n(t,window,document)}):"object"==typeof exports?(a=require("jquery"),"undefined"==typeof window?module.exports=function(t,e){return t=t||window,e=e||a(t),n(e,t,t.document)}:module.exports=n(a,window,window.document)):window.DataTable=n(jQuery,window,document)}(function(P,j,v,H){"use strict";function d(t){var e=parseInt(t,10);return!isNaN(e)&&isFinite(t)?e:null}function l(t,e,n){var a=typeof t,r="string"==a;return"number"==a||"bigint"==a||!!h(t)||(e&&r&&(t=$(t,e)),n&&r&&(t=t.replace(q,"")),!isNaN(parseFloat(t))&&isFinite(t))}function a(t,e,n){var a;return!!h(t)||(h(a=t)||"string"==typeof a)&&!!l(t.replace(V,"").replace(/<script/i,""),e,n)||null}function m(t,e,n,a){var r=[],o=0,i=e.length;if(a!==H)for(;o<i;o++)t[e[o]][n]&&r.push(t[e[o]][n][a]);else for(;o<i;o++)r.push(t[e[o]][n]);return r}function f(t,e){var n,a=[];e===H?(e=0,n=t):(n=e,e=t);for(var r=e;r<n;r++)a.push(r);return a}function _(t){for(var e=[],n=0,a=t.length;n<a;n++)t[n]&&e.push(t[n]);return e}function s(t,e){return-1!==this.indexOf(t,e=e===H?0:e)}var p,e,t,w=function(t,v){if(w.factory(t,v))return w;if(this instanceof w)return P(t).DataTable(v);v=t,this.$=function(t,e){return this.api(!0).$(t,e)},this._=function(t,e){return this.api(!0).rows(t,e).data()},this.api=function(t){return new B(t?ge(this[p.iApiIndex]):this)},this.fnAddData=function(t,e){var n=this.api(!0),t=(Array.isArray(t)&&(Array.isArray(t[0])||P.isPlainObject(t[0]))?n.rows:n.row).add(t);return e!==H&&!e||n.draw(),t.flatten().toArray()},this.fnAdjustColumnSizing=function(t){var e=this.api(!0).columns.adjust(),n=e.settings()[0],a=n.oScroll;t===H||t?e.draw(!1):""===a.sX&&""===a.sY||Qt(n)},this.fnClearTable=function(t){var e=this.api(!0).clear();t!==H&&!t||e.draw()},this.fnClose=function(t){this.api(!0).row(t).child.hide()},this.fnDeleteRow=function(t,e,n){var a=this.api(!0),t=a.rows(t),r=t.settings()[0],o=r.aoData[t[0][0]];return t.remove(),e&&e.call(this,r,o),n!==H&&!n||a.draw(),o},this.fnDestroy=function(t){this.api(!0).destroy(t)},this.fnDraw=function(t){this.api(!0).draw(t)},this.fnFilter=function(t,e,n,a,r,o){var i=this.api(!0);(null===e||e===H?i:i.column(e)).search(t,n,a,o),i.draw()},this.fnGetData=function(t,e){var n,a=this.api(!0);return t!==H?(n=t.nodeName?t.nodeName.toLowerCase():"",e!==H||"td"==n||"th"==n?a.cell(t,e).data():a.row(t).data()||null):a.data().toArray()},this.fnGetNodes=function(t){var e=this.api(!0);return t!==H?e.row(t).node():e.rows().nodes().flatten().toArray()},this.fnGetPosition=function(t){var e=this.api(!0),n=t.nodeName.toUpperCase();return"TR"==n?e.row(t).index():"TD"==n||"TH"==n?[(n=e.cell(t).index()).row,n.columnVisible,n.column]:null},this.fnIsOpen=function(t){return this.api(!0).row(t).child.isShown()},this.fnOpen=function(t,e,n){return this.api(!0).row(t).child(e,n).show().child()[0]},this.fnPageChange=function(t,e){t=this.api(!0).page(t);e!==H&&!e||t.draw(!1)},this.fnSetColumnVis=function(t,e,n){t=this.api(!0).column(t).visible(e);n!==H&&!n||t.columns.adjust().draw()},this.fnSettings=function(){return ge(this[p.iApiIndex])},this.fnSort=function(t){this.api(!0).order(t).draw()},this.fnSortListener=function(t,e,n){this.api(!0).order.listener(t,e,n)},this.fnUpdate=function(t,e,n,a,r){var o=this.api(!0);return(n===H||null===n?o.row(e):o.cell(e,n)).data(t),r!==H&&!r||o.columns.adjust(),a!==H&&!a||o.draw(),0},this.fnVersionCheck=p.fnVersionCheck;var e,y=this,D=v===H,_=this.length;for(e in 
D&&(v={}),this.oApi=this.internal=p.internal,w.ext.internal)e&&(this[e]=$e(e));return this.each(function(){var r=1<_?be({},v,!0):v,o=0,t=this.getAttribute("id"),i=!1,e=w.defaults,l=P(this);if("table"!=this.nodeName.toLowerCase())W(null,0,"Non-table node initialisation ("+this.nodeName+")",2);else{K(e),Q(e.column),C(e,e,!0),C(e.column,e.column,!0),C(e,P.extend(r,l.data()),!0);for(var n=w.settings,o=0,s=n.length;o<s;o++){var a=n[o];if(a.nTable==this||a.nTHead&&a.nTHead.parentNode==this||a.nTFoot&&a.nTFoot.parentNode==this){var u=(r.bRetrieve!==H?r:e).bRetrieve,c=(r.bDestroy!==H?r:e).bDestroy;if(D||u)return a.oInstance;if(c){a.oInstance.fnDestroy();break}return void W(a,0,"Cannot reinitialise DataTable",3)}if(a.sTableId==this.id){n.splice(o,1);break}}null!==t&&""!==t||(t="DataTables_Table_"+w.ext._unique++,this.id=t);var f,d,h=P.extend(!0,{},w.models.oSettings,{sDestroyWidth:l[0].style.width,sInstance:t,sTableId:t}),p=(h.nTable=this,h.oApi=y.internal,h.oInit=r,n.push(h),h.oInstance=1===y.length?y:l.dataTable(),K(r),Z(r.oLanguage),r.aLengthMenu&&!r.iDisplayLength&&(r.iDisplayLength=(Array.isArray(r.aLengthMenu[0])?r.aLengthMenu[0]:r.aLengthMenu)[0]),r=be(P.extend(!0,{},e),r),F(h.oFeatures,r,["bPaginate","bLengthChange","bFilter","bSort","bSortMulti","bInfo","bProcessing","bAutoWidth","bSortClasses","bServerSide","bDeferRender"]),F(h,r,["asStripeClasses","ajax","fnServerData","fnFormatNumber","sServerMethod","aaSorting","aaSortingFixed","aLengthMenu","sPaginationType","sAjaxSource","sAjaxDataProp","iStateDuration","sDom","bSortCellsTop","iTabIndex","fnStateLoadCallback","fnStateSaveCallback","renderer","searchDelay","rowId",["iCookieDuration","iStateDuration"],["oSearch","oPreviousSearch"],["aoSearchCols","aoPreSearchCols"],["iDisplayLength","_iDisplayLength"]]),F(h.oScroll,r,[["sScrollX","sX"],["sScrollXInner","sXInner"],["sScrollY","sY"],["bScrollCollapse","bCollapse"]]),F(h.oLanguage,r,"fnInfoCallback"),L(h,"aoDrawCallback",r.fnDrawCallback,"user"),L(h,"aoServerParams",r.fnServerParams,"user"),L(h,"aoStateSaveParams",r.fnStateSaveParams,"user"),L(h,"aoStateLoadParams",r.fnStateLoadParams,"user"),L(h,"aoStateLoaded",r.fnStateLoaded,"user"),L(h,"aoRowCallback",r.fnRowCallback,"user"),L(h,"aoRowCreatedCallback",r.fnCreatedRow,"user"),L(h,"aoHeaderCallback",r.fnHeaderCallback,"user"),L(h,"aoFooterCallback",r.fnFooterCallback,"user"),L(h,"aoInitComplete",r.fnInitComplete,"user"),L(h,"aoPreDrawCallback",r.fnPreDrawCallback,"user"),h.rowIdFn=A(r.rowId),tt(h),h.oClasses),g=(P.extend(p,w.ext.classes,r.oClasses),l.addClass(p.sTable),h.iInitDisplayStart===H&&(h.iInitDisplayStart=r.iDisplayStart,h._iDisplayStart=r.iDisplayStart),null!==r.iDeferLoading&&(h.bDeferLoading=!0,t=Array.isArray(r.iDeferLoading),h._iRecordsDisplay=t?r.iDeferLoading[0]:r.iDeferLoading,h._iRecordsTotal=t?r.iDeferLoading[1]:r.iDeferLoading),h.oLanguage),t=(P.extend(!0,g,r.oLanguage),g.sUrl?(P.ajax({dataType:"json",url:g.sUrl,success:function(t){C(e.oLanguage,t),Z(t),P.extend(!0,g,t,h.oInit.oLanguage),R(h,null,"i18n",[h]),Jt(h)},error:function(){Jt(h)}}),i=!0):R(h,null,"i18n",[h]),null===r.asStripeClasses&&(h.asStripeClasses=[p.sStripeOdd,p.sStripeEven]),h.asStripeClasses),b=l.children("tbody").find("tr").eq(0),m=(-1!==P.inArray(!0,P.map(t,function(t,e){return b.hasClass(t)}))&&(P("tbody tr",this).removeClass(t.join(" ")),h.asDestroyStripes=t.slice()),[]),t=this.getElementsByTagName("thead");if(0!==t.length&&(wt(h.aoHeader,t[0]),m=Ct(h)),null===r.aoColumns)for(f=[],o=0,s=m.length;o<s;o++)f.push(null);else 
f=r.aoColumns;for(o=0,s=f.length;o<s;o++)nt(h,m?m[o]:null);st(h,r.aoColumnDefs,f,function(t,e){at(h,t,e)}),b.length&&(d=function(t,e){return null!==t.getAttribute("data-"+e)?e:null},P(b[0]).children("th, td").each(function(t,e){var n,a=h.aoColumns[t];a||W(h,0,"Incorrect column count",18),a.mData===t&&(n=d(e,"sort")||d(e,"order"),e=d(e,"filter")||d(e,"search"),null===n&&null===e||(a.mData={_:t+".display",sort:null!==n?t+".@data-"+n:H,type:null!==n?t+".@data-"+n:H,filter:null!==e?t+".@data-"+e:H},a._isArrayHost=!0,at(h,t)))}));var S=h.oFeatures,t=function(){if(r.aaSorting===H){var t=h.aaSorting;for(o=0,s=t.length;o<s;o++)t[o][1]=h.aoColumns[o].asSorting[0]}ce(h),S.bSort&&L(h,"aoDrawCallback",function(){var t,n;h.bSorted&&(t=I(h),n={},P.each(t,function(t,e){n[e.src]=e.dir}),R(h,null,"order",[h,t,n]),le(h))}),L(h,"aoDrawCallback",function(){(h.bSorted||"ssp"===E(h)||S.bDeferRender)&&ce(h)},"sc");var e=l.children("caption").each(function(){this._captionSide=P(this).css("caption-side")}),n=l.children("thead"),a=(0===n.length&&(n=P("<thead/>").appendTo(l)),h.nTHead=n[0],l.children("tbody")),n=(0===a.length&&(a=P("<tbody/>").insertAfter(n)),h.nTBody=a[0],l.children("tfoot"));if(0===(n=0===n.length&&0<e.length&&(""!==h.oScroll.sX||""!==h.oScroll.sY)?P("<tfoot/>").appendTo(l):n).length||0===n.children().length?l.addClass(p.sNoFooter):0<n.length&&(h.nTFoot=n[0],wt(h.aoFooter,h.nTFoot)),r.aaData)for(o=0;o<r.aaData.length;o++)x(h,r.aaData[o]);else!h.bDeferLoading&&"dom"!=E(h)||ut(h,P(h.nTBody).children("tr"));h.aiDisplay=h.aiDisplayMaster.slice(),!(h.bInitialised=!0)===i&&Jt(h)};L(h,"aoDrawCallback",de,"state_save"),r.bStateSave?(S.bStateSave=!0,he(h,0,t)):t()}}),y=null,this},c={},U=/[\r\n\u2028]/g,V=/<.*?>/g,X=/^\d{2,4}[\.\/\-]\d{1,2}[\.\/\-]\d{1,2}([T ]{1}\d{1,2}[:\.]\d{2}([\.:]\d{2})?)?$/,J=new RegExp("(\\"+["/",".","*","+","?","|","(",")","[","]","{","}","\\","$","^","-"].join("|\\")+")","g"),q=/['\u00A0,$£€¥%\u2009\u202F\u20BD\u20a9\u20BArfkɃΞ]/gi,h=function(t){return!t||!0===t||"-"===t},$=function(t,e){return c[e]||(c[e]=new RegExp(Ot(e),"g")),"string"==typeof t&&"."!==e?t.replace(/\./g,"").replace(c[e],"."):t},N=function(t,e,n){var a=[],r=0,o=t.length;if(n!==H)for(;r<o;r++)t[r]&&t[r][e]&&a.push(t[r][e][n]);else for(;r<o;r++)t[r]&&a.push(t[r][e]);return a},G=function(t){if(!(t.length<2))for(var e=t.slice().sort(),n=e[0],a=1,r=e.length;a<r;a++){if(e[a]===n)return!1;n=e[a]}return!0},z=function(t){if(G(t))return t.slice();var e,n,a,r=[],o=t.length,i=0;t:for(n=0;n<o;n++){for(e=t[n],a=0;a<i;a++)if(r[a]===e)continue t;r.push(e),i++}return r},Y=function(t,e){if(Array.isArray(e))for(var n=0;n<e.length;n++)Y(t,e[n]);else t.push(e);return t};function i(n){var a,r,o={};P.each(n,function(t,e){(a=t.match(/^([^A-Z]+?)([A-Z])/))&&-1!=="a aa ai ao as b fn i m o s ".indexOf(a[1]+" ")&&(r=t.replace(a[0],a[2].toLowerCase()),o[r]=t,"o"===a[1])&&i(n[t])}),n._hungarianMap=o}function C(n,a,r){var o;n._hungarianMap||i(n),P.each(a,function(t,e){(o=n._hungarianMap[t])===H||!r&&a[o]!==H||("o"===o.charAt(0)?(a[o]||(a[o]={}),P.extend(!0,a[o],a[t]),C(n[o],a[o],r)):a[o]=a[t])})}function Z(t){var e,n=w.defaults.oLanguage,a=n.sDecimal;a&&Me(a),t&&(e=t.sZeroRecords,!t.sEmptyTable&&e&&"No data available in table"===n.sEmptyTable&&F(t,t,"sZeroRecords","sEmptyTable"),!t.sLoadingRecords&&e&&"Loading..."===n.sLoadingRecords&&F(t,t,"sZeroRecords","sLoadingRecords"),t.sInfoThousands&&(t.sThousands=t.sInfoThousands),e=t.sDecimal)&&a!==e&&Me(e)}Array.isArray||(Array.isArray=function(t){return"[object 
Array]"===Object.prototype.toString.call(t)}),Array.prototype.includes||(Array.prototype.includes=s),String.prototype.trim||(String.prototype.trim=function(){return this.replace(/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g,"")}),String.prototype.includes||(String.prototype.includes=s),w.util={throttle:function(a,t){var r,o,i=t!==H?t:200;return function(){var t=this,e=+new Date,n=arguments;r&&e<r+i?(clearTimeout(o),o=setTimeout(function(){r=H,a.apply(t,n)},i)):(r=e,a.apply(t,n))}},escapeRegex:function(t){return t.replace(J,"\\$1")},set:function(a){var d;return P.isPlainObject(a)?w.util.set(a._):null===a?function(){}:"function"==typeof a?function(t,e,n){a(t,"set",e,n)}:"string"!=typeof a||-1===a.indexOf(".")&&-1===a.indexOf("[")&&-1===a.indexOf("(")?function(t,e){t[a]=e}:(d=function(t,e,n){for(var a,r,o,i,l=dt(n),n=l[l.length-1],s=0,u=l.length-1;s<u;s++){if("__proto__"===l[s]||"constructor"===l[s])throw new Error("Cannot set prototype values");if(a=l[s].match(ft),r=l[s].match(g),a){if(l[s]=l[s].replace(ft,""),t[l[s]]=[],(a=l.slice()).splice(0,s+1),i=a.join("."),Array.isArray(e))for(var c=0,f=e.length;c<f;c++)d(o={},e[c],i),t[l[s]].push(o);else t[l[s]]=e;return}r&&(l[s]=l[s].replace(g,""),t=t[l[s]](e)),null!==t[l[s]]&&t[l[s]]!==H||(t[l[s]]={}),t=t[l[s]]}n.match(g)?t[n.replace(g,"")](e):t[n.replace(ft,"")]=e},function(t,e){return d(t,e,a)})},get:function(r){var o,d;return P.isPlainObject(r)?(o={},P.each(r,function(t,e){e&&(o[t]=w.util.get(e))}),function(t,e,n,a){var r=o[e]||o._;return r!==H?r(t,e,n,a):t}):null===r?function(t){return t}:"function"==typeof r?function(t,e,n,a){return r(t,e,n,a)}:"string"!=typeof r||-1===r.indexOf(".")&&-1===r.indexOf("[")&&-1===r.indexOf("(")?function(t,e){return t[r]}:(d=function(t,e,n){var a,r,o;if(""!==n)for(var i=dt(n),l=0,s=i.length;l<s;l++){if(f=i[l].match(ft),a=i[l].match(g),f){if(i[l]=i[l].replace(ft,""),""!==i[l]&&(t=t[i[l]]),r=[],i.splice(0,l+1),o=i.join("."),Array.isArray(t))for(var u=0,c=t.length;u<c;u++)r.push(d(t[u],e,o));var f=f[0].substring(1,f[0].length-1);t=""===f?r:r.join(f);break}if(a)i[l]=i[l].replace(g,""),t=t[i[l]]();else{if(null===t||null===t[i[l]])return null;if(t===H||t[i[l]]===H)return H;t=t[i[l]]}}return t},function(t,e){return d(t,e,r)})}};var r=function(t,e,n){t[e]!==H&&(t[n]=t[e])};function K(t){r(t,"ordering","bSort"),r(t,"orderMulti","bSortMulti"),r(t,"orderClasses","bSortClasses"),r(t,"orderCellsTop","bSortCellsTop"),r(t,"order","aaSorting"),r(t,"orderFixed","aaSortingFixed"),r(t,"paging","bPaginate"),r(t,"pagingType","sPaginationType"),r(t,"pageLength","iDisplayLength"),r(t,"searching","bFilter"),"boolean"==typeof t.sScrollX&&(t.sScrollX=t.sScrollX?"100%":""),"boolean"==typeof t.scrollX&&(t.scrollX=t.scrollX?"100%":"");var e=t.aoSearchCols;if(e)for(var n=0,a=e.length;n<a;n++)e[n]&&C(w.models.oSearch,e[n])}function Q(t){r(t,"orderable","bSortable"),r(t,"orderData","aDataSort"),r(t,"orderSequence","asSorting"),r(t,"orderDataType","sortDataType");var e=t.aDataSort;"number"!=typeof e||Array.isArray(e)||(t.aDataSort=[e])}function tt(t){var 
e,n,a,r;w.__browser||(w.__browser=e={},r=(a=(n=P("<div/>").css({position:"fixed",top:0,left:-1*P(j).scrollLeft(),height:1,width:1,overflow:"hidden"}).append(P("<div/>").css({position:"absolute",top:1,left:1,width:100,overflow:"scroll"}).append(P("<div/>").css({width:"100%",height:10}))).appendTo("body")).children()).children(),e.barWidth=a[0].offsetWidth-a[0].clientWidth,e.bScrollOversize=100===r[0].offsetWidth&&100!==a[0].clientWidth,e.bScrollbarLeft=1!==Math.round(r.offset().left),e.bBounding=!!n[0].getBoundingClientRect().width,n.remove()),P.extend(t.oBrowser,w.__browser),t.oScroll.iBarWidth=w.__browser.barWidth}function et(t,e,n,a,r,o){var i,l=a,s=!1;for(n!==H&&(i=n,s=!0);l!==r;)t.hasOwnProperty(l)&&(i=s?e(i,t[l],l,t):t[l],s=!0,l+=o);return i}function nt(t,e){var n=w.defaults.column,a=t.aoColumns.length,n=P.extend({},w.models.oColumn,n,{nTh:e||v.createElement("th"),sTitle:n.sTitle||(e?e.innerHTML:""),aDataSort:n.aDataSort||[a],mData:n.mData||a,idx:a}),n=(t.aoColumns.push(n),t.aoPreSearchCols);n[a]=P.extend({},w.models.oSearch,n[a]),at(t,a,P(e).data())}function at(t,e,n){function a(t){return"string"==typeof t&&-1!==t.indexOf("@")}var e=t.aoColumns[e],r=t.oClasses,o=P(e.nTh),i=(!e.sWidthOrig&&(e.sWidthOrig=o.attr("width")||null,u=(o.attr("style")||"").match(/width:\s*(\d+[pxem%]+)/))&&(e.sWidthOrig=u[1]),n!==H&&null!==n&&(Q(n),C(w.defaults.column,n,!0),n.mDataProp===H||n.mData||(n.mData=n.mDataProp),n.sType&&(e._sManualType=n.sType),n.className&&!n.sClass&&(n.sClass=n.className),n.sClass&&o.addClass(n.sClass),u=e.sClass,P.extend(e,n),F(e,n,"sWidth","sWidthOrig"),u!==e.sClass&&(e.sClass=u+" "+e.sClass),n.iDataSort!==H&&(e.aDataSort=[n.iDataSort]),F(e,n,"aDataSort"),e.ariaTitle||(e.ariaTitle=o.attr("aria-label"))),e.mData),l=A(i),s=e.mRender?A(e.mRender):null,u=(e._bAttrSrc=P.isPlainObject(i)&&(a(i.sort)||a(i.type)||a(i.filter)),e._setter=null,e.fnGetData=function(t,e,n){var a=l(t,e,H,n);return s&&e?s(a,e,t,n):a},e.fnSetData=function(t,e,n){return b(i)(t,e,n)},"number"==typeof i||e._isArrayHost||(t._rowReadObject=!0),t.oFeatures.bSort||(e.bSortable=!1,o.addClass(r.sSortableNone)),-1!==P.inArray("asc",e.asSorting)),n=-1!==P.inArray("desc",e.asSorting);e.bSortable&&(u||n)?u&&!n?(e.sSortingClass=r.sSortableAsc,e.sSortingClassJUI=r.sSortJUIAscAllowed):!u&&n?(e.sSortingClass=r.sSortableDesc,e.sSortingClassJUI=r.sSortJUIDescAllowed):(e.sSortingClass=r.sSortable,e.sSortingClassJUI=r.sSortJUI):(e.sSortingClass=r.sSortableNone,e.sSortingClassJUI="")}function O(t){if(!1!==t.oFeatures.bAutoWidth){var e=t.aoColumns;ee(t);for(var n=0,a=e.length;n<a;n++)e[n].nTh.style.width=e[n].sWidth}var r=t.oScroll;""===r.sY&&""===r.sX||Qt(t),R(t,null,"column-sizing",[t])}function rt(t,e){t=it(t,"bVisible");return"number"==typeof t[e]?t[e]:null}function ot(t,e){t=it(t,"bVisible"),e=P.inArray(e,t);return-1!==e?e:null}function T(t){var n=0;return P.each(t.aoColumns,function(t,e){e.bVisible&&"none"!==P(e.nTh).css("display")&&n++}),n}function it(t,n){var a=[];return P.map(t.aoColumns,function(t,e){t[n]&&a.push(e)}),a}function lt(t){for(var e,n,a,r,o,i,l,s=t.aoColumns,u=t.aoData,c=w.ext.type.detect,f=0,d=s.length;f<d;f++)if(l=[],!(o=s[f]).sType&&o._sManualType)o.sType=o._sManualType;else if(!o.sType){for(e=0,n=c.length;e<n;e++){for(a=0,r=u.length;a<r&&(l[a]===H&&(l[a]=S(t,a,f,"type")),(i=c[e](l[a],t))||e===c.length-1)&&("html"!==i||h(l[a]));a++);if(i){o.sType=i;break}}o.sType||(o.sType="string")}}function st(t,e,n,a){var r,o,i,l,s=t.aoColumns;if(e)for(r=e.length-1;0<=r;r--)for(var 
diff --git a/lib/toaster/toastergui/static/js/libtoaster.js b/lib/toaster/toastergui/static/js/libtoaster.js
index f2c45c833..d4ac31234 100644
--- a/lib/toaster/toastergui/static/js/libtoaster.js
+++ b/lib/toaster/toastergui/static/js/libtoaster.js
@@ -657,7 +657,7 @@ $(document).ready(function() {
hljs.initHighlightingOnLoad();
// Prevent invalid links from jumping page scroll
- $('a[href=#]').click(function() {
+ $('a[href="#"]').click(function() {
return false;
});
diff --git a/lib/toaster/toastergui/static/js/projectpage.js b/lib/toaster/toastergui/static/js/projectpage.js
index 506471e09..a3c95810a 100644
--- a/lib/toaster/toastergui/static/js/projectpage.js
+++ b/lib/toaster/toastergui/static/js/projectpage.js
@@ -61,7 +61,7 @@ function projectPageInit(ctx) {
distroChangeInput.val(urlParams.setDistro);
distroChangeBtn.click();
} else {
- updateDistroName(prjInfo.distro.name);
+ updateDistroName(prjInfo.distro?.name);
}
/* Now we're really ready show the page */
diff --git a/lib/toaster/toastergui/templates/base.html b/lib/toaster/toastergui/templates/base.html
index 9e19cc33c..e90be6962 100644
--- a/lib/toaster/toastergui/templates/base.html
+++ b/lib/toaster/toastergui/templates/base.html
@@ -14,11 +14,11 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<meta http-equiv="Content-Type" content="text/html;charset=UTF-8" />
- <script src="{% static 'js/jquery-2.0.3.min.js' %}">
+ <script src="{% static 'js/jquery-3.7.1.min.js' %}">
</script>
<script src="{% static 'js/jquery.cookie.js' %}">
</script>
- <script src="{% static 'js/bootstrap.min.js' %}">
+ <script src="{% static 'js/bootstrap-3.4.1.min.js' %}">
</script>
<script src="{% static 'js/typeahead.jquery.js' %}">
</script>
@@ -94,7 +94,7 @@
</a>
<a class="brand" href="/">Toaster</a>
{% if DEBUG %}
- <span class="glyphicon glyphicon-info-sign" title="<strong>Toaster version information</strong>" data-content="<dl><dt>Git branch</dt><dd>{{TOASTER_BRANCH}}</dd><dt>Git revision</dt><dd>{{TOASTER_REVISION}}</dd></dl>"></i>
+ <span id="toaster-version-info-sign" class="glyphicon glyphicon-info-sign" title="<strong>Toaster version information</strong>" data-content="<dl><dt>Git branch</dt><dd>{{TOASTER_BRANCH}}</dd><dt>Git revision</dt><dd>{{TOASTER_REVISION}}</dd></dl>"></span>
{% endif %}
</div>
</div>
@@ -123,7 +123,7 @@
{% endif %}
{% endif %}
<li id="navbar-docs">
- <a target="_blank" href="https://www.yoctoproject.org/docs/latest/toaster-manual/toaster-manual.html">
+ <a target="_blank" href="http://docs.yoctoproject.org/toaster-manual/index.html#toaster-user-manual">
<i class="glyphicon glyphicon-book"></i>
Documentation
</a>
@@ -132,7 +132,8 @@
{% if project_enable %}
<a class="btn btn-default navbar-btn navbar-right" id="new-project-button" href="{% url 'newproject' %}">New project</a>
{% endif %}
- </div>
+ <a class="btn btn-default navbar-btn navbar-right" id="import_page" style="margin-right: 5px !important" href="{% url 'cmdlines' %}">Import command line builds</a>
+ </div>
</div>
</nav>
diff --git a/lib/toaster/toastergui/templates/base_specific.html b/lib/toaster/toastergui/templates/base_specific.html
index e377cadd7..425f7ed73 100644
--- a/lib/toaster/toastergui/templates/base_specific.html
+++ b/lib/toaster/toastergui/templates/base_specific.html
@@ -14,11 +14,11 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<meta http-equiv="Content-Type" content="text/html;charset=UTF-8" />
- <script src="{% static 'js/jquery-2.0.3.min.js' %}">
+ <script src="{% static 'js/jquery-3.7.1.min.js' %}">
</script>
<script src="{% static 'js/jquery.cookie.js' %}">
</script>
- <script src="{% static 'js/bootstrap.min.js' %}">
+ <script src="{% static 'js/bootstrap-3.4.1.min.js' %}">
</script>
<script src="{% static 'js/typeahead.jquery.js' %}">
</script>
diff --git a/lib/toaster/toastergui/templates/command_line_builds.html b/lib/toaster/toastergui/templates/command_line_builds.html
new file mode 100644
index 000000000..05db6727e
--- /dev/null
+++ b/lib/toaster/toastergui/templates/command_line_builds.html
@@ -0,0 +1,209 @@
+{% extends "base.html" %}
+{% load projecttags %}
+{% load humanize %}
+{% load static %}
+
+{% block title %} Import Builds from eventlogs - Toaster {% endblock %}
+
+{% block pagecontent %}
+
+<div class="container-fluid">
+ <div id="overlay" class="hide">
+ <div class="spinner">
+ <div class="fa-spin">
+ </div>
+ </div>
+ </div>
+ <div class="row">
+ <div class="col-md-12">
+ <div class="page-header">
+ <div class="row">
+ <div class="col-md-6">
+ <h1>Import command line builds</h1>
+ </div>
+ {% if import_all %}
+ <div class="col-md-6">
+ <button id="import_all" type="button" class="btn btn-primary navbar-btn navbar-right">
+ <span class="glyphicon glyphicon-upload" style="vertical-align: top;"></span> Import All
+ </button>
+ </div>
+ {% endif %}
+ </div>
+ </div>
+ {% if messages %}
+ <div class="row-fluid" id="empty-state-{{table_name}}">
+ {% for message in messages %}
+ <div class="alert alert-danger">{{message}}</div>
+ {%endfor%}
+ </div>
+ {% endif %}
+ <div class="row">
+ <h4 style="margin-left: 15px;"><strong>Import eventlog file</strong></h4>
+ <form method="POST" enctype="multipart/form-data" action="{% url 'cmdlines' %}" id="form_file">
+ {% csrf_token %}
+ <div class="col-md-6" style="padding-left: 20px;">
+ <div class="row">
+ <input type="hidden" value="{{dir}}" name="dir">
+ <div class="col-md-3"> {{ form.eventlog_file}} </div>
+ </div>
+ <div class="row" style="padding-top: 10px;">
+ <div class="col-md-6">
+ <button id="file_import" type="submit" disabled="disabled" class="btn btn-default navbar-btn" >
+ <span class="glyphicon glyphicon-upload" style="vertical-align: top;"></span> Import
+ </button>
+ </div>
+ </div>
+ </div>
+ </form>
+ </div>
+
+ <div class="row" style="padding-top: 20px;">
+ <div class="col-md-8 ">
+ <h4><strong>Eventlogs from existing build directory: </strong>
+ <a href="#" data-toggle="tooltip" title="{{dir}}">
+ <svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-info-circle" viewBox="0 0 16 16" data-toggle="tooltip">
+ <path d="M8 15A7 7 0 1 1 8 1a7 7 0 0 1 0 14m0 1A8 8 0 1 0 8 0a8 8 0 0 0 0 16"/>
+ <path d="m8.93 6.588-2.29.287-.082.38.45.083c.294.07.352.176.288.469l-.738 3.468c-.194.897.105 1.319.808 1.319.545 0 1.178-.252 1.465-.598l.088-.416c-.2.176-.492.246-.686.246-.275 0-.375-.193-.304-.533zM9 4.5a1 1 0 1 1-2 0 1 1 0 0 1 2 0"/>
+ </svg>
+ </a>
+ </h4>
+ {% if files %}
+ <div class="table-responsive">
+ <table class="table col-md-6 table-bordered table-hover" id="eventlog-table" style="border-collapse: collapse;">
+ <thead>
+ <tr class="row">
+ <th scope="col">Name</th>
+ <th scope="col">Size</th>
+ <th scope="col">Action</th>
+ </tr>
+ </thead>
+ <tbody>
+ {% for file in files %}
+ <tr class="row" style="height: 48px;">
+ <th scope="row" class="col-md-4" style="vertical-align: middle;">
+ <input type="hidden" value="{{file.name}}" name="{{file.name}}">{{file.name}}
+ </th>
+ <td class="col-md-4 align-middle" style="vertical-align: middle;">{{file.size|filesizeformat}}</td>
+ <td class="col-md-4 align-middle" style="vertical-align: middle;">
+ {% if file.imported == True and file.build_id is not None %}
+ <a href="{% url 'builddashboard' file.build_id %}">Build Details</a>
+ {% elif request.session.file == file.name or request.session.all_builds %}
+ <a data-toggle="tooltip" title="Build in progress">
+ <span class="glyphicon glyphicon-upload" style="font-size: 18px; color:grey"></span>
+ </a>
+ {%else%}
+ <a onclick="_ajax_update('{{file.name}}', false, '{{dir}}')" data-toggle="tooltip" title="Import File">
+ <span class="glyphicon glyphicon-upload" style="font-size: 18px;"></span>
+ </a>
+ {%endif%}
+ </td>
+ </tr>
+ {% endfor%}
+ </tbody>
+ </table>
+ </div>
+ {% else %}
+ <div class="row-fluid" id="empty-state-{{table_name}}">
+ <div class="alert alert-info">Sorry - no files found</div>
+ </div>
+ {%endif%}
+ </div>
+ </div>
+ </div>
+ </div>
+</div>
+
+<link rel="stylesheet" href="{% static 'css/jquery.dataTables-1.13.8.min.css' %}" type='text/css'/>
+<script src="{% static 'js/jquery.dataTables-1.13.8.min.js' %}"> </script>
+<script>
+
+function _ajax_update(file, all, dir){
+ function getCookie(name) {
+ var cookieValue = null;
+ if (document.cookie && document.cookie !== '') {
+ var cookies = document.cookie.split(';');
+ for (var i = 0; i < cookies.length; i++) {
+ var cookie = jQuery.trim(cookies[i]);
+ // Does this cookie string begin with the name we want?
+ if (cookie.substring(0, name.length + 1) === (name + '=')) {
+ cookieValue = decodeURIComponent(cookie.substring(name.length + 1));
+ break;
+ }
+ }
+ }
+ return cookieValue;
+ }
+ var csrftoken = getCookie('csrftoken');
+
+ function csrfSafeMethod(method) {
+ // these HTTP methods do not require CSRF protection
+ return (/^(GET|HEAD|OPTIONS|TRACE)$/.test(method));
+ }
+ $.ajaxSetup({
+ beforeSend: function (xhr, settings) {
+ if (!csrfSafeMethod(settings.type) && !this.crossDomain) {
+ xhr.setRequestHeader("X-CSRFToken", csrftoken);
+ }
+ }
+ });
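+ // This mirrors Django's documented CSRF-over-AJAX pattern: read the
+ // csrftoken cookie and attach it as the X-CSRFToken header on every
+ // non-safe, same-origin request made below.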
+
+ $.ajax({
+ url:'/toastergui/cmdline/',
+ type: "POST",
+ data: {file: file, all: all, dir: dir},
+ success:function(data){
+ if (data['response']=='building'){
+ location.reload()
+ } else {
+ window.location = '/toastergui/builds/'
+ }
+ },
+ complete:function(data){
+ },
+ error:function (xhr, textStatus, thrownError){
+ console.log('fail');
+ }
+ });
+}
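+// Usage, as wired up in this template: the per-file import icons call
+//   _ajax_update('{{file.name}}', false, '{{dir}}')
+// and the "Import All" button below passes the whole file list with all=true.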
+
+$('#import_all').on('click', function(){
+ _ajax_update("{{files | safe}}", true, "{{dir | safe}}");
+});
+
+
+$('#import_page').hide();
+
+$(function () {
+ $('[data-toggle="tooltip"]').tooltip()
+})
+
+
+$("#id_eventlog_file").change(function(){
+ $('#file_import').prop("disabled", false);
+ $('#file_import').addClass('btn-primary')
+ $('#file_import').removeClass('btn-default')
+})
+
+$(document).ajaxStart(function(){
+ $('#overlay').removeClass('hide');
+ window.setTimeout(
+ function() {
+ window.location = '/toastergui/builds/'
+ }, 10000)
+});
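+// The 10-second timers here and in the submit handler below act as a
+// fallback: if the import request has not completed by then, the user is
+// presumably meant to land on the builds page and watch progress there.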
+
+$( "#form_file").on( "submit", function( event ) {
+ $('#overlay').removeClass('hide');
+ window.setTimeout(
+ function() {
+ window.location = '/toastergui/builds/'
+ }, 10000)
+});
+
+$(document).ready( function () {
+ $('#eventlog-table').DataTable({order: [[0, 'desc']], "pageLength": 50});
+});
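+// The server-rendered table is enhanced client-side with DataTables:
+// sorted by the first column (Name) descending, 50 rows per page.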
+
+</script>
+
+{% endblock %}
diff --git a/lib/toaster/toastergui/templates/configvars.html b/lib/toaster/toastergui/templates/configvars.html
index 33fef9316..691dace3a 100644
--- a/lib/toaster/toastergui/templates/configvars.html
+++ b/lib/toaster/toastergui/templates/configvars.html
@@ -66,7 +66,7 @@
<td class="description">
{% if variable.description %}
{{variable.description}}
- <a href="https://www.yoctoproject.org/docs/current/ref-manual/ref-manual.html#var-{{variable.variable_name|variable_parent_name}}" target="_blank">
+ <a href="http://docs.yoctoproject.org/ref-manual/variables.html#term-{{variable.variable_name|variable_parent_name}}" target="_blank">
<span class="glyphicon glyphicon-new-window get-info"></span></a>
{% endif %}
</td>
diff --git a/lib/toaster/toastergui/templates/js-unit-tests.html b/lib/toaster/toastergui/templates/js-unit-tests.html
index ca248962f..41553c4f9 100644
--- a/lib/toaster/toastergui/templates/js-unit-tests.html
+++ b/lib/toaster/toastergui/templates/js-unit-tests.html
@@ -11,7 +11,7 @@
<script src="{% static 'js/layerDepsModal.js' %}"></script>
<script src="{% static 'js/projectpage.js' %}"></script>
-<script src="{% static 'js/bootstrap.min.js' %}"></script>
+<script src="{% static 'js/bootstrap-3.4.1.min.js' %}"></script>
<script src="{% static 'js/filtersnippet.js' %}"></script>
<script src="{% static 'js/importlayer.js' %}"></script>
<script src="{% static 'js/highlight.pack.js' %}"></script>
diff --git a/lib/toaster/toastergui/templates/landing.html b/lib/toaster/toastergui/templates/landing.html
index bfaaf6fc8..589ee2263 100644
--- a/lib/toaster/toastergui/templates/landing.html
+++ b/lib/toaster/toastergui/templates/landing.html
@@ -12,10 +12,10 @@
<div class="col-md-6">
<h1>This is Toaster</h1>
- <p>A web interface to <a href="https://www.openembedded.org">OpenEmbedded</a> and <a href="https://www.yoctoproject.org/tools-resources/projects/bitbake">BitBake</a>, the <a href="https://www.yoctoproject.org">Yocto Project</a> build system.</p>
+ <p>A web interface to <a href="https://www.openembedded.org">OpenEmbedded</a> and <a href="https://docs.yoctoproject.org/bitbake.html">BitBake</a>, the <a href="https://www.yoctoproject.org">Yocto Project</a> build system.</p>
<p class="top-air">
- <a class="btn btn-info btn-lg" href="https://www.yoctoproject.org/docs/latest/toaster-manual/toaster-manual.html#toaster-manual-setup-and-use">
+ <a class="btn btn-info btn-lg" href="http://docs.yoctoproject.org/toaster-manual/setup-and-use.html#setting-up-and-using-toaster" style="min-width: 460px;">
Toaster is ready to capture your command line builds
</a>
</p>
@@ -23,7 +23,7 @@
{% if lvs_nos %}
{% if project_enable %}
<p class="top-air">
- <a class="btn btn-primary btn-lg" href="{% url 'newproject' %}">
+ <a class="btn btn-primary btn-lg" href="{% url 'newproject' %}" style="min-width: 460px;">
Create your first Toaster project to run manage builds
</a>
</p>
@@ -33,7 +33,7 @@
Toaster has no layer information. Without layer information, you cannot run builds. To generate layer information you can:
<ul>
<li>
- <a href="https://www.yoctoproject.org/docs/latest/toaster-manual/toaster-manual.html#layer-source">Configure a layer source</a>
+ <a href="http://docs.yoctoproject.org/toaster-manual/reference.html#layer-source">Configure a layer source</a>
</li>
<li>
<a href="{% url 'newproject' %}">Create a project</a>, then import layers
@@ -42,9 +42,15 @@
</div>
{% endif %}
+ <p class="top-air">
+ <a class="btn btn-info btn-lg" href="{% url 'cmdlines' %}" style="min-width: 460px;">
+ Import command line event logs from build directory
+ </a>
+ </p>
+
<ul class="list-unstyled lead">
<li>
- <a href="https://www.yoctoproject.org/docs/latest/toaster-manual/toaster-manual.html">
+ <a href="http://docs.yoctoproject.org/toaster-manual/index.html#toaster-user-manual">
Read the Toaster manual
</a>
</li>
diff --git a/lib/toaster/toastergui/templates/landing_not_managed.html b/lib/toaster/toastergui/templates/landing_not_managed.html
deleted file mode 100644
index e7200b841..000000000
--- a/lib/toaster/toastergui/templates/landing_not_managed.html
+++ /dev/null
@@ -1,34 +0,0 @@
-{% extends "base.html" %}
-
-{% load static %}
-{% load projecttags %}
-{% load humanize %}
-
-{% block title %} Welcome to Toaster {% endblock %}
-
-{% block pagecontent %}
-
- <div class="container">
- <div class="row">
- <!-- Empty - no build module -->
- <div class="page-header top-air">
- <h1>
- This page only works with Toaster in 'Build' mode
- </h1>
- </div>
- <div class="alert alert-info lead">
- <p">
- The 'Build' mode allows you to configure and run your Yocto Project builds from Toaster.
- <ul>
- <li><a href="https://www.yoctoproject.org/docs/latest/toaster-manual/toaster-manual.html#intro-modes">
- Read about the 'Build' mode
- </a></li>
- <li><a href="/">
- View your builds
- </a></li>
- </ul>
- </p>
- </div>
- </div>
-
-{% endblock %}
diff --git a/lib/toaster/toastergui/templates/layerdetails.html b/lib/toaster/toastergui/templates/layerdetails.html
index 1e26e31c8..923ca3bfe 100644
--- a/lib/toaster/toastergui/templates/layerdetails.html
+++ b/lib/toaster/toastergui/templates/layerdetails.html
@@ -355,7 +355,7 @@
{% if layerversion.layer_source == layer_source.TYPE_LAYERINDEX %}
<dt>Layer index</dt>
<dd>
- <a href="http://layers.openembedded.org/layerindex/branch/{{layerversion.release.name}}/layer/{{layerversion.layer.name}}">Layer index {{layerversion.layer.name}}</a>
+ <a href="https://layers.openembedded.org/layerindex/branch/{{layerversion.release.name}}/layer/{{layerversion.layer.name}}">Layer index {{layerversion.layer.name}}</a>
</dd>
{% endif %}
</dl>
diff --git a/lib/toaster/toastergui/templates/mrb_section.html b/lib/toaster/toastergui/templates/mrb_section.html
index 98d9fac82..9fc7dfaee 100644
--- a/lib/toaster/toastergui/templates/mrb_section.html
+++ b/lib/toaster/toastergui/templates/mrb_section.html
@@ -63,7 +63,7 @@
<%/if%>
</div>
- <div data-build-state="<%:state%>">
+ <div class="build-state" data-build-state="<%:state%>">
<%if state == 'Cloning'%>
<%include tmpl='#cloning-repos-build-template'/%>
<%else state == 'Parsing'%>
diff --git a/lib/toaster/toastergui/templates/package_built_dependencies.html b/lib/toaster/toastergui/templates/package_built_dependencies.html
index a5d589357..2493954de 100644
--- a/lib/toaster/toastergui/templates/package_built_dependencies.html
+++ b/lib/toaster/toastergui/templates/package_built_dependencies.html
@@ -18,7 +18,7 @@
</ul>
<div class="tab-content">
<div class="tab-pane active" id="dependencies">
- {% ifequal runtime_deps|length 0 %}
+ {% if runtime_deps|length == 0 %}
<div class="alert alert-info">
<strong>{{package.fullpackagespec}}</strong> has no runtime dependencies.
</div>
@@ -54,8 +54,8 @@
{% endfor %}
</tbody>
</table>
- {% endifequal %}
- {% ifnotequal other_deps|length 0 %}
+ {% endif %}
+ {% if other_deps|length != 0 %}
<h3>Other runtime relationships</h3>
<table class="table table-bordered table-hover">
<thead>
@@ -93,7 +93,7 @@
{% endfor %}
</tbody>
</table>
- {% endifnotequal %}
+ {% endif %}
</div> <!-- tab-pane -->
</div> <!-- tab-content -->
{% endblock tabcontent %}
diff --git a/lib/toaster/toastergui/templates/package_detail_base.html b/lib/toaster/toastergui/templates/package_detail_base.html
index 66f8e7f06..a4fcd2aa4 100644
--- a/lib/toaster/toastergui/templates/package_detail_base.html
+++ b/lib/toaster/toastergui/templates/package_detail_base.html
@@ -127,7 +127,7 @@
{% comment %}
# Removed per team meeting of 1/29/2014 until
# decision on index search algorithm
- <a href="http://layers.openembedded.org" target="_blank">
+ <a href="https://layers.openembedded.org" target="_blank">
<i class="glyphicon glyphicon-share get-info"></i>
</a>
{% endcomment %}
diff --git a/lib/toaster/toastergui/templates/package_included_dependencies.html b/lib/toaster/toastergui/templates/package_included_dependencies.html
index 95e56ded2..1f5ed6d91 100644
--- a/lib/toaster/toastergui/templates/package_included_dependencies.html
+++ b/lib/toaster/toastergui/templates/package_included_dependencies.html
@@ -14,7 +14,7 @@
{% include "package_included_tabs.html" with active_tab="dependencies" %}
<div class="tab-content">
<div class="tab-pane active" id="dependencies">
- {% ifnotequal runtime_deps|length 0 %}
+ {% if runtime_deps|length != 0 %}
<table class="table table-bordered table-hover">
<thead>
<tr>
@@ -48,9 +48,9 @@
<div class="alert alert-info">
<strong>{{package.fullpackagespec}}</strong> has no runtime dependencies.
</div>
- {% endifnotequal %}
+ {% endif %}
- {% ifnotequal other_deps|length 0 %}
+ {% if other_deps|length != 0 %}
<h3>Other runtime relationships</h3>
<table class="table table-bordered table-hover">
<thead>
@@ -103,7 +103,7 @@
{% endfor %}
</tbody>
</table>
- {% endifnotequal %}
+ {% endif %}
</div> <!-- end tab-pane -->
</div> <!-- end tab content -->
{% endwith %}
diff --git a/lib/toaster/toastergui/templates/package_included_reverse_dependencies.html b/lib/toaster/toastergui/templates/package_included_reverse_dependencies.html
index fb310c7fc..dae4549e2 100644
--- a/lib/toaster/toastergui/templates/package_included_reverse_dependencies.html
+++ b/lib/toaster/toastergui/templates/package_included_reverse_dependencies.html
@@ -15,7 +15,7 @@
<div class="tab-content">
<div class="tab-pane active" id="brought-in-by">
- {% ifequal reverse_count 0 %}
+ {% if reverse_count == 0 %}
<div class="alert alert-info">
<strong>{{package.fullpackagespec}}</strong> has no reverse runtime dependencies.
</div>
@@ -43,7 +43,7 @@
{% endfor %}
</tbody>
</table>
- {% endifequal %}
+ {% endif %}
</div> <!-- end tab-pane -->
</div> <!-- end tab content -->
{% endwith %}
diff --git a/lib/toaster/toastergui/templates/project.html b/lib/toaster/toastergui/templates/project.html
index d8ad2c79d..22239a82f 100644
--- a/lib/toaster/toastergui/templates/project.html
+++ b/lib/toaster/toastergui/templates/project.html
@@ -139,7 +139,7 @@
<ul>
<li><a href="{% url 'projectlayers' project.id %}">Choose from the layers compatible with this project</a></li>
<li><a href="{% url 'importlayer' project.id %}">Import a layer</a></li>
- <li><a href="https://www.yoctoproject.org/docs/current/dev-manual/dev-manual.html#understanding-and-creating-layers" target="_blank">Read about layers in the documentation</a></li>
+ <li><a href="http://docs.yoctoproject.org/dev-manual/common-tasks.html#understanding-and-creating-layers" target="_blank">Read about layers in the documentation</a></li>
<li>Or type a layer name below</li>
</ul>
</div>
diff --git a/lib/toaster/toastergui/templates/project_specific.html b/lib/toaster/toastergui/templates/project_specific.html
index 42725c0db..76d45b1b3 100644
--- a/lib/toaster/toastergui/templates/project_specific.html
+++ b/lib/toaster/toastergui/templates/project_specific.html
@@ -137,7 +137,7 @@
<ul>
<li><a href="{% url 'projectlayers' project.id %}">Choose from the layers compatible with this project</a></li>
<li><a href="{% url 'importlayer' project.id %}">Import a layer</a></li>
- <li><a href="https://www.yoctoproject.org/docs/current/dev-manual/dev-manual.html#understanding-and-creating-layers" target="_blank">Read about layers in the documentation</a></li>
+ <li><a href="http://docs.yoctoproject.org/dev-manual/common-tasks.html#understanding-and-creating-layers" target="_blank">Read about layers in the documentation</a></li>
<li>Or type a layer name below</li>
</ul>
</div>
diff --git a/lib/toaster/toastergui/templates/projectconf.html b/lib/toaster/toastergui/templates/projectconf.html
index bd49f1f58..c30683583 100644
--- a/lib/toaster/toastergui/templates/projectconf.html
+++ b/lib/toaster/toastergui/templates/projectconf.html
@@ -73,7 +73,7 @@
{% if image_install_append_defined %}
<dt>
- <span class="js-config-var-name js-config-var-managed-name">IMAGE_INSTALL_append</span>
+ <span class="js-config-var-name js-config-var-managed-name">IMAGE_INSTALL:append</span>
<span class="glyphicon glyphicon-question-sign get-help" title="Specifies additional packages to install into an image. If your build creates more than one image, the packages will be installed in all of them"></span>
</dt>
<dd class="variable-list">
@@ -83,7 +83,7 @@
<form id="change-image_install-form" class="form-inline" style="display:none;">
<div class="row">
<div class="col-md-4">
- <span class="help-block">To set IMAGE_INSTALL_append to more than one package, type the package names separated by a space.</span>
+ <span class="help-block">To set IMAGE_INSTALL:append to more than one package, type the package names separated by a space.</span>
</div>
</div>
<div class="form-group">
@@ -167,8 +167,8 @@
{% for fstype in vars_fstypes %}
<input type="hidden" class="js-checkbox-fstypes-list" value="{{fstype}}">
{% endfor %}
- {% for b in vars_blacklist %}
- <input type="hidden" class="js-config-blacklist-name" value="{{b}}">
+ {% for b in vars_disallowed %}
+ <input type="hidden" class="js-config-disallowed-name" value="{{b}}">
{% endfor %}
{% for b in vars_managed %}
<input type="hidden" class="js-config-managed-name" value="{{b}}">
@@ -201,12 +201,12 @@
<p>Toaster cannot set any variables that impact 1) the configuration of the build servers,
or 2) where artifacts produced by the build are stored. Such variables include: </p>
<p>
- <code><a href="https://www.yoctoproject.org/docs/current/ref-manual/ref-manual.html#var-BB_DISKMON_DIRS" target="_blank">BB_DISKMON_DIRS</a></code>
- <code><a href="https://www.yoctoproject.org/docs/current/ref-manual/ref-manual.html#var-BB_NUMBER_THREADS" target="_blank">BB_NUMBER_THREADS</a></code>
+ <code><a href="http://docs.yoctoproject.org/ref-manual/variables.html#term-BB_DISKMON_DIRS" target="_blank">BB_DISKMON_DIRS</a></code>
+ <code><a href="http://docs.yoctoproject.org/ref-manual/variables.html#term-BB_NUMBER_THREADS" target="_blank">BB_NUMBER_THREADS</a></code>
<code>CVS_PROXY_HOST</code>
<code>CVS_PROXY_PORT</code>
- <code><a href="https://www.yoctoproject.org/docs/current/ref-manual/ref-manual.html#var-PARALLEL_MAKE" target="_blank">PARALLEL_MAKE</a></code>
- <code><a href="https://www.yoctoproject.org/docs/current/ref-manual/ref-manual.html#var-TMPDIR" target="_blank">TMPDIR</a></code></p>
+ <code><a href="http://docs.yoctoproject.org/ref-manual/variables.html#term-PARALLEL_MAKE" target="_blank">PARALLEL_MAKE</a></code>
+ <code><a href="http://docs.yoctoproject.org/ref-manual/variables.html#term-TMPDIR" target="_blank">TMPDIR</a></code></p>
<p>Plus the following standard shell environment variables:</p>
<p><code>http_proxy</code> <code>ftp_proxy</code> <code>https_proxy</code> <code>all_proxy</code></p>
</div>
@@ -238,9 +238,9 @@ function validate_new_variable() {
}
}
- var blacklist_configvars = document.getElementsByClassName('js-config-blacklist-name');
- for (var i = 0, length = blacklist_configvars.length; i < length; i++) {
- if (blacklist_configvars[i].value.toUpperCase() == variable.toUpperCase()) {
+ var disallowed_configvars = document.getElementsByClassName('js-config-disallowed-name');
+ for (var i = 0, length = disallowed_configvars.length; i < length; i++) {
+ if (disallowed_configvars[i].value.toUpperCase() == variable.toUpperCase()) {
error_msg = "You cannot edit this variable in Toaster because it is set by the build servers";
}
}
@@ -771,10 +771,10 @@ $(document).ready(function() {
{% if image_install_append_defined %}
- // init IMAGE_INSTALL_append trash icon
+ // init IMAGE_INSTALL:append trash icon
setDeleteTooltip($('#delete-image_install-icon'));
- // change IMAGE_INSTALL_append variable
+ // change IMAGE_INSTALL:append variable
$('#change-image_install-icon').click(function() {
// preset the edit value
var current_val = $("span#image_install").text().trim();
@@ -814,7 +814,7 @@ $(document).ready(function() {
$('#apply-change-image_install').click(function(){
// insure these non-empty values have single space prefix
var value = " " + $('#new-image_install').val().trim();
- postEditAjaxRequest({"configvarChange" : 'IMAGE_INSTALL_append:'+value});
+ postEditAjaxRequest({"configvarChange" : 'IMAGE_INSTALL:append:'+value});
$('#image_install').text(value);
$('#image_install').removeClass('text-muted');
$("#change-image_install-form").slideUp(function () {
@@ -826,10 +826,10 @@ $(document).ready(function() {
});
});
- // delete IMAGE_INSTALL_append variable value
+ // delete IMAGE_INSTALL:append variable value
$('#delete-image_install-icon').click(function(){
$(this).tooltip('hide');
- postEditAjaxRequest({"configvarChange" : 'IMAGE_INSTALL_append:'+''});
+ postEditAjaxRequest({"configvarChange" : 'IMAGE_INSTALL:append:'+''});
$('#image_install').parent().fadeOut(1000, function(){
$('#image_install').addClass('text-muted');
$('#image_install').text('Not set');
@@ -1011,7 +1011,7 @@ $(document).ready(function() {
$(".save").attr("disabled","disabled");
// Reload page if admin-removed core managed value is manually added back in
- if (0 <= " DISTRO DL_DIR IMAGE_FSTYPES IMAGE_INSTALL_append PACKAGE_CLASSES SSTATE_DIR ".indexOf( " "+variable+" " )) {
+ if (0 <= " DISTRO DL_DIR IMAGE_FSTYPES IMAGE_INSTALL:append PACKAGE_CLASSES SSTATE_DIR ".indexOf( " "+variable+" " )) {
// delayed reload to avoid race condition with postEditAjaxRequest
do_reload=true;
}
diff --git a/lib/toaster/toastergui/templates/recipe.html b/lib/toaster/toastergui/templates/recipe.html
index 3f76e656f..4b5301b54 100644
--- a/lib/toaster/toastergui/templates/recipe.html
+++ b/lib/toaster/toastergui/templates/recipe.html
@@ -186,9 +186,9 @@
<i class="icon-question-sign get-help hover-help" title="{{task.get_outcome_help}}"></i>
</td>
<td>
- {% ifnotequal task.sstate_result task.SSTATE_NA %}
+ {% if task.sstate_result != task.SSTATE_NA %}
{{task.get_sstate_result_display}}
- {% endifnotequal %}
+ {% endif %}
</td>
</tr>
diff --git a/lib/toaster/toastergui/templates/target.html b/lib/toaster/toastergui/templates/target.html
index 1924a0dad..d5f60e77a 100644
--- a/lib/toaster/toastergui/templates/target.html
+++ b/lib/toaster/toastergui/templates/target.html
@@ -8,11 +8,11 @@
{% block nav-target %}
{% for t in build.get_sorted_target_list %}
- {% ifequal target.pk t.pk %}
+ {% if target.pk == t.pk %}
<li class="active"><a href="{% url 'target' build.pk t.pk %}">{{t.target}}</a><li>
{% else %}
<li><a href="{% url 'target' build.pk t.pk %}">{{t.target}}</a><li>
- {% endifequal %}
+ {% endif %}
{% endfor %}
{% endblock %}
diff --git a/lib/toaster/toastergui/templatetags/projecttags.py b/lib/toaster/toastergui/templatetags/projecttags.py
index c432f59a7..bd398f001 100644
--- a/lib/toaster/toastergui/templatetags/projecttags.py
+++ b/lib/toaster/toastergui/templatetags/projecttags.py
@@ -167,8 +167,8 @@ def check_filter_status(options, filter):
def variable_parent_name(value):
""" filter extended variable names to the parent name
"""
- value=re.sub('_\$.*', '', value)
- return re.sub('_[a-z].*', '', value)
+ value = re.sub(r'_\$.*', '', value)
+ return re.sub(r'_[a-z].*', '', value)
@register.filter
def filter_setin_files(file_list, matchstr):
diff --git a/lib/toaster/toastergui/urls.py b/lib/toaster/toastergui/urls.py
index d2df4e604..7f8489d3a 100644
--- a/lib/toaster/toastergui/urls.py
+++ b/lib/toaster/toastergui/urls.py
@@ -6,7 +6,7 @@
# SPDX-License-Identifier: GPL-2.0-only
#
-from django.conf.urls import url
+from django.urls import re_path as url
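+# django.conf.urls.url() was removed in Django 4.0; re_path() is its
+# drop-in replacement for regex routes, aliased here so the url(...)
+# patterns below keep working unchanged.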
from django.views.generic import RedirectView
from toastergui import tables
@@ -95,6 +95,7 @@ urlpatterns = [
# project URLs
url(r'^newproject/$', views.newproject, name='newproject'),
+ url(r'^cmdline/$', views.CommandLineBuilds.as_view(), name='cmdlines'),
url(r'^projects/$',
tables.ProjectsTable.as_view(template_name="projects-toastertable.html"),
name='all-projects'),
@@ -206,8 +207,7 @@ urlpatterns = [
url(r'^js-unit-tests/$', views.jsunittests, name='js-unit-tests'),
# image customisation functionality
- url(r'^xhr_customrecipe/(?P<recipe_id>\d+)'
- '/packages/(?P<package_id>\d+|)$',
+ url(r'^xhr_customrecipe/(?P<recipe_id>\d+)/packages/(?P<package_id>\d+|)$',
api.XhrCustomRecipePackages.as_view(),
name='xhr_customrecipe_packages'),
diff --git a/lib/toaster/toastergui/views.py b/lib/toaster/toastergui/views.py
index 9a5e48e3b..40aed265d 100644
--- a/lib/toaster/toastergui/views.py
+++ b/lib/toaster/toastergui/views.py
@@ -6,24 +6,36 @@
# SPDX-License-Identifier: GPL-2.0-only
#
+import ast
import re
+import subprocess
+import sys
+
+import bb.cooker
+from bb.ui import toasterui
+from bb.ui import eventreplay
from django.db.models import F, Q, Sum
from django.db import IntegrityError
-from django.shortcuts import render, redirect, get_object_or_404
+from django.shortcuts import render, redirect, get_object_or_404, HttpResponseRedirect
from django.utils.http import urlencode
from orm.models import Build, Target, Task, Layer, Layer_Version, Recipe
from orm.models import LogMessage, Variable, Package_Dependency, Package
from orm.models import Task_Dependency, Package_File
from orm.models import Target_Installed_Package, Target_File
from orm.models import TargetKernelFile, TargetSDKFile, Target_Image_File
-from orm.models import BitbakeVersion, CustomImageRecipe
+from orm.models import BitbakeVersion, CustomImageRecipe, EventLogsImports
from django.urls import reverse, resolve
+from django.contrib import messages
+
from django.core.exceptions import ObjectDoesNotExist
+from django.core.files.storage import FileSystemStorage
+from django.core.files.uploadedfile import InMemoryUploadedFile, TemporaryUploadedFile
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.http import HttpResponseNotFound, JsonResponse
from django.utils import timezone
+from django.views.generic import TemplateView
from datetime import timedelta, datetime
from toastergui.templatetags.projecttags import json as jsonfilter
from decimal import Decimal
@@ -32,13 +44,20 @@ import os
from os.path import dirname
import mimetypes
+from toastergui.forms import LoadFileForm
+
+from collections import namedtuple
+
import logging
+from toastermain.logs import log_view_mixin
+
logger = logging.getLogger("toaster")
# Project creation and managed build enable
project_enable = ('1' == os.environ.get('TOASTER_BUILDSERVER'))
is_project_specific = ('1' == os.environ.get('TOASTER_PROJECTSPECIFIC'))
+import_page = False
class MimeTypeFinder(object):
# setting this to False enables additional non-standard mimetypes
@@ -56,6 +75,7 @@ class MimeTypeFinder(object):
return guessed_type
# single point to add global values into the context before rendering
+@log_view_mixin
def toaster_render(request, page, context):
context['project_enable'] = project_enable
context['project_specific'] = is_project_specific
@@ -665,16 +685,17 @@ def recipe_packages(request, build_id, recipe_id):
return response
from django.http import HttpResponse
+@log_view_mixin
def xhr_dirinfo(request, build_id, target_id):
top = request.GET.get('start', '/')
return HttpResponse(_get_dir_entries(build_id, target_id, top), content_type = "application/json")
from django.utils.functional import Promise
-from django.utils.encoding import force_text
+from django.utils.encoding import force_str
class LazyEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, Promise):
- return force_text(obj)
+ return force_str(obj)
return super(LazyEncoder, self).default(obj)
from toastergui.templatetags.projecttags import filtered_filesizeformat
@@ -1404,7 +1425,7 @@ if True:
if not os.path.isdir('%s/conf' % request.POST['importdir']):
raise BadParameterException("Bad path or missing 'conf' directory (%s)" % request.POST['importdir'])
from django.core import management
- management.call_command('buildimport', '--command=import', '--name=%s' % request.POST['projectname'], '--path=%s' % request.POST['importdir'], interactive=False)
+ management.call_command('buildimport', '--command=import', '--name=%s' % request.POST['projectname'], '--path=%s' % request.POST['importdir'])
prj = Project.objects.get(name = request.POST['projectname'])
prj.merged_attr = True
prj.save()
@@ -1606,12 +1627,13 @@ if True:
# make sure we have a machine set for this project
ProjectVariable.objects.get_or_create(project=new_project,
name="MACHINE",
- value="qemux86")
+ value="qemux86-64")
context = {'project': new_project}
return toaster_render(request, "js-unit-tests.html", context)
from django.views.decorators.csrf import csrf_exempt
@csrf_exempt
+ @log_view_mixin
def xhr_testreleasechange(request, pid):
def response(data):
return HttpResponse(jsonfilter(data),
@@ -1648,6 +1670,7 @@ if True:
except Exception as e:
return response({"error": str(e) })
+ @log_view_mixin
def xhr_configvaredit(request, pid):
try:
prj = Project.objects.get(id = pid)
@@ -1683,12 +1706,12 @@ if True:
t=request.POST['configvarDel'].strip()
pt = ProjectVariable.objects.get(pk = int(t)).delete()
- # return all project settings, filter out blacklist and elsewhere-managed variables
- vars_managed,vars_fstypes,vars_blacklist = get_project_configvars_context()
+ # return all project settings, filter out disallowed and elsewhere-managed variables
+ vars_managed,vars_fstypes,vars_disallowed = get_project_configvars_context()
configvars_query = ProjectVariable.objects.filter(project_id = pid).all()
for var in vars_managed:
configvars_query = configvars_query.exclude(name = var)
- for var in vars_blacklist:
+ for var in vars_disallowed:
configvars_query = configvars_query.exclude(name = var)
return_data = {
@@ -1708,7 +1731,7 @@ if True:
except ProjectVariable.DoesNotExist:
pass
try:
- return_data['image_install_append'] = ProjectVariable.objects.get(project = prj, name = "IMAGE_INSTALL_append").value,
+ return_data['image_install:append'] = ProjectVariable.objects.get(project = prj, name = "IMAGE_INSTALL:append").value,
except ProjectVariable.DoesNotExist:
pass
try:
@@ -1726,6 +1749,7 @@ if True:
return HttpResponse(json.dumps({"error":str(e) + "\n" + traceback.format_exc()}), content_type = "application/json")
+ @log_view_mixin
def customrecipe_download(request, pid, recipe_id):
recipe = get_object_or_404(CustomImageRecipe, pk=recipe_id)
@@ -1781,7 +1805,7 @@ if True:
'MACHINE', 'BBLAYERS'
}
- vars_blacklist = {
+ vars_disallowed = {
'PARALLEL_MAKE','BB_NUMBER_THREADS',
'BB_DISKMON_DIRS','BB_NUMBER_THREADS','CVS_PROXY_HOST','CVS_PROXY_PORT',
'PARALLEL_MAKE','TMPDIR',
@@ -1790,7 +1814,7 @@ if True:
vars_fstypes = Target_Image_File.SUFFIXES
- return(vars_managed,sorted(vars_fstypes),vars_blacklist)
+ return(vars_managed,sorted(vars_fstypes),vars_disallowed)
def projectconf(request, pid):
@@ -1799,12 +1823,12 @@ if True:
except Project.DoesNotExist:
return HttpResponseNotFound("<h1>Project id " + pid + " is unavailable</h1>")
- # remove blacklist and externally managed varaibles from this list
- vars_managed,vars_fstypes,vars_blacklist = get_project_configvars_context()
+ # remove disallowed and externally managed variables from this list
+ vars_managed,vars_fstypes,vars_disallowed = get_project_configvars_context()
configvars = ProjectVariable.objects.filter(project_id = pid).all()
for var in vars_managed:
configvars = configvars.exclude(name = var)
- for var in vars_blacklist:
+ for var in vars_disallowed:
configvars = configvars.exclude(name = var)
context = {
@@ -1812,7 +1836,7 @@ if True:
'configvars': configvars,
'vars_managed': vars_managed,
'vars_fstypes': vars_fstypes,
- 'vars_blacklist': vars_blacklist,
+ 'vars_disallowed': vars_disallowed,
}
try:
@@ -1839,7 +1863,7 @@ if True:
except ProjectVariable.DoesNotExist:
pass
try:
- context['image_install_append'] = ProjectVariable.objects.get(project = prj, name = "IMAGE_INSTALL_append").value
+ context['image_install:append'] = ProjectVariable.objects.get(project = prj, name = "IMAGE_INSTALL:append").value
context['image_install_append_defined'] = "1"
except ProjectVariable.DoesNotExist:
pass
@@ -1933,3 +1957,163 @@ if True:
except (ObjectDoesNotExist, IOError):
return toaster_render(request, "unavailable_artifact.html")
+
+class CommandLineBuilds(TemplateView):
+ model = EventLogsImports
+ template_name = 'command_line_builds.html'
+
+ def get_context_data(self, **kwargs):
+ context = super(CommandLineBuilds, self).get_context_data(**kwargs)
+ #get value from BB_DEFAULT_EVENTLOG defined in bitbake.conf
+ eventlog = subprocess.check_output(['bitbake-getvar', 'BB_DEFAULT_EVENTLOG', '--value'])
+ if eventlog:
+ logs_dir = os.path.dirname(eventlog.decode().strip('\n'))
+ files = os.listdir(logs_dir)
+ imported_files = EventLogsImports.objects.all()
+ files_list = []
+
+ # Filter files that end with ".json"
+ event_files = []
+ for file in files:
+ if file.endswith(".json"):
+ # because BB_DEFAULT_EVENTLOG is a directory, we need to check if the file is a valid eventlog
+ with open("{}/{}".format(logs_dir, file)) as efile:
+ content = efile.read()
+ if 'allvariables' in content:
+ event_files.append(file)
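+ # A file only qualifies as an eventlog if some line mentions
+ # 'allvariables', the key under which the build variables are
+ # dumped when BB_DEFAULT_EVENTLOG is in effect.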
+
+ #build dict for template using db data
+ for event_file in event_files:
+ if imported_files.filter(name=event_file):
+ files_list.append({
+ 'name': event_file,
+ 'imported': True,
+ 'build_id': imported_files.filter(name=event_file)[0].build_id,
+ 'size': os.path.getsize("{}/{}".format(logs_dir, event_file))
+ })
+ else:
+ files_list.append({
+ 'name': event_file,
+ 'imported': False,
+ 'build_id': None,
+ 'size': os.path.getsize("{}/{}".format(logs_dir, event_file))
+ })
+ context['import_all'] = True
+
+ context['files'] = files_list
+ context['dir'] = logs_dir
+ else:
+ context['files'] = []
+ context['dir'] = ''
+
+ # enable session variable
+ if not self.request.session.get('file'):
+ self.request.session['file'] = ""
+
+ context['form'] = LoadFileForm()
+ context['project_enable'] = project_enable
+ return context
+
+ def post(self, request, **kwargs):
+ logs_dir = request.POST.get('dir')
+ all_files = request.POST.get('all')
+
+ # check if a build is already in progress
+ if Build.objects.filter(outcome=Build.IN_PROGRESS):
+ messages.add_message(
+ self.request,
+ messages.ERROR,
+ "A build is already in progress. Please wait for it to complete before starting a new build."
+ )
+ return JsonResponse({'response': 'building'})
+ imported_files = EventLogsImports.objects.all()
+ try:
+ if all_files == 'true':
+ # use of session variable to deactivate icon for builds in progress
+ request.session['all_builds'] = True
+ request.session.modified = True
+ request.session.save()
+
+ files = ast.literal_eval(request.POST.get('file'))
+ for file in files:
+ if imported_files.filter(name=file.get('name')).exists():
+ imported_files.filter(name=file.get('name'))[0].imported = True
+ else:
+ with open("{}/{}".format(logs_dir, file.get('name'))) as eventfile:
+ # scan the leading lines for the dumped build variables
+ variables = None
+ while line := eventfile.readline().strip():
+ try:
+ variables = json.loads(line)['allvariables']
+ break
+ except (KeyError, json.JSONDecodeError):
+ continue
+ if not variables:
+ raise Exception("File content missing build variables")
+ eventfile.seek(0)
+ params = namedtuple('ConfigParams', ['observe_only'])(True)
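+ # toasterui.main() expects a config-params object; a one-field
+ # namedtuple with observe_only=True suffices, since the UI only
+ # consumes the replayed events and never drives a build itself.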
+ player = eventreplay.EventPlayer(eventfile, variables)
+
+ toasterui.main(player, player, params)
+ event_log_import = EventLogsImports.objects.create(name=file.get('name'), imported=True)
+ event_log_import.build_id = Build.objects.last().id
+ event_log_import.save()
+ else:
+ if self.request.FILES.get('eventlog_file'):
+ file = self.request.FILES['eventlog_file']
+ else:
+ file = request.POST.get('file')
+ # use of session variable to deactivate icon for build in progress
+ request.session['file'] = file
+ request.session['all_builds'] = False
+ request.session.modified = True
+ request.session.save()
+
+ if imported_files.filter(name=file).exists():
+ imported_files.filter(name=file)[0].imported = True
+ else:
+ if isinstance(file, InMemoryUploadedFile) or isinstance(file, TemporaryUploadedFile):
+ variables = None
+ while line := file.readline().strip():
+ try:
+ variables = json.loads(line)['allvariables']
+ break
+ except (KeyError, json.JSONDecodeError):
+ continue
+ if not variables:
+ raise Exception("File content missing build variables")
+ file.seek(0)
+ params = namedtuple('ConfigParams', ['observe_only'])(True)
+ player = eventreplay.EventPlayer(file, variables)
+ if not os.path.exists('{}/{}'.format(logs_dir, file.name)):
+ fs = FileSystemStorage(location=logs_dir)
+ fs.save(file.name, file)
+ toasterui.main(player, player, params)
+ else:
+ with open("{}/{}".format(logs_dir, file)) as eventfile:
+ # scan the leading lines for the dumped build variables
+ variables = None
+ while line := eventfile.readline().strip():
+ try:
+ variables = json.loads(line)['allvariables']
+ break
+ except (KeyError, json.JSONDecodeError):
+ continue
+ if not variables:
+ raise Exception("File content missing build variables")
+ eventfile.seek(0)
+ params = namedtuple('ConfigParams', ['observe_only'])(True)
+ player = eventreplay.EventPlayer(eventfile, variables)
+ toasterui.main(player, player, params)
+ event_log_import = EventLogsImports.objects.create(name=file, imported=True)
+ event_log_import.build_id = Build.objects.last().id
+ event_log_import.save()
+ request.session['file'] = ""
+ except Exception:
+ messages.add_message(
+ self.request,
+ messages.ERROR,
+ "The file content is not in the correct format. Update file content or upload a different file."
+ )
+ return HttpResponseRedirect("/toastergui/cmdline/")
+ return HttpResponseRedirect('/toastergui/builds/')
diff --git a/lib/toaster/toastergui/widgets.py b/lib/toaster/toastergui/widgets.py
index ceff52942..b32abf40b 100644
--- a/lib/toaster/toastergui/widgets.py
+++ b/lib/toaster/toastergui/widgets.py
@@ -7,6 +7,7 @@
#
from django.views.generic import View, TemplateView
+from django.utils.decorators import method_decorator
from django.views.decorators.cache import cache_control
from django.shortcuts import HttpResponse
from django.core.cache import cache
@@ -31,6 +32,7 @@ import re
import os
from toastergui.tablefilter import TableFilterMap
+from toastermain.logs import log_view_mixin
try:
from urllib import unquote_plus
@@ -63,8 +65,8 @@ class ToasterTable(TemplateView):
self.default_orderby = ""
# prevent HTTP caching of table data
- @cache_control(must_revalidate=True,
- max_age=0, no_store=True, no_cache=True)
+ @method_decorator(cache_control(must_revalidate=True,
+ max_age=0, no_store=True, no_cache=True))
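+ # cache_control() decorates plain view functions; method_decorator()
+ # adapts it so it can wrap the bound dispatch() method of this
+ # class-based view.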
def dispatch(self, *args, **kwargs):
return super(ToasterTable, self).dispatch(*args, **kwargs)
@@ -83,6 +85,7 @@ class ToasterTable(TemplateView):
return context
+ @log_view_mixin
def get(self, request, *args, **kwargs):
if request.GET.get('format', None) == 'json':
@@ -304,6 +307,7 @@ class ToasterTable(TemplateView):
self.setup_columns(**kwargs)
+ self.apply_orderby('pk')
if search:
self.apply_search(search)
if filters:
@@ -413,6 +417,7 @@ class ToasterTypeAhead(View):
def __init__(self, *args, **kwargs):
super(ToasterTypeAhead, self).__init__()
+ @log_view_mixin
def get(self, request, *args, **kwargs):
def response(data):
return HttpResponse(json.dumps(data,
@@ -468,6 +473,7 @@ class MostRecentBuildsView(View):
return False
+ @log_view_mixin
def get(self, request, *args, **kwargs):
"""
Returns a list of builds in JSON format.
diff --git a/lib/toaster/toastermain/logs.py b/lib/toaster/toastermain/logs.py
new file mode 100644
index 000000000..62d871963
--- /dev/null
+++ b/lib/toaster/toastermain/logs.py
@@ -0,0 +1,158 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+import os
+import logging
+import json
+from pathlib import Path
+from django.http import HttpRequest
+
+BUILDDIR = Path(os.environ.get('BUILDDIR', '/tmp'))
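+# Note: the TimedRotatingFileHandler entries below open their log files on
+# startup and do not create missing directories, so toaster_logs/ under
+# BUILDDIR is assumed to exist before this configuration is loaded.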
+
+def log_api_request(request, response, view, logger_name='api'):
+ """Helper function for LogAPIMixin"""
+
+ repjson = {
+ 'view': view,
+ 'path': request.path,
+ 'method': request.method,
+ 'status': response.status_code
+ }
+
+ logger = logging.getLogger(logger_name)
+ logger.info(
+ json.dumps(repjson, indent=4, separators=(", ", " : "))
+ )
+
+
+def log_view_mixin(view):
+ def log_view_request(*args, **kwargs):
+ # get request from args else kwargs
+ request = None
+ if len(args) > 0:
+ for req in args:
+ if isinstance(req, HttpRequest):
+ request = req
+ break
+ if request is None:
+ request = kwargs.get('request')
+
+ response = view(*args, **kwargs)
+ view_name = 'unknown'
+ if hasattr(request, 'resolver_match'):
+ if hasattr(request.resolver_match, 'view_name'):
+ view_name = request.resolver_match.view_name
+
+ log_api_request(
+ request, response, view_name, 'toaster')
+ return response
+ return log_view_request
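+# Example usage (hypothetical view): decorate anything that receives an
+# HttpRequest and each call is logged as a JSON record to the 'toaster'
+# logger, e.g.
+#
+#   @log_view_mixin
+#   def my_view(request):
+#       return HttpResponse('ok')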
+
+
+
+class LogAPIMixin:
+ """Logs API requests
+
+ tested with:
+ - APIView
+ - ModelViewSet
+ - ReadOnlyModelViewSet
+ - GenericAPIView
+
+ Note: you can set `view_name` attribute in View to override get_view_name()
+ """
+
+ def get_view_name(self):
+ if hasattr(self, 'view_name'):
+ return self.view_name
+ return super().get_view_name()
+
+ def finalize_response(self, request, response, *args, **kwargs):
+ log_api_request(request, response, self.get_view_name())
+ return super().finalize_response(request, response, *args, **kwargs)
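+# Example (hypothetical DRF view): mix LogAPIMixin in ahead of the base
+# class so finalize_response() logs every API response, e.g.
+#
+#   class BuildsAPI(LogAPIMixin, APIView):
+#       view_name = 'builds-api'  # optional override read by get_view_name()
+#       def get(self, request):
+#           return Response({'builds': []})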
+
+
+LOGGING_SETTINGS = {
+ 'version': 1,
+ 'disable_existing_loggers': False,
+ 'filters': {
+ 'require_debug_false': {
+ '()': 'django.utils.log.RequireDebugFalse'
+ }
+ },
+ 'formatters': {
+ 'datetime': {
+ 'format': '%(asctime)s %(levelname)s %(message)s'
+ },
+ 'verbose': {
+ 'format': '{levelname} {asctime} {module} {name}.{funcName} {process:d} {thread:d} {message}',
+ 'datefmt': "%d/%b/%Y %H:%M:%S",
+ 'style': '{',
+ },
+ 'api': {
+ 'format': '\n{levelname} {asctime} {name}.{funcName}:\n{message}',
+ 'style': '{'
+ }
+ },
+ 'handlers': {
+ 'mail_admins': {
+ 'level': 'ERROR',
+ 'filters': ['require_debug_false'],
+ 'class': 'django.utils.log.AdminEmailHandler'
+ },
+ 'console': {
+ 'level': 'DEBUG',
+ 'class': 'logging.StreamHandler',
+ 'formatter': 'datetime',
+ },
+ 'file_django': {
+ 'level': 'INFO',
+ 'class': 'logging.handlers.TimedRotatingFileHandler',
+ 'filename': BUILDDIR / 'toaster_logs/django.log',
+ 'when': 'D', # interval type
+ 'interval': 1, # defaults to 1
+ 'backupCount': 10, # how many files to keep
+ 'formatter': 'verbose',
+ },
+ 'file_api': {
+ 'level': 'INFO',
+ 'class': 'logging.handlers.TimedRotatingFileHandler',
+ 'filename': BUILDDIR / 'toaster_logs/api.log',
+ 'when': 'D',
+ 'interval': 1,
+ 'backupCount': 10,
+ 'formatter': 'verbose',
+ },
+ 'file_toaster': {
+ 'level': 'INFO',
+ 'class': 'logging.handlers.TimedRotatingFileHandler',
+ 'filename': BUILDDIR / 'toaster_logs/web.log',
+ 'when': 'D',
+ 'interval': 1,
+ 'backupCount': 10,
+ 'formatter': 'verbose',
+ },
+ },
+ 'loggers': {
+ 'django.request': {
+ 'handlers': ['file_django', 'console'],
+ 'level': 'WARN',
+ 'propagate': True,
+ },
+ 'django': {
+ 'handlers': ['file_django', 'console'],
+ 'level': 'WARNING',
+ 'propagate': True,
+ },
+ 'toaster': {
+ 'handlers': ['file_toaster'],
+ 'level': 'INFO',
+ 'propagate': False,
+ },
+ 'api': {
+ 'handlers': ['file_api'],
+ 'level': 'INFO',
+ 'propagate': False,
+ }
+ }
+}
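+# LOGGING_SETTINGS is imported by toastermain/settings.py (patched later in
+# this commit) so it can be wired into Django's standard LOGGING setting.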
diff --git a/lib/toaster/toastermain/management/commands/buildimport.py b/lib/toaster/toastermain/management/commands/buildimport.py
index 59da6ff7a..f7139aa04 100644
--- a/lib/toaster/toastermain/management/commands/buildimport.py
+++ b/lib/toaster/toastermain/management/commands/buildimport.py
@@ -451,7 +451,7 @@ class Command(BaseCommand):
# Catch vars relevant to Toaster (in case no Toaster section)
self.update_project_vars(project,'DISTRO')
self.update_project_vars(project,'MACHINE')
- self.update_project_vars(project,'IMAGE_INSTALL_append')
+ self.update_project_vars(project,'IMAGE_INSTALL:append')
self.update_project_vars(project,'IMAGE_FSTYPES')
self.update_project_vars(project,'PACKAGE_CLASSES')
# These vars are typically only assigned by Toaster
@@ -545,7 +545,7 @@ class Command(BaseCommand):
# Find the directory's release, and promote to default_release if local paths
release = self.find_import_release(layers_list,lv_dict,default_release)
# create project, SANITY: reuse any project of same name
- project = Project.objects.create_project(project_name,release,project)
+ project = Project.objects.create_project(project_name,release,project, imported=True)
# Apply any new layers or variables
self.apply_conf_variables(project,layers_list,lv_dict,release)
# WORKAROUND: since we now derive the release, redirect 'newproject_specific' to 'project_specific'
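The IMAGE_INSTALL rename above tracks BitBake's override-syntax change: from the
Yocto Project 3.4 ("honister") series onward, appends are spelled with a colon,
e.g. IMAGE_INSTALL:append = " dropbear", rather than the old underscore form
IMAGE_INSTALL_append, so imported build configurations must be matched against
the new spelling.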
diff --git a/lib/toaster/toastermain/management/commands/checksocket.py b/lib/toaster/toastermain/management/commands/checksocket.py
index 811fd5d51..b2c002da7 100644
--- a/lib/toaster/toastermain/management/commands/checksocket.py
+++ b/lib/toaster/toastermain/management/commands/checksocket.py
@@ -13,7 +13,7 @@ import errno
import socket
from django.core.management.base import BaseCommand, CommandError
-from django.utils.encoding import force_text
+from django.utils.encoding import force_str
DEFAULT_ADDRPORT = "0.0.0.0:8000"
@@ -51,7 +51,7 @@ class Command(BaseCommand):
if hasattr(err, 'errno') and err.errno in errors:
errtext = errors[err.errno]
else:
- errtext = force_text(err)
+ errtext = force_str(err)
raise CommandError(errtext)
self.stdout.write("OK")
diff --git a/lib/toaster/toastermain/settings.py b/lib/toaster/toastermain/settings.py
index a4b370c8d..e06adc5a9 100644
--- a/lib/toaster/toastermain/settings.py
+++ b/lib/toaster/toastermain/settings.py
@@ -9,6 +9,8 @@
# Django settings for Toaster project.
import os
+from pathlib import Path
+from toastermain.logs import LOGGING_SETTINGS
DEBUG = True
@@ -39,6 +41,9 @@ DATABASES = {
}
}
+# New in Django 3.2
+DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
+
# Needed when Using sqlite especially to add a longer timeout for waiting
# for the database lock to be released
# https://docs.djangoproject.com/en/1.6/ref/databases/#database-is-locked-errors
@@ -84,14 +89,17 @@ else:
from pytz.exceptions import UnknownTimeZoneError
try:
if pytz.timezone(zonename) is not None:
- zonefilelist[hashlib.md5(open(filepath, 'rb').read()).hexdigest()] = zonename
+ with open(filepath, 'rb') as f:
+ zonefilelist[hashlib.md5(f.read()).hexdigest()] = zonename
except UnknownTimeZoneError:
# we expect timezone failures here, just move over
pass
except ImportError:
- zonefilelist[hashlib.md5(open(filepath, 'rb').read()).hexdigest()] = zonename
+ with open(filepath, 'rb') as f:
+ zonefilelist[hashlib.md5(f.read()).hexdigest()] = zonename
- TIME_ZONE = zonefilelist[hashlib.md5(open('/etc/localtime', 'rb').read()).hexdigest()]
+ with open('/etc/localtime', 'rb') as f:
+ TIME_ZONE = zonefilelist[hashlib.md5(f.read()).hexdigest()]
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
@@ -103,10 +111,6 @@ SITE_ID = 1
# to load the internationalization machinery.
USE_I18N = True
-# If you set this to False, Django will not format dates, numbers and
-# calendars according to the current locale.
-USE_L10N = True
-
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
@@ -147,6 +151,8 @@ STATICFILES_FINDERS = (
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'NOT_SUITABLE_FOR_HOSTED_DEPLOYMENT'
+TMPDIR = os.environ.get('TOASTER_DJANGO_TMPDIR', '/tmp')
+
class InvalidString(str):
def __mod__(self, other):
from django.template.base import TemplateSyntaxError
@@ -183,7 +189,13 @@ TEMPLATES = [
'django.template.loaders.app_directories.Loader',
#'django.template.loaders.eggs.Loader',
],
- 'string_if_invalid': InvalidString("%s"),
+ # https://docs.djangoproject.com/en/4.2/ref/templates/api/#how-invalid-variables-are-handled
+ # Generally, string_if_invalid should only be enabled in order to debug
+ # a specific template problem, then cleared once debugging is complete.
+ # If you assign a value other than '' to string_if_invalid,
+ # you will experience rendering problems with these templates and sites.
+ # 'string_if_invalid': InvalidString("%s"),
+ 'string_if_invalid': "",
'debug': DEBUG,
},
},
@@ -207,7 +219,7 @@ CACHES = {
# },
'default': {
'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
- 'LOCATION': '/tmp/toaster_cache_%d' % os.getuid(),
+ 'LOCATION': '%s/toaster_cache_%d' % (TMPDIR, os.getuid()),
'TIMEOUT': 1,
}
}
@@ -239,6 +251,9 @@ INSTALLED_APPS = (
'django.contrib.humanize',
'bldcollector',
'toastermain',
+
+ # Third-party apps
+ "log_viewer",
)
@@ -299,43 +314,21 @@ for t in os.walk(os.path.dirname(currentdir)):
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
-LOGGING = {
- 'version': 1,
- 'disable_existing_loggers': False,
- 'filters': {
- 'require_debug_false': {
- '()': 'django.utils.log.RequireDebugFalse'
- }
- },
- 'formatters': {
- 'datetime': {
- 'format': '%(asctime)s %(levelname)s %(message)s'
- }
- },
- 'handlers': {
- 'mail_admins': {
- 'level': 'ERROR',
- 'filters': ['require_debug_false'],
- 'class': 'django.utils.log.AdminEmailHandler'
- },
- 'console': {
- 'level': 'DEBUG',
- 'class': 'logging.StreamHandler',
- 'formatter': 'datetime',
- }
- },
- 'loggers': {
- 'toaster' : {
- 'handlers': ['console'],
- 'level': 'DEBUG',
- },
- 'django.request': {
- 'handlers': ['console'],
- 'level': 'WARN',
- 'propagate': True,
- },
- }
-}
+LOGGING = LOGGING_SETTINGS
+
+# Base directory for build artifacts and logs; BUILDDIR comes from the environment, falling back to TMPDIR.
+BUILDDIR = os.environ.get("BUILDDIR", TMPDIR)
+
+# LOG VIEWER
+# https://pypi.org/project/django-log-viewer/
+LOG_VIEWER_FILES_PATTERN = '*.log*'
+LOG_VIEWER_FILES_DIR = os.path.join(BUILDDIR, "toaster_logs/")
+LOG_VIEWER_PAGE_LENGTH = 25 # log lines shown per page
+LOG_VIEWER_MAX_READ_LINES = 100000 # maximum log lines read per file
+LOG_VIEWER_PATTERNS = ['INFO', 'DEBUG', 'WARNING', 'ERROR', 'CRITICAL']
+
+# Optionally, the following variables can be set to customize the admin:
+LOG_VIEWER_FILE_LIST_TITLE = "Logs list"
if DEBUG and SQL_DEBUG:
LOGGING['loggers']['django.db.backends'] = {
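The context above ends mid-statement; in the unchanged remainder of settings.py
the assignment plausibly completes along these lines (a sketch assuming the
conventional dictConfig shape, since the actual handler list sits outside this
hunk):

    if DEBUG and SQL_DEBUG:
        LOGGING['loggers']['django.db.backends'] = {
            'level': 'DEBUG',
            'handlers': ['console'],
        }

so that every SQL statement Django issues is echoed through the console handler
from LOGGING_SETTINGS whenever both flags are set.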
diff --git a/lib/toaster/toastermain/settings_test.py b/lib/toaster/toastermain/settings_test.py
index 6538d9e45..74def2d24 100644
--- a/lib/toaster/toastermain/settings_test.py
+++ b/lib/toaster/toastermain/settings_test.py
@@ -19,10 +19,10 @@ TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
- 'NAME': '/tmp/toaster-test-db.sqlite',
+ 'NAME': '%s/toaster-test-db.sqlite' % TMPDIR,
'TEST': {
'ENGINE': 'django.db.backends.sqlite3',
- 'NAME': '/tmp/toaster-test-db.sqlite',
+ 'NAME': '%s/toaster-test-db.sqlite' % TMPDIR,
}
}
}
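TMPDIR is not defined in this hunk; it is evidently star-imported from
toastermain.settings (note that DEBUG is already in scope at the top of this
file), so pointing TOASTER_DJANGO_TMPDIR at any writable location relocates the
test database together with the caches and logs, e.g.
TOASTER_DJANGO_TMPDIR=/var/tmp/toaster tox (path illustrative).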
diff --git a/lib/toaster/toastermain/urls.py b/lib/toaster/toastermain/urls.py
index 5fb520b38..3be46fcf0 100644
--- a/lib/toaster/toastermain/urls.py
+++ b/lib/toaster/toastermain/urls.py
@@ -6,7 +6,7 @@
# SPDX-License-Identifier: GPL-2.0-only
#
-from django.conf.urls import include, url
+from django.urls import re_path as url, include
from django.views.generic import RedirectView, TemplateView
from django.views.decorators.cache import never_cache
import bldcollector.views
@@ -28,6 +28,8 @@ urlpatterns = [
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
+ url(r'^logs/', include('log_viewer.urls')),
+
# This is here to maintain backward compatibility and will be deprecated
# in the future.
url(r'^orm/eventfile$', bldcollector.views.eventfile),
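Importing re_path under the name url keeps every existing url(r'^...') call site
compiling unchanged; django.conf.urls.url was deprecated in Django 3.1 and
removed in 4.0. Spelled without the alias, the route added above would read
(illustrative only):

    from django.urls import include, re_path

    urlpatterns = [
        re_path(r'^logs/', include('log_viewer.urls')),
    ]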
diff --git a/lib/toaster/tox.ini b/lib/toaster/tox.ini
new file mode 100644
index 000000000..1516a527a
--- /dev/null
+++ b/lib/toaster/tox.ini
@@ -0,0 +1,24 @@
+[tox]
+envlist = py38, py39, py310, py311, py312
+skipsdist = True
+toxworkdir = {env:TOX_WORKDIR:.tox}
+passenv = *
+
+[testenv]
+passenv =
+ SSTATE_DIR
+ DL_DIR
+ TOASTER_DJANGO_TMPDIR
+setenv =
+ DJANGO_SETTINGS_MODULE=toastermain.settings_test
+ TOASTER_BUILDSERVER=1
+ BUILDDIR = {env:BUILDDIR}
+ EVENTREPLAY_DIR = {env:EVENTREPLAY_DIR:{env:BUILDDIR}}
+commands =
+ python3 {toxinidir}/manage.py test tests.db tests.commands tests.builds tests.browser tests.functional tests.views
+deps =
+ -r {toxinidir}/../../toaster-requirements.txt
+ -r {toxinidir}/tests/toaster-tests-requirements.txt
+
+[testenv:chrome]
+commands={[testenv]commands} --splinter-webdriver=chrome
\ No newline at end of file
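With this file in place the suite can be driven per interpreter: tox -e py311
runs a single environment and bare tox runs the whole envlist, while
tox -e chrome reuses the base test command and appends
--splinter-webdriver=chrome for browser-backed runs.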
diff --git a/toaster-requirements.txt b/toaster-requirements.txt
index 735b61454..d8e48b7f3 100644
--- a/toaster-requirements.txt
+++ b/toaster-requirements.txt
@@ -1,3 +1,4 @@
-Django>2.2,<2.3
+Django>4.2,<4.3
beautifulsoup4>=4.4.0
pytz
+django-log-viewer==1.1.7